diff --git a/.buildkite/dagster-buildkite/dagster_buildkite/cli.py b/.buildkite/dagster-buildkite/dagster_buildkite/cli.py index 243c2bf28c73f..2b80a634ad8d7 100755 --- a/.buildkite/dagster-buildkite/dagster_buildkite/cli.py +++ b/.buildkite/dagster-buildkite/dagster_buildkite/cli.py @@ -2,9 +2,10 @@ from dagster_buildkite.git import GitInfo from dagster_buildkite.pipelines.dagster_oss_main import build_dagster_oss_main_steps +from dagster_buildkite.pipelines.dagster_oss_nightly_pipeline import build_dagster_oss_nightly_steps +from dagster_buildkite.pipelines.prerelease_package import build_prerelease_package_steps from dagster_buildkite.python_packages import PythonPackages - -from .utils import buildkite_yaml_for_steps +from dagster_buildkite.utils import buildkite_yaml_for_steps CLI_HELP = """This CLI is used for generating Buildkite YAML. Each function corresponds to an entry point defined in `setup.py`. Buildkite invokes these entry points when loading the specification for @@ -17,3 +18,17 @@ def dagster() -> None: steps = build_dagster_oss_main_steps() buildkite_yaml = buildkite_yaml_for_steps(steps) print(buildkite_yaml) # noqa: T201 + + +def dagster_nightly() -> None: + PythonPackages.load_from_git(GitInfo(directory=Path("."))) + steps = build_dagster_oss_nightly_steps() + buildkite_yaml = buildkite_yaml_for_steps(steps, custom_slack_channel="eng-buildkite-nightly") + print(buildkite_yaml) # noqa: T201 + + +def prerelease_package() -> None: + PythonPackages.load_from_git(GitInfo(directory=Path("."))) + steps = build_prerelease_package_steps() + buildkite_yaml = buildkite_yaml_for_steps(steps) + print(buildkite_yaml) # noqa: T201 diff --git a/.buildkite/dagster-buildkite/dagster_buildkite/package_spec.py b/.buildkite/dagster-buildkite/dagster_buildkite/package_spec.py index fd675a039b87a..84528cbeb9163 100644 --- a/.buildkite/dagster-buildkite/dagster_buildkite/package_spec.py +++ b/.buildkite/dagster-buildkite/dagster_buildkite/package_spec.py @@ -9,11 +9,10 @@ from dagster_buildkite.git import ChangedFiles from dagster_buildkite.python_packages import PythonPackages, changed_filetypes - -from .python_version import AvailablePythonVersion -from .step_builder import BuildkiteQueue -from .steps.tox import build_tox_step -from .utils import ( +from dagster_buildkite.python_version import AvailablePythonVersion +from dagster_buildkite.step_builder import BuildkiteQueue +from dagster_buildkite.steps.tox import build_tox_step +from dagster_buildkite.utils import ( BuildkiteLeafStep, BuildkiteTopLevelStep, GroupStep, @@ -165,9 +164,9 @@ def build_steps(self) -> List[BuildkiteTopLevelStep]: pytest_python_versions = sorted( list(set(default_python_versions) - set(unsupported_python_versions)) ) - # Use lowest supported python version if no defaults match. 
+        # Use highest supported python version if no defaults match.
         if len(pytest_python_versions) == 0:
-            pytest_python_versions = [supported_python_versions[0]]
+            pytest_python_versions = [supported_python_versions[-1]]

         for py_version in pytest_python_versions:
             version_factor = AvailablePythonVersion.to_tox_factor(py_version)
diff --git a/.buildkite/dagster-buildkite/dagster_buildkite/pipelines/dagster_oss_main.py b/.buildkite/dagster-buildkite/dagster_buildkite/pipelines/dagster_oss_main.py
index 7d73afd4f19dc..fc8dc0426c6a2 100644
--- a/.buildkite/dagster-buildkite/dagster_buildkite/pipelines/dagster_oss_main.py
+++ b/.buildkite/dagster-buildkite/dagster_buildkite/pipelines/dagster_oss_main.py
@@ -10,7 +10,7 @@
 )
 from dagster_buildkite.steps.docs import build_docs_steps
 from dagster_buildkite.steps.trigger import build_trigger_step
-from dagster_buildkite.utils import BuildkiteStep, is_release_branch, safe_getenv
+from dagster_buildkite.utils import BuildkiteStep, is_release_branch, message_contains, safe_getenv


 def build_dagster_oss_main_steps() -> List[BuildkiteStep]:
@@ -33,8 +33,11 @@ def build_dagster_oss_main_steps() -> List[BuildkiteStep]:
         pipeline_name = "oss-internal-compatibility"
         trigger_branch = _get_setting("INTERNAL_BRANCH") or "master"
         async_step = False
-        # Use OSS_COMPAT_SLIM by default unless an internal branch is explicitly specified
-        oss_compat_slim = _get_setting("OSS_COMPAT_SLIM") or not _get_setting("INTERNAL_BRANCH")
+        # Use OSS_COMPAT_SLIM by default unless an internal branch is explicitly specified or
+        # the commit message contains "NO_SKIP"
+        oss_compat_slim = _get_setting("OSS_COMPAT_SLIM") or not (
+            _get_setting("INTERNAL_BRANCH") or message_contains("NO_SKIP")
+        )

     steps.append(
         build_trigger_step(
diff --git a/.buildkite/dagster-buildkite/dagster_buildkite/pipelines/dagster_oss_nightly_pipeline.py b/.buildkite/dagster-buildkite/dagster_buildkite/pipelines/dagster_oss_nightly_pipeline.py
new file mode 100644
index 0000000000000..23dc34b5f87ca
--- /dev/null
+++ b/.buildkite/dagster-buildkite/dagster_buildkite/pipelines/dagster_oss_nightly_pipeline.py
@@ -0,0 +1,48 @@
+from typing import List
+
+from dagster_buildkite.package_spec import PackageSpec
+from dagster_buildkite.python_version import AvailablePythonVersion
+from dagster_buildkite.steps.packages import (
+    build_steps_from_package_specs,
+    gcp_creds_extra_cmds,
+    k8s_extra_cmds,
+)
+from dagster_buildkite.utils import BuildkiteStep
+
+
+def build_dagster_oss_nightly_steps() -> List[BuildkiteStep]:
+    steps: List[BuildkiteStep] = []
+
+    steps += build_steps_from_package_specs(
+        [
+            PackageSpec(
+                "python_modules/libraries/dagster-dbt",
+                pytest_tox_factors=["dbt18-snowflake", "dbt18-bigquery"],
+                env_vars=[
+                    "SNOWFLAKE_ACCOUNT",
+                    "SNOWFLAKE_USER",
+                    "SNOWFLAKE_PASSWORD",
+                    "GCP_PROJECT_ID",
+                ],
+                pytest_extra_cmds=gcp_creds_extra_cmds,
+                unsupported_python_versions=[
+                    AvailablePythonVersion.V3_12,
+                ],
+            ),
+            PackageSpec(
+                "python_modules/libraries/dagster-k8s",
+                env_vars=[
+                    "AWS_ACCOUNT_ID",
+                    "AWS_ACCESS_KEY_ID",
+                    "AWS_SECRET_ACCESS_KEY",
+                    "BUILDKITE_SECRETS_BUCKET",
+                ],
+                pytest_tox_factors=[
+                    "nightly",
+                ],
+                pytest_extra_cmds=k8s_extra_cmds,
+            ),
+        ]
+    )
+
+    return steps
diff --git a/.buildkite/dagster-buildkite/dagster_buildkite/pipelines/prerelease_package.py b/.buildkite/dagster-buildkite/dagster_buildkite/pipelines/prerelease_package.py
new file mode 100644
index 0000000000000..7bda9c847b57d
--- /dev/null
+++ 
b/.buildkite/dagster-buildkite/dagster_buildkite/pipelines/prerelease_package.py @@ -0,0 +1,70 @@ +import re +from pathlib import Path +from typing import List + +from dagster_buildkite.python_version import AvailablePythonVersion +from dagster_buildkite.step_builder import CommandStepBuilder +from dagster_buildkite.steps.packages import _get_uncustomized_pkg_roots +from dagster_buildkite.utils import BlockStep, BuildkiteStep + + +def build_prerelease_package_steps() -> List[BuildkiteStep]: + steps: List[BuildkiteStep] = [] + + packages = ( + _get_uncustomized_pkg_roots("python_modules", []) + + _get_uncustomized_pkg_roots("python_modules/libraries", []) + + _get_uncustomized_pkg_roots("examples/experimental", []) + ) + + # Get only packages that have a fixed version in setup.py + filtered_packages = [] + for package in packages: + setup_file = Path(package) / "setup.py" + contents = setup_file.read_text() + if re.findall(r"version=\"[\d\.]+\"", contents): + filtered_packages.append(package) + + input_step: BlockStep = { + "block": ":question: Choose package", + "prompt": None, + "fields": [ + { + "select": "Select a package to publish", + "key": "package-to-release-path", + "options": [ + { + "label": package[len("python_modules/") :] + if package.startswith("python_modules/") + else package, + "value": package, + } + for package in filtered_packages + ], + "hint": None, + "default": None, + "required": True, + "multiple": None, + }, + { + "text": "Enter the version to publish", + "required": False, + "key": "version-to-release", + "default": None, + "hint": "Leave blank to auto-increment the minor version", + }, + ], + } + steps.append(input_step) + + steps.append( + CommandStepBuilder(":package: Build and publish package") + .run( + "pip install build", + "sh ./scripts/build_and_publish.sh", + ) + .on_test_image(AvailablePythonVersion.get_default(), env=["PYPI_TOKEN"]) + .build() + ) + + return steps diff --git a/.buildkite/dagster-buildkite/dagster_buildkite/python_packages.py b/.buildkite/dagster-buildkite/dagster_buildkite/python_packages.py index 9853f0dd1a95a..af5aa93dfb75c 100644 --- a/.buildkite/dagster-buildkite/dagster_buildkite/python_packages.py +++ b/.buildkite/dagster-buildkite/dagster_buildkite/python_packages.py @@ -1,10 +1,13 @@ +# pyright: reportUnnecessaryTypeIgnoreComment=false + import logging +import subprocess from distutils import core as distutils_core from importlib import reload from pathlib import Path from typing import Dict, Optional, Set -import pathspec +import tomli from pkg_resources import Requirement, parse_requirements from dagster_buildkite.git import ChangedFiles, GitInfo @@ -12,19 +15,45 @@ changed_filetypes = [".py", ".cfg", ".toml", ".yaml", ".ipynb", ".yml", ".ini", ".jinja"] +def _path_is_relative_to(p: Path, u: Path) -> bool: + # see https://docs.python.org/3/library/pathlib.html#pathlib.PurePath.is_relative_to + return u == p or u in p.parents + + class PythonPackage: - def __init__(self, setup_py_path: Path): - self.directory = setup_py_path.parent + def __init__(self, setup_path: Path): + self.directory = setup_path - # run_setup stores state in a global variable. Reload the module - # each time we use it - otherwise we'll get the previous invocation's - # distribution if our setup.py doesn't implement setup() correctly - reload(distutils_core) - distribution = distutils_core.run_setup(str(setup_py_path), stop_after="init") + if (setup_path / "setup.py").exists(): + # run_setup stores state in a global variable. 
Reload the module
+            # each time we use it - otherwise we'll get the previous invocation's
+            # distribution if our setup.py doesn't implement setup() correctly
+            reload(distutils_core)
-        self._install_requires = distribution.install_requires  # type: ignore[attr-defined]
-        self._extras_require = distribution.extras_require  # type: ignore[attr-defined]
-        self.name = distribution.get_name()
+            distribution = distutils_core.run_setup(str(setup_path / "setup.py"), stop_after="init")
+
+            self._install_requires = distribution.install_requires  # type: ignore[attr-defined]
+            self._extras_require = distribution.extras_require  # type: ignore[attr-defined]
+            self.name = distribution.get_name()
+        else:
+            pyproject_toml = setup_path / "pyproject.toml"
+            assert (
+                pyproject_toml.exists()
+            ), f"expected pyproject.toml to exist in directory {setup_path}"
+
+            try:
+                with open(pyproject_toml, "rb") as f:
+                    project = tomli.load(f)["project"]
+            except KeyError:
+                # this directory has a pyproject.toml but isn't really a python project,
+                # e.g. docs/
+                self.name = setup_path.name
+                self._install_requires = []
+                self._extras_require = {}
+            else:
+                self.name = project["name"]
+                self._install_requires = project["dependencies"]
+                self._extras_require = project.get("optional-dependencies", {})

     @property
     def install_requires(self) -> Set[Requirement]:
@@ -112,22 +141,22 @@ def load_from_git(cls, git_info: GitInfo) -> None:
         logging.info("Finding Python packages:")

-        git_ignore = git_info.directory / ".gitignore"
-
-        if git_ignore.exists():
-            ignored = git_ignore.read_text().splitlines()
-            git_ignore_spec = pathspec.PathSpec.from_lines("gitwildmatch", ignored)
-        else:
-            git_ignore_spec = pathspec.PathSpec([])
-
         # Consider any setup.py file to be a package
-        packages = set(
-            [
-                PythonPackage(Path(setup))
-                for setup in git_info.directory.rglob("setup.py")
-                if not git_ignore_spec.match_file(str(setup))
-            ]
-        )
+        output = subprocess.check_output(
+            ["git", "ls-files", "."],
+            cwd=str(git_info.directory),
+        ).decode("utf-8")
+        packages = []
+        seen_roots = set()
+        for file in output.split("\n"):
+            # `git ls-files` emits file paths, so look for package roots via each
+            # file's parent directory
+            root = (git_info.directory / Path(file)).parent
+            if root not in seen_roots and (
+                (root / "setup.py").exists() or (root / "pyproject.toml").exists()
+            ):
+                seen_roots.add(root)
+                packages.append(PythonPackage(root))

         for package in sorted(packages):
             logging.info("  - " + package.name)
@@ -140,7 +165,7 @@ def load_from_git(cls, git_info: GitInfo) -> None:
             for change in ChangedFiles.all:
                 if (
                     # Our change is in this package's directory
-                    (change in package.directory.rglob("*"))
+                    _path_is_relative_to(change, package.directory)
                     # The file can alter behavior - exclude things like README changes
                     and (change.suffix in changed_filetypes)
                     # The file is not part of a test suite. 
We treat this differently diff --git a/.buildkite/dagster-buildkite/dagster_buildkite/python_version.py b/.buildkite/dagster-buildkite/dagster_buildkite/python_version.py index 15d46d4d30f54..529b59c32661c 100644 --- a/.buildkite/dagster-buildkite/dagster_buildkite/python_version.py +++ b/.buildkite/dagster-buildkite/dagster_buildkite/python_version.py @@ -11,6 +11,7 @@ class AvailablePythonVersion(str, Enum): V3_9 = "3.9" V3_10 = "3.10" V3_11 = "3.11" + V3_12 = "3.12" @classmethod def get_all(cls) -> List["AvailablePythonVersion"]: diff --git a/.buildkite/dagster-buildkite/dagster_buildkite/step_builder.py b/.buildkite/dagster-buildkite/dagster_buildkite/step_builder.py index 5da5aa0b1c1c2..f8ab1d790620e 100644 --- a/.buildkite/dagster-buildkite/dagster_buildkite/step_builder.py +++ b/.buildkite/dagster-buildkite/dagster_buildkite/step_builder.py @@ -2,14 +2,14 @@ from enum import Enum from typing import Dict, List, Optional -from .images.versions import BUILDKITE_TEST_IMAGE_VERSION -from .python_version import AvailablePythonVersion -from .utils import CommandStep, safe_getenv +from dagster_buildkite.images.versions import BUILDKITE_TEST_IMAGE_VERSION +from dagster_buildkite.python_version import AvailablePythonVersion +from dagster_buildkite.utils import CommandStep, safe_getenv DEFAULT_TIMEOUT_IN_MIN = 25 -DOCKER_PLUGIN = "docker#v3.7.0" -ECR_PLUGIN = "ecr#v2.2.0" +DOCKER_PLUGIN = "docker#v5.10.0" +ECR_PLUGIN = "ecr#v2.7.0" AWS_ACCOUNT_ID = os.environ.get("AWS_ACCOUNT_ID") @@ -42,6 +42,8 @@ def __init__( "retry": { "automatic": [ {"exit_status": -1, "limit": 2}, # agent lost + {"exit_status": 143, "limit": 2}, # agent lost + {"exit_status": 2, "limit": 2}, # often a uv read timeout {"exit_status": 255, "limit": 2}, # agent forced shut down ], "manual": {"permit_on_passed": True}, @@ -57,8 +59,8 @@ def run(self, *commands: str) -> "CommandStepBuilder": def _base_docker_settings(self) -> Dict[str, object]: return { "shell": ["/bin/bash", "-xeuc"], - "always-pull": True, "mount-ssh-agent": True, + "mount-buildkite-agent": True, } def on_python_image(self, image: str, env: Optional[List[str]] = None) -> "CommandStepBuilder": diff --git a/.buildkite/dagster-buildkite/dagster_buildkite/steps/dagster.py b/.buildkite/dagster-buildkite/dagster_buildkite/steps/dagster.py index c6e8343a03b38..7294506ab8c33 100644 --- a/.buildkite/dagster-buildkite/dagster_buildkite/steps/dagster.py +++ b/.buildkite/dagster-buildkite/dagster_buildkite/steps/dagster.py @@ -2,25 +2,27 @@ from glob import glob from typing import List +from dagster_buildkite.defines import GIT_REPO_ROOT from dagster_buildkite.python_packages import PythonPackages - -from ..defines import GIT_REPO_ROOT -from ..python_version import AvailablePythonVersion -from ..step_builder import CommandStepBuilder -from ..utils import ( +from dagster_buildkite.python_version import AvailablePythonVersion +from dagster_buildkite.step_builder import CommandStepBuilder +from dagster_buildkite.steps.helm import build_helm_steps +from dagster_buildkite.steps.integration import build_integration_steps +from dagster_buildkite.steps.packages import build_library_packages_steps +from dagster_buildkite.steps.test_project import build_test_project_steps +from dagster_buildkite.utils import ( + UV_PIN, BuildkiteStep, CommandStep, + GroupStep, is_feature_branch, is_release_branch, safe_getenv, skip_if_no_non_docs_markdown_changes, + skip_if_no_pyright_requirements_txt_changes, skip_if_no_python_changes, skip_if_no_yaml_changes, ) -from .helm import 
build_helm_steps -from .integration import build_integration_steps -from .packages import build_library_packages_steps -from .test_project import build_test_project_steps branch_name = safe_getenv("BUILDKITE_BRANCH") @@ -77,9 +79,7 @@ def build_repo_wide_prettier_steps() -> List[CommandStep]: return [ CommandStepBuilder(":prettier: prettier") .run( - "pushd js_modules/dagster-ui/packages/eslint-config", - "yarn install", - "popd", + "make install_prettier", "make check_prettier", ) .on_test_image(AvailablePythonVersion.get_default()) @@ -102,17 +102,34 @@ def build_check_changelog_steps() -> List[CommandStep]: ] -def build_repo_wide_pyright_steps() -> List[CommandStep]: +def build_repo_wide_pyright_steps() -> List[BuildkiteStep]: return [ - CommandStepBuilder(":pyright: pyright") - .run( - "curl https://sh.rustup.rs -sSf | sh -s -- --default-toolchain nightly -y", - "pip install -e python_modules/dagster[pyright] -e python_modules/dagster-pipes", - "make pyright", + GroupStep( + group=":pyright: pyright", + key="pyright", + steps=[ + CommandStepBuilder(":pyright: make pyright") + .run( + "curl https://sh.rustup.rs -sSf | sh -s -- --default-toolchain nightly -y", + f'pip install -U "{UV_PIN}"', + "make install_pyright", + "make pyright", + ) + .on_test_image(AvailablePythonVersion.get_default()) + .with_skip(skip_if_no_python_changes(overrides=["pyright"])) + .build(), + CommandStepBuilder(":pyright: make rebuild_pyright_pins") + .run( + "curl https://sh.rustup.rs -sSf | sh -s -- --default-toolchain nightly -y", + f'pip install -U "{UV_PIN}"', + "make install_pyright", + "make rebuild_pyright_pins", + ) + .on_test_image(AvailablePythonVersion.get_default()) + .with_skip(skip_if_no_pyright_requirements_txt_changes()) + .build(), + ], ) - .on_test_image(AvailablePythonVersion.get_default()) - .with_skip(skip_if_no_python_changes()) - .build(), ] diff --git a/.buildkite/dagster-buildkite/dagster_buildkite/steps/dagster_ui.py b/.buildkite/dagster-buildkite/dagster_buildkite/steps/dagster_ui.py index 789d659d101df..7611603a99e27 100644 --- a/.buildkite/dagster-buildkite/dagster_buildkite/steps/dagster_ui.py +++ b/.buildkite/dagster-buildkite/dagster_buildkite/steps/dagster_ui.py @@ -3,10 +3,9 @@ from dagster_buildkite.git import ChangedFiles from dagster_buildkite.package_spec import PackageSpec - -from ..python_version import AvailablePythonVersion -from ..step_builder import CommandStepBuilder -from ..utils import CommandStep, is_feature_branch +from dagster_buildkite.python_version import AvailablePythonVersion +from dagster_buildkite.step_builder import CommandStepBuilder +from dagster_buildkite.utils import CommandStep, is_feature_branch def skip_if_no_dagster_ui_components_changes(): @@ -28,6 +27,7 @@ def build_dagster_ui_components_steps() -> List[CommandStep]: CommandStepBuilder(":typescript: dagster-ui-components") .run( "cd js_modules/dagster-ui/packages/ui-components", + "pip install -U uv", f"tox -vv -e {AvailablePythonVersion.to_tox_factor(AvailablePythonVersion.get_default())}", ) .on_test_image(AvailablePythonVersion.get_default()) @@ -57,6 +57,7 @@ def build_dagster_ui_core_steps() -> List[CommandStep]: CommandStepBuilder(":typescript: dagster-ui-core") .run( "cd js_modules/dagster-ui", + "pip install -U uv", f"tox -vv -e {AvailablePythonVersion.to_tox_factor(AvailablePythonVersion.get_default())}", ) .on_test_image(AvailablePythonVersion.get_default()) diff --git a/.buildkite/dagster-buildkite/dagster_buildkite/steps/docs.py 
b/.buildkite/dagster-buildkite/dagster_buildkite/steps/docs.py index 4b62f708ce664..2a5f51e01adb1 100644 --- a/.buildkite/dagster-buildkite/dagster_buildkite/steps/docs.py +++ b/.buildkite/dagster-buildkite/dagster_buildkite/steps/docs.py @@ -1,11 +1,18 @@ from typing import List +from dagster_buildkite.python_version import AvailablePythonVersion +from dagster_buildkite.step_builder import CommandStepBuilder +from dagster_buildkite.steps.packages import ( + build_dagster_ui_screenshot_steps, + build_example_packages_steps, +) from dagster_buildkite.steps.tox import build_tox_step - -from ..python_version import AvailablePythonVersion -from ..step_builder import CommandStepBuilder -from ..utils import BuildkiteLeafStep, BuildkiteStep, GroupStep, skip_if_no_docs_changes -from .packages import build_dagster_ui_screenshot_steps, build_example_packages_steps +from dagster_buildkite.utils import ( + BuildkiteLeafStep, + BuildkiteStep, + GroupStep, + skip_if_no_docs_changes, +) def build_docs_steps() -> List[BuildkiteStep]: @@ -39,6 +46,7 @@ def build_docs_steps() -> List[BuildkiteStep]: CommandStepBuilder("docs apidoc build") .run( "cd docs", + "pip install -U uv", "make apidoc-build", # "echo '--- Checking git diff (ignoring whitespace) after docs build...'", # "git diff --ignore-all-space --stat", diff --git a/.buildkite/dagster-buildkite/dagster_buildkite/steps/helm.py b/.buildkite/dagster-buildkite/dagster_buildkite/steps/helm.py index 6bfec4b75297a..a21df88a5b717 100644 --- a/.buildkite/dagster-buildkite/dagster_buildkite/steps/helm.py +++ b/.buildkite/dagster-buildkite/dagster_buildkite/steps/helm.py @@ -1,10 +1,10 @@ import os from typing import List -from ..package_spec import PackageSpec -from ..python_version import AvailablePythonVersion -from ..step_builder import CommandStepBuilder -from ..utils import ( +from dagster_buildkite.package_spec import PackageSpec +from dagster_buildkite.python_version import AvailablePythonVersion +from dagster_buildkite.step_builder import CommandStepBuilder +from dagster_buildkite.utils import ( BuildkiteLeafStep, BuildkiteStep, CommandStep, @@ -23,6 +23,7 @@ def build_helm_steps() -> List[BuildkiteStep]: AvailablePythonVersion.V3_8, AvailablePythonVersion.V3_9, AvailablePythonVersion.V3_10, + AvailablePythonVersion.V3_11, ], name="dagster-helm", retries=2, diff --git a/.buildkite/dagster-buildkite/dagster_buildkite/steps/integration.py b/.buildkite/dagster-buildkite/dagster_buildkite/steps/integration.py index 6329a673ffcc6..74c27cd8e0497 100644 --- a/.buildkite/dagster-buildkite/dagster_buildkite/steps/integration.py +++ b/.buildkite/dagster-buildkite/dagster_buildkite/steps/integration.py @@ -1,12 +1,16 @@ import os from typing import Callable, List, Optional, Union -import packaging.version - -from ..defines import GCP_CREDS_FILENAME, GCP_CREDS_LOCAL_FILE, LATEST_DAGSTER_RELEASE -from ..package_spec import PackageSpec, UnsupportedVersionsFunction -from ..python_version import AvailablePythonVersion -from ..utils import ( +from dagster_buildkite.defines import ( + GCP_CREDS_FILENAME, + GCP_CREDS_LOCAL_FILE, + LATEST_DAGSTER_RELEASE, +) +from dagster_buildkite.package_spec import PackageSpec, UnsupportedVersionsFunction +from dagster_buildkite.python_version import AvailablePythonVersion +from dagster_buildkite.step_builder import BuildkiteQueue +from dagster_buildkite.steps.test_project import test_project_depends_fn +from dagster_buildkite.utils import ( BuildkiteStep, BuildkiteTopLevelStep, connect_sibling_docker_container, @@ -14,7 +18,6 @@ 
library_version_from_core_version, network_buildkite_container, ) -from .test_project import test_project_depends_fn SCRIPT_PATH = os.path.dirname(os.path.abspath(__file__)) DAGSTER_CURRENT_BRANCH = "current_branch" @@ -34,6 +37,7 @@ def build_integration_steps() -> List[BuildkiteStep]: steps += build_celery_k8s_suite_steps() steps += build_k8s_suite_steps() steps += build_daemon_suite_steps() + steps += build_auto_materialize_perf_suite_steps() return steps @@ -45,8 +49,6 @@ def build_integration_steps() -> List[BuildkiteStep]: def build_backcompat_suite_steps() -> List[BuildkiteTopLevelStep]: tox_factors = [ - "webserver-latest-release", - "webserver-earliest-release", "user-code-latest-release", "user-code-earliest-release", ] @@ -60,35 +62,18 @@ def build_backcompat_suite_steps() -> List[BuildkiteTopLevelStep]: def backcompat_extra_cmds(_, factor: str) -> List[str]: tox_factor_map = { - "webserver-latest-release": { - "webserver": LATEST_DAGSTER_RELEASE, - "user_code": DAGSTER_CURRENT_BRANCH, - }, - "webserver-earliest-release": { - "webserver": EARLIEST_TESTED_RELEASE, - "user_code": DAGSTER_CURRENT_BRANCH, - }, - "user-code-latest-release": { - "webserver": DAGSTER_CURRENT_BRANCH, - "user_code": LATEST_DAGSTER_RELEASE, - }, - "user-code-earliest-release": { - "webserver": DAGSTER_CURRENT_BRANCH, - "user_code": EARLIEST_TESTED_RELEASE, - }, + "user-code-latest-release": LATEST_DAGSTER_RELEASE, + "user-code-earliest-release": EARLIEST_TESTED_RELEASE, } - release_mapping = tox_factor_map[factor] - webserver_version = release_mapping["webserver"] + webserver_version = DAGSTER_CURRENT_BRANCH webserver_library_version = _get_library_version(webserver_version) - webserver_package = _infer_webserver_package(webserver_version) - user_code_version = release_mapping["user_code"] + user_code_version = tox_factor_map[factor] user_code_library_version = _get_library_version(user_code_version) user_code_definitions_file = _infer_user_code_definitions_files(user_code_version) return [ f"export EARLIEST_TESTED_RELEASE={EARLIEST_TESTED_RELEASE}", - f"export WEBSERVER_PACKAGE={webserver_package}", f"export USER_CODE_DEFINITIONS_FILE={user_code_definitions_file}", "pushd integration_tests/test_suites/backcompat-test-suite/webserver_service", " ".join( @@ -96,7 +81,6 @@ def backcompat_extra_cmds(_, factor: str) -> List[str]: "./build.sh", webserver_version, webserver_library_version, - webserver_package, user_code_version, user_code_library_version, user_code_definitions_file, @@ -113,24 +97,12 @@ def backcompat_extra_cmds(_, factor: str) -> List[str]: ] -def _infer_webserver_package(release: str) -> str: - """Returns `dagster-webserver` if on source or version >=1.3.14 (first dagster-webserver - release), `dagit` otherwise. 
- """ - if release == "current_branch": - return "dagster-webserver" +def _infer_user_code_definitions_files(user_code_release: str) -> str: + """Returns the definitions file to use for the user code release.""" + if user_code_release == EARLIEST_TESTED_RELEASE: + return "defs_for_earliest_tested_release.py" else: - version = packaging.version.parse(release) - return "dagit" if version < packaging.version.Version("1.3.14") else "dagster-webserver" - - -def _infer_user_code_definitions_files(release: str) -> str: - """Returns `repo.py` if on source or version >=1.0, `legacy_repo.py` otherwise.""" - if release == "current_branch": - return "repo.py" - else: - version = packaging.version.parse(release) - return "legacy_repo.py" if version < packaging.version.Version("1.0") else "repo.py" + return "defs_for_latest_release.py" def _get_library_version(version: str) -> str: @@ -148,19 +120,15 @@ def _get_library_version(version: str) -> str: def build_celery_k8s_suite_steps() -> List[BuildkiteTopLevelStep]: pytest_tox_factors = [ "-default", - "-markusercodedeploymentsubchart", - "-markdaemon", "-markredis", - "-markmonitoring", ] directory = os.path.join("integration_tests", "test_suites", "celery-k8s-test-suite") return build_integration_suite_steps( directory, pytest_tox_factors, + queue=BuildkiteQueue.DOCKER, # crashes on python 3.11/3.12 without additional resources always_run_if=has_helm_changes, - unsupported_python_versions=[ - AvailablePythonVersion.V3_11, # mysteriously causes buildkite agents to crash - ], + pytest_extra_cmds=celery_k8s_integration_suite_pytest_extra_cmds, ) @@ -179,6 +147,20 @@ def build_daemon_suite_steps(): ) +def build_auto_materialize_perf_suite_steps(): + pytest_tox_factors = None + directory = os.path.join("integration_tests", "test_suites", "auto_materialize_perf_tests") + return build_integration_suite_steps( + directory, + pytest_tox_factors, + unsupported_python_versions=[ + version + for version in AvailablePythonVersion.get_all() + if version != AvailablePythonVersion.V3_11 + ], + ) + + def daemon_pytest_extra_cmds(version: AvailablePythonVersion, _): return [ "export DAGSTER_DOCKER_IMAGE_TAG=$${BUILDKITE_BUILD_ID}-" + version, @@ -204,7 +186,10 @@ def build_k8s_suite_steps(): pytest_tox_factors = ["-default", "-subchart"] directory = os.path.join("integration_tests", "test_suites", "k8s-test-suite") return build_integration_suite_steps( - directory, pytest_tox_factors, always_run_if=has_helm_changes + directory, + pytest_tox_factors, + always_run_if=has_helm_changes, + pytest_extra_cmds=k8s_integration_suite_pytest_extra_cmds, ) @@ -223,7 +208,6 @@ def build_integration_suite_steps( Union[List[AvailablePythonVersion], UnsupportedVersionsFunction] ] = None, ) -> List[BuildkiteTopLevelStep]: - pytest_extra_cmds = pytest_extra_cmds or default_integration_suite_pytest_extra_cmds return PackageSpec( directory, env_vars=[ @@ -245,7 +229,15 @@ def build_integration_suite_steps( ).build_steps() -def default_integration_suite_pytest_extra_cmds(version: str, _) -> List[str]: +def k8s_integration_suite_pytest_extra_cmds(version: str, _) -> List[str]: + return [ + "export DAGSTER_DOCKER_IMAGE_TAG=$${BUILDKITE_BUILD_ID}-" + version, + 'export DAGSTER_DOCKER_REPOSITORY="$${AWS_ACCOUNT_ID}.dkr.ecr.us-west-2.amazonaws.com"', + "aws ecr get-login --no-include-email --region us-west-2 | sh", + ] + + +def celery_k8s_integration_suite_pytest_extra_cmds(version: str, _) -> List[str]: cmds = [ 'export AIRFLOW_HOME="/airflow"', "mkdir -p $${AIRFLOW_HOME}", diff --git 
a/.buildkite/dagster-buildkite/dagster_buildkite/steps/packages.py b/.buildkite/dagster-buildkite/dagster_buildkite/steps/packages.py index c6c0d7e6458ec..e01a142c4e3c9 100644 --- a/.buildkite/dagster-buildkite/dagster_buildkite/steps/packages.py +++ b/.buildkite/dagster-buildkite/dagster_buildkite/steps/packages.py @@ -1,6 +1,7 @@ import os from glob import glob -from typing import List, Optional +from pathlib import Path +from typing import Iterable, List, Optional from dagster_buildkite.defines import GCP_CREDS_FILENAME, GCP_CREDS_LOCAL_FILE, GIT_REPO_ROOT from dagster_buildkite.package_spec import PackageSpec @@ -22,11 +23,12 @@ def build_example_packages_steps() -> List[BuildkiteStep]: _get_uncustomized_pkg_roots("examples", custom_example_pkg_roots) + _get_uncustomized_pkg_roots("examples/experimental", custom_example_pkg_roots) ) + if pkg != "examples/deploy_ecs" ] example_packages = EXAMPLE_PACKAGES_WITH_CUSTOM_CONFIG + example_packages_with_standard_config - return _build_steps_from_package_specs(example_packages) + return build_steps_from_package_specs(example_packages) def build_library_packages_steps() -> List[BuildkiteStep]: @@ -44,18 +46,18 @@ def build_library_packages_steps() -> List[BuildkiteStep]: ], ] - return _build_steps_from_package_specs( + return build_steps_from_package_specs( LIBRARY_PACKAGES_WITH_CUSTOM_CONFIG + library_packages_with_standard_config ) def build_dagster_ui_screenshot_steps() -> List[BuildkiteStep]: - return _build_steps_from_package_specs( + return build_steps_from_package_specs( [PackageSpec("docs/dagster-ui-screenshot", run_pytest=False)] ) -def _build_steps_from_package_specs(package_specs: List[PackageSpec]) -> List[BuildkiteStep]: +def build_steps_from_package_specs(package_specs: List[PackageSpec]) -> List[BuildkiteStep]: steps: List[BuildkiteStep] = [] all_packages = sorted( package_specs, @@ -72,7 +74,7 @@ def _build_steps_from_package_specs(package_specs: List[PackageSpec]) -> List[Bu # Find packages under a root subdirectory that are not configured above. 
-def _get_uncustomized_pkg_roots(root, custom_pkg_roots) -> List[str]: +def _get_uncustomized_pkg_roots(root: str, custom_pkg_roots: List[str]) -> List[str]: all_files_in_root = [ os.path.relpath(p, GIT_REPO_ROOT) for p in glob(os.path.join(GIT_REPO_ROOT, root, "*")) ] @@ -231,7 +233,7 @@ def k8s_extra_cmds(version: str, _) -> List[str]: ] -gcp_extra_cmds = ( +gcp_creds_extra_cmds = ( [ rf"aws s3 cp s3://\${{BUILDKITE_SECRETS_BUCKET}}/{GCP_CREDS_FILENAME} " + GCP_CREDS_LOCAL_FILE, @@ -272,6 +274,7 @@ def k8s_extra_cmds(version: str, _) -> List[str]: AvailablePythonVersion.V3_9, AvailablePythonVersion.V3_10, AvailablePythonVersion.V3_11, + AvailablePythonVersion.V3_12, ], ), PackageSpec( @@ -287,43 +290,34 @@ def k8s_extra_cmds(version: str, _) -> List[str]: unsupported_python_versions=[ # dependency on 3.9-incompatible extension libs AvailablePythonVersion.V3_9, - # depends on some packages not yet available on python 3.11 - AvailablePythonVersion.V3_11, + # dagster-airflow dep + AvailablePythonVersion.V3_12, ], ), + PackageSpec( + "examples/docs_beta_snippets", + pytest_tox_factors=["all", "integrations"], + ), PackageSpec( "examples/project_fully_featured", unsupported_python_versions=[ - AvailablePythonVersion.V3_11, + AvailablePythonVersion.V3_12, # duckdb ], ), PackageSpec( "examples/with_great_expectations", - unsupported_python_versions=[ - # Issue with pinned of great_expectations - AvailablePythonVersion.V3_10, - AvailablePythonVersion.V3_11, - ], ), PackageSpec( "examples/with_pyspark", - unsupported_python_versions=[ - # pyspark not yet 3.11 compatible - AvailablePythonVersion.V3_11, - ], ), PackageSpec( "examples/with_pyspark_emr", - unsupported_python_versions=[ - # pyspark not yet 3.11 compatible - AvailablePythonVersion.V3_11, - ], ), PackageSpec( "examples/with_wandb", unsupported_python_versions=[ - # wandb not yet 3.11 compatible - AvailablePythonVersion.V3_11, + # dagster-wandb dep + AvailablePythonVersion.V3_12, ], ), # The 6 tutorials referenced in cloud onboarding cant test "source" due to dagster-cloud dep @@ -334,6 +328,15 @@ def k8s_extra_cmds(version: str, _) -> List[str]: PackageSpec( "examples/assets_dbt_python", pytest_tox_factors=["pypi"], + unsupported_python_versions=[ + AvailablePythonVersion.V3_12, # duckdb + ], + ), + PackageSpec( + "examples/assets_dynamic_partitions", + unsupported_python_versions=[ + AvailablePythonVersion.V3_12, # duckdb + ], ), PackageSpec( "examples/quickstart_aws", @@ -351,55 +354,110 @@ def k8s_extra_cmds(version: str, _) -> List[str]: "examples/quickstart_snowflake", pytest_tox_factors=["pypi"], ), + PackageSpec( + "examples/experimental/dagster-blueprints", + ), + PackageSpec( + "examples/experimental/dagster-airlift", + unsupported_python_versions=[ + AvailablePythonVersion.V3_8, + AvailablePythonVersion.V3_12, + ], + ), + PackageSpec( + "examples/experimental/dagster-airlift/examples/dbt-example", + unsupported_python_versions=[ + AvailablePythonVersion.V3_12, + ], + ), + PackageSpec( + "examples/experimental/dagster-airlift/examples/perf-harness", + unsupported_python_versions=[ + AvailablePythonVersion.V3_12, + ], + ), + PackageSpec( + "examples/experimental/dagster-airlift/examples/tutorial-example", + unsupported_python_versions=[ + AvailablePythonVersion.V3_12, + ], + ), + PackageSpec( + "examples/experimental/dagster-airlift/examples/kitchen-sink", + unsupported_python_versions=[ + AvailablePythonVersion.V3_12, + ], + ), ] + +def _unsupported_dagster_python_versions(tox_factor: Optional[str]) -> 
List[AvailablePythonVersion]: + if tox_factor == "general_tests_old_protobuf": + return [AvailablePythonVersion.V3_11, AvailablePythonVersion.V3_12] + + if tox_factor in { + "type_signature_tests", + }: + return [AvailablePythonVersion.V3_12] + + return [] + + +def test_subfolders(tests_folder_name: str) -> Iterable[str]: + tests_path = ( + Path(__file__).parent + / Path("../../../../python_modules/dagster/dagster_tests/") + / Path(tests_folder_name) + ) + for subfolder in tests_path.iterdir(): + if subfolder.suffix == ".py" and subfolder.stem != "__init__": + raise Exception( + f"If you are splitting a test folder into parallel subfolders " + f"there should be no python files in the root of the folder. Found {subfolder}." + ) + if subfolder.is_dir(): + yield subfolder.name + + +def tox_factors_for_folder(tests_folder_name: str) -> List[str]: + return [ + f"{tests_folder_name}__{subfolder_name}" + for subfolder_name in test_subfolders(tests_folder_name) + ] + + LIBRARY_PACKAGES_WITH_CUSTOM_CONFIG: List[PackageSpec] = [ - PackageSpec("python_modules/automation"), + PackageSpec( + "python_modules/automation", + unsupported_python_versions=[AvailablePythonVersion.V3_12], + ), PackageSpec("python_modules/dagster-webserver", pytest_extra_cmds=ui_extra_cmds), PackageSpec( "python_modules/dagster", env_vars=["AWS_ACCOUNT_ID"], pytest_tox_factors=[ "api_tests", + "asset_defs_tests", "cli_tests", "core_tests_pydantic1", "core_tests_pydantic2", - "storage_tests_sqlalchemy_1_3", - "storage_tests_sqlalchemy_1_4", "daemon_sensor_tests", "daemon_tests", "definitions_tests", - "definitions_tests_pendulum_1", - "definitions_tests_pendulum_2", "general_tests", "general_tests_old_protobuf", + "launcher_tests", + "logging_tests", + "model_tests_pydantic1", + "model_tests_pydantic2", "scheduler_tests", - "scheduler_tests_pendulum_1", - "scheduler_tests_pendulum_2", - "execution_tests", "storage_tests", + "storage_tests_sqlalchemy_1_3", + "storage_tests_sqlalchemy_1_4", "type_signature_tests", - "asset_defs_tests", - "launcher_tests", - "logging_tests", - ], - unsupported_python_versions=( - lambda tox_factor: ( - [AvailablePythonVersion.V3_11] - if ( - tox_factor - in { - "general_tests_old_protobuf", # protobuf 3 not compatible with python 3.11 - "cli_tests", # test suite prone to hangs on unpinned grpcio version due to https://github.com/grpc/grpc/issues/31885 - } - ) - else ( - [AvailablePythonVersion.V3_8] # pendulum 3 not supported on python 3.8 - if tox_factor in {"scheduler_tests", "definitions_tests"} - else [] - ) - ) - ), + ] + + tox_factors_for_folder("execution_tests"), + unsupported_python_versions=_unsupported_dagster_python_versions, ), PackageSpec( "python_modules/dagster-graphql", @@ -440,18 +498,17 @@ def k8s_extra_cmds(version: str, _) -> List[str]: ), PackageSpec( "python_modules/dagster-test", + unsupported_python_versions=[ + # dagster-airflow + AvailablePythonVersion.V3_12, + ], ), PackageSpec( "python_modules/libraries/dagster-dbt", pytest_tox_factors=[ - "dbt_15X_legacy", - "dbt_16X_legacy", - "dbt_17X_legacy", - "dbt_15X", - "dbt_16X", - "dbt_17X", - "dbt_pydantic1", - "dbt_legacy_pydantic1", + f"{deps_factor}-{command_factor}" + for deps_factor in ["dbt17", "dbt18", "pydantic1"] + for command_factor in ["cloud", "core-main", "core-derived-metadata"] ], ), PackageSpec( @@ -460,6 +517,7 @@ def k8s_extra_cmds(version: str, _) -> List[str]: "pydantic1", "pydantic2", ], + env_vars=["SNOWFLAKE_ACCOUNT", "SNOWFLAKE_USER", "SNOWFLAKE_PASSWORD"], ), PackageSpec( 
"python_modules/libraries/dagster-airbyte", @@ -471,6 +529,7 @@ def k8s_extra_cmds(version: str, _) -> List[str]: unsupported_python_versions=[ AvailablePythonVersion.V3_10, AvailablePythonVersion.V3_11, + AvailablePythonVersion.V3_12, ], env_vars=[ "AIRFLOW_HOME", @@ -482,9 +541,6 @@ def k8s_extra_cmds(version: str, _) -> List[str]: ], pytest_extra_cmds=airflow_extra_cmds, pytest_tox_factors=[ - "default-airflow1", - "localdb-airflow1", - "persistentdb-airflow1", "default-airflow2", "localdb-airflow2", "persistentdb-airflow2", @@ -502,18 +558,12 @@ def k8s_extra_cmds(version: str, _) -> List[str]: "python_modules/libraries/dagster-celery", env_vars=["AWS_ACCOUNT_ID", "AWS_ACCESS_KEY_ID", "AWS_SECRET_ACCESS_KEY"], pytest_extra_cmds=celery_extra_cmds, - unsupported_python_versions=[ - AvailablePythonVersion.V3_11, # no celery support for 3.11 - ], ), PackageSpec( "python_modules/libraries/dagster-celery-docker", env_vars=["AWS_ACCOUNT_ID", "AWS_ACCESS_KEY_ID", "AWS_SECRET_ACCESS_KEY"], pytest_extra_cmds=celery_docker_extra_cmds, pytest_step_dependencies=test_project_depends_fn, - unsupported_python_versions=[ - AvailablePythonVersion.V3_11, # no celery support for 3.11 - ], ), PackageSpec( "python_modules/libraries/dagster-dask", @@ -521,10 +571,6 @@ def k8s_extra_cmds(version: str, _) -> List[str]: ), PackageSpec( "python_modules/libraries/dagster-databricks", - unsupported_python_versions=[ - # pyspark not supported on 3.11 - AvailablePythonVersion.V3_11, - ], pytest_tox_factors=[ "pydantic1", "pydantic2", @@ -536,11 +582,38 @@ def k8s_extra_cmds(version: str, _) -> List[str]: pytest_extra_cmds=docker_extra_cmds, pytest_step_dependencies=test_project_depends_fn, ), + PackageSpec( + "python_modules/libraries/dagster-duckdb", + unsupported_python_versions=[ + # duckdb + AvailablePythonVersion.V3_12, + ], + ), + PackageSpec( + "python_modules/libraries/dagster-duckdb-pandas", + unsupported_python_versions=[ + # duckdb + AvailablePythonVersion.V3_12, + ], + ), + PackageSpec( + "python_modules/libraries/dagster-duckdb-polars", + unsupported_python_versions=[ + # duckdb + AvailablePythonVersion.V3_12, + ], + ), PackageSpec( "python_modules/libraries/dagster-duckdb-pyspark", unsupported_python_versions=[ - # pyspark not supported on 3.11 - AvailablePythonVersion.V3_11, + # duckdb + AvailablePythonVersion.V3_12, + ], + ), + PackageSpec( + "python_modules/libraries/dagster-pandas", + unsupported_python_versions=[ + AvailablePythonVersion.V3_12, ], ), PackageSpec( @@ -551,7 +624,7 @@ def k8s_extra_cmds(version: str, _) -> List[str]: "BUILDKITE_SECRETS_BUCKET", "GCP_PROJECT_ID", ], - pytest_extra_cmds=gcp_extra_cmds, + pytest_extra_cmds=gcp_creds_extra_cmds, # Remove once https://github.com/dagster-io/dagster/issues/2511 is resolved retries=2, ), @@ -563,7 +636,7 @@ def k8s_extra_cmds(version: str, _) -> List[str]: "BUILDKITE_SECRETS_BUCKET", "GCP_PROJECT_ID", ], - pytest_extra_cmds=gcp_extra_cmds, + pytest_extra_cmds=gcp_creds_extra_cmds, retries=2, ), PackageSpec( @@ -574,18 +647,10 @@ def k8s_extra_cmds(version: str, _) -> List[str]: "BUILDKITE_SECRETS_BUCKET", "GCP_PROJECT_ID", ], - pytest_extra_cmds=gcp_extra_cmds, - unsupported_python_versions=[ - # pyspark not supported on 3.11 - AvailablePythonVersion.V3_11, - ], + pytest_extra_cmds=gcp_creds_extra_cmds, ), PackageSpec( "python_modules/libraries/dagster-ge", - unsupported_python_versions=[ - # great-expectations not yet supported on 3.11 - AvailablePythonVersion.V3_11, - ], ), PackageSpec( "python_modules/libraries/dagster-k8s", @@ -603,10 
+668,6 @@ def k8s_extra_cmds(version: str, _) -> List[str]: ), PackageSpec( "python_modules/libraries/dagster-mlflow", - unsupported_python_versions=[ - # https://github.com/mlflow/mlflow/issues/7681 - AvailablePythonVersion.V3_11, - ], ), PackageSpec( "python_modules/libraries/dagster-mysql", @@ -615,10 +676,6 @@ def k8s_extra_cmds(version: str, _) -> List[str]: "storage_tests", "storage_tests_sqlalchemy_1_3", ], - unsupported_python_versions=[ - # mysql-connector-python not supported on 3.11 - AvailablePythonVersion.V3_11, - ], always_run_if=has_storage_test_fixture_changes, ), PackageSpec( @@ -628,10 +685,6 @@ def k8s_extra_cmds(version: str, _) -> List[str]: PackageSpec( "python_modules/libraries/dagster-snowflake-pyspark", env_vars=["SNOWFLAKE_ACCOUNT", "SNOWFLAKE_BUILDKITE_PASSWORD"], - unsupported_python_versions=[ - # pyspark not supported on 3.11 - AvailablePythonVersion.V3_11, - ], ), PackageSpec( "python_modules/libraries/dagster-postgres", @@ -648,10 +701,21 @@ def k8s_extra_cmds(version: str, _) -> List[str]: # Remove once https://github.com/dagster-io/dagster/issues/2511 is resolved retries=2, ), + PackageSpec( + "python_modules/libraries/dagster-wandb", + unsupported_python_versions=[ + # duckdb + AvailablePythonVersion.V3_12, + ], + ), PackageSpec( "python_modules/libraries/dagstermill", pytest_tox_factors=["papermill1", "papermill2"], retries=2, # Workaround for flaky kernel issues + unsupported_python_versions=[ + # duckdb + AvailablePythonVersion.V3_12, + ], ), PackageSpec( ".buildkite/dagster-buildkite", diff --git a/.buildkite/dagster-buildkite/dagster_buildkite/steps/test_project.py b/.buildkite/dagster-buildkite/dagster_buildkite/steps/test_project.py index 88ac45f17d43b..dc73185d23cf2 100644 --- a/.buildkite/dagster-buildkite/dagster_buildkite/steps/test_project.py +++ b/.buildkite/dagster-buildkite/dagster_buildkite/steps/test_project.py @@ -1,14 +1,14 @@ import os from typing import List, Optional, Set -from ..defines import GCP_CREDS_FILENAME, GCP_CREDS_LOCAL_FILE -from ..images.versions import ( +from dagster_buildkite.defines import GCP_CREDS_FILENAME, GCP_CREDS_LOCAL_FILE +from dagster_buildkite.images.versions import ( BUILDKITE_BUILD_TEST_PROJECT_IMAGE_IMAGE_VERSION, TEST_PROJECT_BASE_IMAGE_VERSION, ) -from ..python_version import AvailablePythonVersion -from ..step_builder import CommandStepBuilder -from ..utils import BuildkiteLeafStep, GroupStep +from dagster_buildkite.python_version import AvailablePythonVersion +from dagster_buildkite.step_builder import CommandStepBuilder +from dagster_buildkite.utils import BuildkiteLeafStep, GroupStep # Some python packages depend on these images but we don't explicitly define that dependency anywhere other # than when we construct said package's Buildkite steps. 
Until we more explicitly define those dependencies @@ -66,10 +66,8 @@ def build_test_project_steps() -> List[GroupStep]: "docker push $${TEST_PROJECT_IMAGE}", ) .on_python_image( - "buildkite-build-test-project-image:py{python_version}-{image_version}".format( - python_version=AvailablePythonVersion.V3_8, # py version can be bumped when rebuilt - image_version=BUILDKITE_BUILD_TEST_PROJECT_IMAGE_IMAGE_VERSION, - ), + # py version can be bumped when rebuilt + f"buildkite-build-test-project-image:py{AvailablePythonVersion.V3_8}-{BUILDKITE_BUILD_TEST_PROJECT_IMAGE_IMAGE_VERSION}", [ "AIRFLOW_HOME", "AWS_ACCOUNT_ID", diff --git a/.buildkite/dagster-buildkite/dagster_buildkite/steps/tox.py b/.buildkite/dagster-buildkite/dagster_buildkite/steps/tox.py index 96df87885cae1..dfeeb16cea1f1 100644 --- a/.buildkite/dagster-buildkite/dagster_buildkite/steps/tox.py +++ b/.buildkite/dagster-buildkite/dagster_buildkite/steps/tox.py @@ -5,7 +5,7 @@ from dagster_buildkite.python_version import AvailablePythonVersion from dagster_buildkite.step_builder import BuildkiteQueue, CommandStepBuilder -from dagster_buildkite.utils import CommandStep, make_buildkite_section_header +from dagster_buildkite.utils import UV_PIN, CommandStep, make_buildkite_section_header _COMMAND_TYPE_TO_EMOJI_MAP = { "pytest": ":pytest:", @@ -51,6 +51,7 @@ def build_tox_step( commands = [ *(extra_commands_pre or []), f"cd {root_dir}", + f'pip install --force-reinstall "{UV_PIN}"', f"echo -e {shlex.quote(buildkite_section_header)}", tox_command, *(extra_commands_post or []), diff --git a/.buildkite/dagster-buildkite/dagster_buildkite/steps/trigger.py b/.buildkite/dagster-buildkite/dagster_buildkite/steps/trigger.py index d58a1986e195e..3f6087cdbfd38 100644 --- a/.buildkite/dagster-buildkite/dagster_buildkite/steps/trigger.py +++ b/.buildkite/dagster-buildkite/dagster_buildkite/steps/trigger.py @@ -1,6 +1,6 @@ from typing import Dict, List, Optional -from ..utils import TriggerStep, safe_getenv +from dagster_buildkite.utils import TriggerStep, safe_getenv def build_trigger_step( diff --git a/.buildkite/dagster-buildkite/dagster_buildkite/steps/wait.py b/.buildkite/dagster-buildkite/dagster_buildkite/steps/wait.py index 1b453352a5680..e406c62219a7d 100644 --- a/.buildkite/dagster-buildkite/dagster_buildkite/steps/wait.py +++ b/.buildkite/dagster-buildkite/dagster_buildkite/steps/wait.py @@ -1,4 +1,4 @@ -from ..utils import WaitStep +from dagster_buildkite.utils import WaitStep def build_wait_step() -> WaitStep: diff --git a/.buildkite/dagster-buildkite/dagster_buildkite/utils.py b/.buildkite/dagster-buildkite/dagster_buildkite/utils.py index 77a5f933a6af9..2b6cb869423ed 100644 --- a/.buildkite/dagster-buildkite/dagster_buildkite/utils.py +++ b/.buildkite/dagster-buildkite/dagster_buildkite/utils.py @@ -3,7 +3,7 @@ import os import subprocess from pathlib import Path -from typing import Dict, List, Optional, Union +from typing import Dict, List, Optional, Sequence, Union import packaging.version import yaml @@ -63,10 +63,45 @@ class GroupStep(TypedDict): WaitStep: TypeAlias = Literal["wait"] -BuildkiteStep: TypeAlias = Union[CommandStep, GroupStep, TriggerStep, WaitStep] +InputSelectOption = TypedDict("InputSelectOption", {"label": str, "value": str}) +InputSelectField = TypedDict( + "InputSelectField", + { + "select": str, + "key": str, + "options": List[InputSelectOption], + "hint": Optional[str], + "default": Optional[str], + "required": Optional[bool], + "multiple": Optional[bool], + }, +) +InputTextField = TypedDict( + "InputTextField", 
+ { + "text": str, + "key": str, + "hint": Optional[str], + "default": Optional[str], + "required": Optional[bool], + }, +) + +BlockStep = TypedDict( + "BlockStep", + { + "block": str, + "prompt": Optional[str], + "fields": List[Union[InputSelectField, InputTextField]], + }, +) + +BuildkiteStep: TypeAlias = Union[CommandStep, GroupStep, TriggerStep, WaitStep, BlockStep] BuildkiteLeafStep = Union[CommandStep, TriggerStep, WaitStep] BuildkiteTopLevelStep = Union[CommandStep, GroupStep] +UV_PIN = "uv==0.4.8" + def is_command_step(step: BuildkiteStep) -> TypeGuard[CommandStep]: return isinstance(step, dict) and "commands" in step @@ -82,7 +117,9 @@ def safe_getenv(env_var: str) -> str: return os.environ[env_var] -def buildkite_yaml_for_steps(steps) -> str: +def buildkite_yaml_for_steps( + steps: Sequence[BuildkiteStep], custom_slack_channel: Optional[str] = None +) -> str: return yaml.dump( { "env": { @@ -101,7 +138,17 @@ def buildkite_yaml_for_steps(steps) -> str: ), } for buildkite_email, slack_channel in BUILD_CREATOR_EMAIL_TO_SLACK_CHANNEL_MAP.items() - ], + ] + + ( + [ + { + "slack": f"elementl#{custom_slack_channel}", + "if": "build.state != 'canceled'", + } + ] + if custom_slack_channel + else [] + ), }, default_flow_style=False, ) @@ -193,17 +240,41 @@ def get_commit(rev): return subprocess.check_output(["git", "rev-parse", "--short", rev]).decode("utf-8").strip() -def skip_if_no_python_changes(): +def skip_if_no_python_changes(overrides: Optional[Sequence[str]] = None): + if message_contains("NO_SKIP"): + return None + if not is_feature_branch(): return None if any(path.suffix == ".py" for path in ChangedFiles.all): return None + if overrides and any( + Path(override) in path.parents for override in overrides for path in ChangedFiles.all + ): + return None + return "No python changes" +def skip_if_no_pyright_requirements_txt_changes(): + if message_contains("NO_SKIP"): + return None + + if not is_feature_branch(): + return None + + if any(path.match("pyright/*/requirements.txt") for path in ChangedFiles.all): + return None + + return "No pyright requirements.txt changes" + + def skip_if_no_yaml_changes(): + if message_contains("NO_SKIP"): + return None + if not is_feature_branch(): return None @@ -214,6 +285,9 @@ def skip_if_no_yaml_changes(): def skip_if_no_non_docs_markdown_changes(): + if message_contains("NO_SKIP"): + return None + if not is_feature_branch(): return None @@ -238,6 +312,9 @@ def has_storage_test_fixture_changes(): def skip_if_no_helm_changes(): + if message_contains("NO_SKIP"): + return None + if not is_feature_branch(): return None @@ -256,6 +333,9 @@ def message_contains(substring: str) -> bool: def skip_if_no_docs_changes(): + if message_contains("NO_SKIP"): + return None + if not is_feature_branch(os.getenv("BUILDKITE_BRANCH")): return None diff --git a/.buildkite/dagster-buildkite/setup.py b/.buildkite/dagster-buildkite/setup.py index 0cbbc99dd9427..e4cef3a49d6b3 100644 --- a/.buildkite/dagster-buildkite/setup.py +++ b/.buildkite/dagster-buildkite/setup.py @@ -1,7 +1,7 @@ from setuptools import find_packages, setup setup( - name="dagster_buildkite", + name="dagster-buildkite", version="0.0.1", author="Dagster Labs", author_email="hello@dagsterlabs.com", @@ -18,6 +18,7 @@ packages=find_packages(exclude=["test"]), install_requires=[ "PyYAML", + "tomli", "packaging>=20.9", "requests", "typing_extensions>=4.2", @@ -29,6 +30,8 @@ entry_points={ "console_scripts": [ "dagster-buildkite = dagster_buildkite.cli:dagster", + "dagster-buildkite-nightly = 
dagster_buildkite.cli:dagster_nightly", + "dagster-buildkite-prerelease-package = dagster_buildkite.cli:prerelease_package", ] }, ) diff --git a/.buildkite/hooks/pre-command b/.buildkite/hooks/pre-command index b622d268bda67..42d58cf175904 100755 --- a/.buildkite/hooks/pre-command +++ b/.buildkite/hooks/pre-command @@ -1,3 +1,5 @@ set -eu . ./.buildkite/scripts/docker_prune.sh + +export BUILDKITE_ANALYTICS_TOKEN=$DAGSTER_BUILDKITE_ANALYTICS_TOKEN diff --git a/.github/CODEOWNERS b/.github/CODEOWNERS deleted file mode 100644 index 93828a4e2c94e..0000000000000 --- a/.github/CODEOWNERS +++ /dev/null @@ -1 +0,0 @@ -/docs/** @erinkcochran87 diff --git a/.github/PULL_REQUEST_TEMPLATE.md b/.github/PULL_REQUEST_TEMPLATE.md index 9fc7f3ea48717..b67f4f5b67e10 100644 --- a/.github/PULL_REQUEST_TEMPLATE.md +++ b/.github/PULL_REQUEST_TEMPLATE.md @@ -1,3 +1,11 @@ ## Summary & Motivation ## How I Tested These Changes + +## Changelog + +Insert changelog entry or "NOCHANGELOG" here. + +- [ ] `NEW` _(added new feature or capability)_ +- [ ] `BUGFIX` _(fixed a bug)_ +- [ ] `DOCS` _(added or updated documentation)_ diff --git a/.github/example-lineage.png b/.github/example-lineage.png new file mode 100644 index 0000000000000..325f6dbea65b0 Binary files /dev/null and b/.github/example-lineage.png differ diff --git a/.github/styles b/.github/styles new file mode 120000 index 0000000000000..9549f4d10c299 --- /dev/null +++ b/.github/styles @@ -0,0 +1 @@ +docs/vale/styles/ \ No newline at end of file diff --git a/.github/workflows/build-dagster-university.yml b/.github/workflows/build-dagster-university.yml index 0a72b232aa356..369e16cdb281e 100644 --- a/.github/workflows/build-dagster-university.yml +++ b/.github/workflows/build-dagster-university.yml @@ -19,8 +19,11 @@ jobs: # Deploy to Vercel Previews on pull request - name: Get branch preview subdomain if: github.event_name == 'pull_request' + env: + HEAD_REF: ${{ github.head_ref }} + REF_NAME: ${{ github.ref_name }} run: | - BRANCH_PREVIEW_SUBDOMAIN=$(echo "${{ github.head_ref || github.ref_name }}" | sed 's/[^a-zA-Z0-9-]/-/g' | sed 's/^-*//' | sed 's/-*$//') + BRANCH_PREVIEW_SUBDOMAIN=$(echo "${HEAD_REF:-$REF_NAME}" | sed -e 's/[^a-zA-Z0-9-]/-/g; s/^-*//; s/-*$//') echo "$BRANCH_PREVIEW_SUBDOMAIN" echo "BRANCH_PREVIEW_SUBDOMAIN=$BRANCH_PREVIEW_SUBDOMAIN" >> "${GITHUB_ENV}" @@ -30,11 +33,11 @@ jobs: - name: Checkout master branch if: github.event_name == 'push' && github.ref == 'refs/heads/master' - uses: actions/checkout@v3 + uses: actions/checkout@v4 - name: Checkout PR branch if: github.event.pull_request - uses: actions/checkout@v3 + uses: actions/checkout@v4 - name: Publish Preview to Vercel uses: amondnet/vercel-action@v25 @@ -51,7 +54,7 @@ jobs: # Deploy to Vercel Production on push to master branch - name: Checkout docs-prod branch if: github.event_name == 'push' && github.ref == 'refs/heads/master' - uses: actions/checkout@v3 + uses: actions/checkout@v4 - name: Publish to Vercel Production uses: amondnet/vercel-action@v25 diff --git a/.github/workflows/build-docs-revamp.yml b/.github/workflows/build-docs-revamp.yml new file mode 100644 index 0000000000000..2d2410624a6b3 --- /dev/null +++ b/.github/workflows/build-docs-revamp.yml @@ -0,0 +1,107 @@ +name: Deploy Docs Revamp +on: + push: + branches: + - master + - docs-prod + paths: + - .github/workflows/build-docs-revamp.yml + - docs/docs-beta/** + - examples/docs_beta_snippets/** + - docs/sphinx/** + pull_request: + paths: + - docs/docs-beta/** + - examples/docs_beta_snippets/** + - 
.github/workflows/build-docs-revamp.yml + - docs/sphinx/** + +concurrency: + group: ${{ github.workflow}}-${{github.ref}} + cancel-in-progress: true + +jobs: + deploy: + runs-on: ubuntu-latest + # Deploy to Vercel Previews on pull request, push to master branch + steps: + - name: Get branch preview subdomain + env: + HEAD_REF: ${{ github.head_ref }} + REF_NAME: ${{ github.ref_name }} + if: | + github.event_name == 'pull_request' || + (github.event_name == 'push' && (github.ref == 'refs/heads/master' || startsWith(github.ref, 'refs/heads/release-') || startsWith(github.ref, 'refs/heads/docs-prod'))) + run: | + BRANCH_PREVIEW_SUBDOMAIN=$(echo "${HEAD_REF:-$REF_NAME}" | sed -e 's/[^a-zA-Z0-9-]/-/g; s/^-*//; s/-*$//' | cut -c1-63) + BRANCH_PREVIEW_SUBDOMAIN=$(echo $BRANCH_PREVIEW_SUBDOMAIN | sed 's/--/-/g; s/-$//') + echo "$BRANCH_PREVIEW_SUBDOMAIN" + echo "BRANCH_PREVIEW_SUBDOMAIN=$BRANCH_PREVIEW_SUBDOMAIN" >> "${GITHUB_ENV}" + + - name: Get fetch depth + run: | + if [[ ${{ github.event_name }} == 'pull_request' ]]; then + echo "FETCH_DEPTH=$(( ${{ github.event.pull_request.commits }} + 1 ))" >> $GITHUB_ENV + else + echo "FETCH_DEPTH=1" >> $GITHUB_ENV + fi + + - name: Checkout repository + uses: actions/checkout@v4 + with: + fetch-depth: ${{ env.FETCH_DEPTH }} + + - name: Install node + uses: actions/setup-node@v4 + with: + node-version: 18 + + - name: Lint Docs + run: | + cd docs/docs-beta + yarn install + yarn run lint + + - name: Get changed docs files for PR comment + if: ${{ github.event.pull_request }} + run: | + cd docs/docs-beta/docs + echo "Head ref is $GITHUB_HEAD_SHA" + git fetch origin $GITHUB_HEAD_SHA + # Compare the commit the branch is based on to its head to list changed files + CHANGED_MD_FILES=$(git diff --name-only HEAD~${{ github.event.pull_request.commits }} "$GITHUB_HEAD_SHA" -- '*.md' '*.mdx') + CHANGES_ENTRY=$(echo "$CHANGED_MD_FILES" | sed 's/\.mdx*$//' | sed 's/^docs\/docs-beta\/docs/- {{deploymentUrl}}/') + CHANGES_ENTRY=$(echo -e "Preview available at {{deploymentUrl}}\n\nDirect link to changed pages:\n$CHANGES_ENTRY") + echo "$CHANGES_ENTRY" + # https://docs.github.com/en/actions/using-workflows/workflow-commands-for-github-actions#multiline-strings + EOF=$(dd if=/dev/urandom bs=15 count=1 status=none | base64) + echo "CHANGES_ENTRY<<$EOF" >> $GITHUB_ENV + echo "$CHANGES_ENTRY" >> $GITHUB_ENV + echo "$EOF" >> $GITHUB_ENV + env: + GITHUB_HEAD_SHA: ${{ github.event.pull_request.head.sha }} + + - name: Publish Preview to Vercel + uses: amondnet/vercel-action@v25 + if: | + github.event_name == 'pull_request' || + (github.event_name == 'push' && (github.ref == 'refs/heads/master' || startsWith(github.ref, 'refs/heads/release-') || startsWith(github.ref, 'refs/heads/docs-prod'))) + with: + github-comment: ${{ github.event.pull_request && env.CHANGES_ENTRY || true }} + vercel-token: ${{ secrets.VERCEL_TOKEN }} + vercel-org-id: ${{ secrets.VERCEL_ORG_ID }} + vercel-project-id: ${{ secrets.VERCEL_DOCS_NEXT_PROJECT_ID }} + github-token: ${{ secrets.GITHUB_TOKEN }} + scope: ${{ secrets.VERCEL_ORG_ID }} + + - name: Publish to Vercel Production + uses: amondnet/vercel-action@v25 + # only deploy to production on master (TODO: switch to docs-prod when beta goes live) + if: github.event_name == 'push' && github.ref == 'refs/heads/master' + with: + vercel-token: ${{ secrets.VERCEL_TOKEN }} + vercel-org-id: ${{ secrets.VERCEL_ORG_ID }} + vercel-project-id: ${{ secrets.VERCEL_DOCS_NEXT_PROJECT_ID }} + vercel-args: "--prod" + github-token: ${{ secrets.GITHUB_TOKEN }} + scope: ${{ 
secrets.VERCEL_ORG_ID }} diff --git a/.github/workflows/build-docs.yml b/.github/workflows/build-docs.yml index b1373e3270c74..ce782ef24a1f9 100644 --- a/.github/workflows/build-docs.yml +++ b/.github/workflows/build-docs.yml @@ -7,12 +7,20 @@ on: - docs-prod # prod paths: - docs/** + - "!docs/docs-beta/**" # Exclude docs-beta + - examples/docs_snippets/** + - CHANGES.md + - .github/workflows/build-docs.yml pull_request: paths: - docs/** + - "!docs/docs-beta/**" # Exclude docs-beta + - examples/docs_snippets/** + - CHANGES.md + - .github/workflows/build-docs.yml concurrency: # Cancel in-progress runs on same branch - group: ${{ github.ref }} + group: ${{ github.workflow}}-${{github.ref}} cancel-in-progress: true jobs: deploy: @@ -20,27 +28,29 @@ jobs: steps: # Deploy to Vercel Previews on pull request, push to master branch, or push to release-* branch - name: Get branch preview subdomain + env: + HEAD_REF: ${{ github.head_ref }} + REF_NAME: ${{ github.ref_name }} if: | - github.event_name == 'pull_request' || - (github.event_name == 'push' && (github.ref == 'refs/heads/master' || startsWith(github.ref, 'refs/heads/release-'))) + github.event_name == 'pull_request' || + (github.event_name == 'push' && (github.ref == 'refs/heads/master' || startsWith(github.ref, 'refs/heads/release-') || startsWith(github.ref, 'refs/heads/docs-prod'))) run: | - BRANCH_PREVIEW_SUBDOMAIN=$(echo "${{ github.head_ref || github.ref_name }}" | sed 's/[^a-zA-Z0-9-]/-/g' | sed 's/^-*//' | sed 's/-*$//') + BRANCH_PREVIEW_SUBDOMAIN=$(echo "${HEAD_REF:-$REF_NAME}" | sed -e 's/[^a-zA-Z0-9-]/-/g; s/^-*//; s/-*$//' | head -c 63) echo "$BRANCH_PREVIEW_SUBDOMAIN" echo "BRANCH_PREVIEW_SUBDOMAIN=$BRANCH_PREVIEW_SUBDOMAIN" >> "${GITHUB_ENV}" - name: Checkout master/release branch - if: github.event_name == 'push' && (github.ref == 'refs/heads/master' || startsWith(github.ref, 'refs/heads/release-')) - uses: actions/checkout@v3 + if: github.event_name == 'push' && (github.ref == 'refs/heads/master' || startsWith(github.ref, 'refs/heads/release-') || startsWith(github.ref, 'refs/heads/docs-prod')) + uses: actions/checkout@v4 - name: Get PR fetch depth if: ${{ github.event.pull_request }} run: echo "PR_FETCH_DEPTH=$(( ${{ github.event.pull_request.commits }} + 1 ))" >> "${GITHUB_ENV}" - name: Checkout PR branch - uses: actions/checkout@v3 + uses: actions/checkout@v4 if: ${{ github.event.pull_request }} with: - ref: ${{ github.event.pull_request.head.ref }} fetch-depth: ${{ env.PR_FETCH_DEPTH }} - name: Get changed docs files for PR comment @@ -61,11 +71,17 @@ jobs: env: GITHUB_HEAD_SHA: ${{ github.event.pull_request.head.sha }} + - name: Copy doc snippets to public directory + run: | + ls + mkdir -p docs/next/public/docs_snippets + cp -R examples/docs_snippets/docs_snippets docs/next/public/docs_snippets/ + - name: Publish Preview to Vercel uses: amondnet/vercel-action@v25 if: | - github.event_name == 'pull_request' || - (github.event_name == 'push' && (github.ref == 'refs/heads/master' || startsWith(github.ref, 'refs/heads/release-'))) + github.event_name == 'pull_request' || + (github.event_name == 'push' && (github.ref == 'refs/heads/master' || startsWith(github.ref, 'refs/heads/release-') || startsWith(github.ref, 'refs/heads/docs-prod'))) with: github-comment: ${{ github.event.pull_request && env.CHANGES_ENTRY || true }} vercel-token: ${{ secrets.VERCEL_TOKEN }} @@ -78,7 +94,13 @@ jobs: # Deploy to Vercel Production on push to docs-prod branch - name: Checkout docs-prod branch if: github.event_name == 'push' && github.ref 
== 'refs/heads/docs-prod' - uses: actions/checkout@v3 + uses: actions/checkout@v4 + + - name: Copy doc snippets to public directory (Production) + if: github.event_name == 'push' && github.ref == 'refs/heads/docs-prod' + run: | + mkdir -p docs/next/public/docs_snippets + cp -R examples/docs_snippets/docs_snippets docs/next/public/docs_snippets/ - name: Publish to Vercel Production uses: amondnet/vercel-action@v25 diff --git a/.github/workflows/build-storybook-core.yml b/.github/workflows/build-storybook-core.yml index 615f449ff9287..df4111e94d7d2 100644 --- a/.github/workflows/build-storybook-core.yml +++ b/.github/workflows/build-storybook-core.yml @@ -14,11 +14,14 @@ jobs: steps: - name: Get branch preview subdomain if: github.event_name == 'pull_request' + env: + HEAD_REF: ${{ github.head_ref }} + REF_NAME: ${{ github.ref_name }} run: | - BRANCH_PREVIEW_SUBDOMAIN=$(echo "${{ github.head_ref || github.ref_name }}" | sed 's/[^a-zA-Z0-9-]/-/g' | sed 's/^-*//' | sed 's/-*$//') + BRANCH_PREVIEW_SUBDOMAIN=$(echo "${HEAD_REF:-$REF_NAME}" | sed -e 's/[^a-zA-Z0-9-]/-/g; s/^-*//; s/-*$//') echo "$BRANCH_PREVIEW_SUBDOMAIN" echo "BRANCH_PREVIEW_SUBDOMAIN=$BRANCH_PREVIEW_SUBDOMAIN" >> "${GITHUB_ENV}" - - uses: actions/checkout@v3 + - uses: actions/checkout@v4 - uses: amondnet/vercel-action@v25 if: github.event_name == 'pull_request' with: diff --git a/.github/workflows/build-storybook-ui.yml b/.github/workflows/build-storybook-ui.yml index a7b108dc449e8..2ed6b4bc4ade2 100644 --- a/.github/workflows/build-storybook-ui.yml +++ b/.github/workflows/build-storybook-ui.yml @@ -14,11 +14,14 @@ jobs: steps: - name: Get branch preview subdomain if: github.event_name == 'pull_request' + env: + HEAD_REF: ${{ github.head_ref }} + REF_NAME: ${{ github.ref_name }} run: | - BRANCH_PREVIEW_SUBDOMAIN=$(echo "${{ github.head_ref || github.ref_name }}" | sed 's/[^a-zA-Z0-9-]/-/g' | sed 's/^-*//' | sed 's/-*$//') + BRANCH_PREVIEW_SUBDOMAIN=$(echo "${HEAD_REF:-$REF_NAME}" | sed -e 's/[^a-zA-Z0-9-]/-/g; s/^-*//; s/-*$//') echo "$BRANCH_PREVIEW_SUBDOMAIN" echo "BRANCH_PREVIEW_SUBDOMAIN=$BRANCH_PREVIEW_SUBDOMAIN" >> "${GITHUB_ENV}" - - uses: actions/checkout@v3 + - uses: actions/checkout@v4 - uses: amondnet/vercel-action@v25 if: github.event_name == 'pull_request' with: diff --git a/.github/workflows/vale.yml b/.github/workflows/vale.yml new file mode 100644 index 0000000000000..1aa485b2e1fd4 --- /dev/null +++ b/.github/workflows/vale.yml @@ -0,0 +1,37 @@ +name: Vale Docs +on: + pull_request: + paths: + - 'docs/docs-beta/**' + - .github/workflows/vale.yml + push: + branches: + - master + - docs-prod + paths: + - 'docs/docs-beta/**' + - .github/workflows/vale.yml + +concurrency: + group: ${{ github.workflow}}-${{github.ref}} + cancel-in-progress: true + +permissions: + contents: read + +jobs: + vale: + name: runner / vale + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v3 + - uses: errata-ai/vale-action@reviewdog + env: + REVIEWDOG_GITHUB_API_TOKEN: ${{ secrets.GITHUB_TOKEN }} + with: + files: '["docs/docs-beta/docs"]' + vale_flags: "--config=docs/.vale.ini --minAlertLevel=warning" + fail_on_error: true + reporter: github-pr-review + + diff --git a/.gitignore b/.gitignore index 670072ed0b59d..680c845a06756 100644 --- a/.gitignore +++ b/.gitignore @@ -45,6 +45,7 @@ nosetests.xml coverage.xml *.cover .hypothesis/ +mlruns/ # Translations *.mo @@ -83,6 +84,7 @@ celerybeat-schedule .envrc # virtualenv +.direnv/ .venv venv/ ENV/ @@ -106,13 +108,13 @@ Pipfile.lock .mypy_cache/ tags 
+!python_modules/dagster/dagster/_core/definitions/tags .pytest_cache .DS_Store docs/_build python_modules/dagster/docs/_build -.vscode/ dagit_run_logs @@ -168,7 +170,7 @@ pythonenv*/ *.duckdb # PyRight config -pyrightconfig.json +pyrightconfig* # Scripts working directory scripts/.build diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 932be7194e4a3..e19c166b0dc5f 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -1,8 +1,34 @@ +default_stages: [pre-commit] # don't run on push by default repos: -- repo: https://github.com/charliermarsh/ruff-pre-commit - rev: v0.1.7 +- repo: local hooks: - - id: ruff - args: [--fix, --exit-non-zero-on-fix] - types_or: [python, pyi, jupyter] - - id: ruff-format + - id: ruff-format + name: Ruff Format + entry: ruff + args: [format] + language: system + pass_filenames: false + - id: ruff-lint + name: Ruff Lint + entry: ruff + args: [check, --fix, --exit-non-zero-on-fix] + language: system + pass_filenames: false + - id: docs-mdx-format + name: Format Docs + language: system + entry: bash -c "cd docs && make mdx-format" + pass_filenames: false + files: ^docs/content + + # We do not use pyright's provided pre-commit hook because we need the environment management + # supplied by `scripts/run-pyright.py`. + - id: pyright + name: pyright + entry: make quick_pyright + stages: [pre-push] + # This means pre-commit will not try to install a new environment for this hook. It relies on + # having a pre-existing `make` installed (and scripts/run-pyright.py). + language: system + pass_filenames: false + types: [python] diff --git a/.vscode/.gitignore b/.vscode/.gitignore new file mode 100644 index 0000000000000..e1093a8037452 --- /dev/null +++ b/.vscode/.gitignore @@ -0,0 +1,7 @@ +* +!.gitignore +!extensions.json +!README.md +!settings.json.default +!launch.json.default +!tasks.json.default diff --git a/.vscode/README.md b/.vscode/README.md new file mode 100644 index 0000000000000..c718b0e0938a9 --- /dev/null +++ b/.vscode/README.md @@ -0,0 +1,27 @@ +# VS Code editor settings + +If you use VS Code, we recommend the following configuration: + +## Step 1: Install the recommended extensions + +The [recommended extensions](.vscode/extensions.json) will automatically show up when browsing +extensions for your editor. Install them. + +See [VS Code's documentation on recommended extensions](https://code.visualstudio.com/docs/editor/extension-marketplace#_workspace-recommended-extensions) +to add new extensions to the recommended list. + +## Step 2: Enable the default editor settings + +The recommended editor settings, [`settings.json.default`](.vscode/settings.json.default), are not enabled by default. To +enable the default settings, copy the file to `.vscode/settings.json` to enable them for this +repository. These settings will override any existing user or workspace settings you have already +configured. + +If you already have existing settings, you can instead copy specific configuration from the +recommended editor settings to your user or workspace settings. + +See the [settings precedence](https://code.visualstudio.com/docs/getstarted/settings#_settings-precedence) for more details. + +Likewise, the recommended debugger entrypoints, [`launch.json.default`](.vscode/launch.json.default) +and [`tasks.json.default`](.vscode/tasks.json.default), are not enabled by default. To enable the +default, copy these files to `.vscode/launch.json` and `.vscode/tasks.json`. 
diff --git a/.vscode/extensions.json b/.vscode/extensions.json new file mode 100644 index 0000000000000..c219677cb68aa --- /dev/null +++ b/.vscode/extensions.json @@ -0,0 +1,30 @@ +{ + "recommendations": [ + /// Python development + // https://marketplace.visualstudio.com/items?itemName=charliermarsh.ruff + "charliermarsh.ruff", + + // https://marketplace.visualstudio.com/items?itemName=ms-python.python + "ms-python.python", + + // https://marketplace.visualstudio.com/items?itemName=ms-python.vscode-pylance + "ms-python.vscode-pylance", + + /// Typescript development + // https://marketplace.visualstudio.com/items?itemName=dbaeumer.vscode-eslint + "dbaeumer.vscode-eslint", + + // https://marketplace.visualstudio.com/items?itemName=esbenp.prettier-vscode + "esbenp.prettier-vscode", + + // https://marketplace.visualstudio.com/items?itemName=graphql.vscode-graphql + "graphql.vscode-graphql", + + /// General development + // https://marketplace.visualstudio.com/items?itemName=github.copilot + "github.copilot", + + // https://marketplace.visualstudio.com/items?itemName=eamodio.gitlens + "eamodio.gitlens" + ] +} diff --git a/.vscode/launch.json.default b/.vscode/launch.json.default new file mode 100644 index 0000000000000..969fd5881c11d --- /dev/null +++ b/.vscode/launch.json.default @@ -0,0 +1,56 @@ +{ + "version": "0.2.0", + "configurations": [ + { + "name": "Dagster Backend", + "type": "debugpy", + "request": "launch", + "module": "dagster", + "args": ["dev"], + "env": {}, + "console": "integratedTerminal", + "cwd": "${workspaceFolder}", + "justMyCode": false, + "preLaunchTask": "Open Dagster", + "presentation": { + "group": "1_dagster" + } + }, + { + "name": "Dagster Frontend", + "type": "node", + "request": "launch", + "runtimeExecutable": "yarn", + "runtimeArgs": ["start"], + "cwd": "${workspaceFolder}/js_modules/dagster-ui", + "console": "integratedTerminal", + "env": { + "NEXT_PUBLIC_BACKEND_ORIGIN": "http://127.0.0.1:3000" + }, + "preLaunchTask": "Generate Dagster GraphQL Schema", + "presentation": { + "group": "1_dagster" + } + }, + { + "name": "Pytest", + "type": "debugpy", + "request": "launch", + "module": "pytest", + "args": ["${file}::${selectedText}"], + "presentation": { + "group": "0_development_mode" + } + } + ], + "compounds": [ + { + "name": "Dagster (Launch All)", + "configurations": ["Dagster Frontend", "Dagster Backend"], + "stopAll": true, + "presentation": { + "group": "0_development_mode" + } + } + ] +} diff --git a/.vscode/settings.json.default b/.vscode/settings.json.default new file mode 100644 index 0000000000000..6a89fa59936c7 --- /dev/null +++ b/.vscode/settings.json.default @@ -0,0 +1,62 @@ +{ + /// Autofix violations on-save. + // https://marketplace.visualstudio.com/items?itemName=charliermarsh.ruff + // Set this to be the path to your ruff executable, e.g. + // + // ```bash + // which ruff + // ``` + // + "ruff.path": [], + + // Set this to be the path to your python interpreter, e.g. + // + // ```bash + // pyenv which python + // ``` + // + "ruff.interpreter": [], + + // Format code on-save. + "[python]": { + "editor.defaultFormatter": "charliermarsh.ruff", + "editor.formatOnSave": true, + "editor.codeActionsOnSave": { + "source.fixAll.ruff": "explicit", + "source.organizeImports.ruff": "explicit" + } + }, + + /// Configure linting. 
+ // https://code.visualstudio.com/docs/python/linting#_general-linting-settings + "python.linting.enabled": true, + "python.linting.lintOnSave": true, + "python.linting.pylintEnabled": false, + "python.linting.mypyEnabled": true, + + /// Configure code analysis. + // https://code.visualstudio.com/docs/python/settings-reference#_code-analysis-settings + "python.languageServer": "Pylance", + "python.analysis.indexing": true, + "python.analysis.typeCheckingMode": "basic", + "python.analysis.autoImportCompletions": true, + "python.analysis.completeFunctionParens": true, + "python.analysis.inlayHints.variableTypes": true, + "python.analysis.inlayHints.functionReturnTypes": false, + + /// Configuration for front-end. + // https://marketplace.visualstudio.com/items?itemName=dbaeumer.vscode-eslint + "[javascript][typescript][typescriptreact]": { + "editor.defaultFormatter": "dbaeumer.vscode-eslint", + "editor.formatOnSave": true, + "editor.codeActionsOnSave": { + "source.fixAll.eslint": "explicit", + "source.organizeImports.eslint": "explicit", + "source.removeUnusedImports": "explicit" + } + }, + "[json][jsonc]": { + "editor.defaultFormatter": "esbenp.prettier-vscode", + "editor.formatOnSave": true + } +} diff --git a/.vscode/tasks.json.default b/.vscode/tasks.json.default new file mode 100644 index 0000000000000..2df0104f534aa --- /dev/null +++ b/.vscode/tasks.json.default @@ -0,0 +1,24 @@ +{ + "version": "2.0.0", + "tasks": [ + { + "label": "Generate Dagster GraphQL Schema", + "type": "shell", + "command": "yarn workspace @dagster-io/ui-core generate-graphql", + "options": { + "cwd": "${workspaceFolder}/js_modules/dagster-ui" + }, + "presentation": { + "reveal": "silent" + } + }, + { + "label": "Open Dagster", + "type": "shell", + "command": "while ! nc -z localhost 3000; do sleep 0.1; done; open http://localhost:3000", + "presentation": { + "reveal": "silent" + } + } + ] +} diff --git a/.yamllint.yaml b/.yamllint.yaml deleted file mode 100644 index 14b9a102b2062..0000000000000 --- a/.yamllint.yaml +++ /dev/null @@ -1,7 +0,0 @@ ---- -extends: default - -rules: - comments-indentation: disable - line-length: - max: 130 diff --git a/CHANGES.md b/CHANGES.md index 0c7db73ca3c84..ddd39d63a5d55 100644 --- a/CHANGES.md +++ b/CHANGES.md @@ -1,5 +1,1277 @@ # Changelog +## 1.8.9 (core) / 0.24.9 (libraries) + +### New + +- `AssetSpec` now has a `with_io_manager_key` method that returns an `AssetSpec` with the appropriate metadata entry to dictate the key for the IO manager used to load it. The deprecation warning for `SourceAsset` now references this method. +- Added a `max_runtime_seconds` configuration option to run monitoring, allowing you to specify that any run in your Dagster deployment should terminate if it exceeds a certain runtime. Previously, jobs had to be individually tagged with a `dagster/max_runtime` tag in order to take advantage of this feature. Jobs and runs can still be tagged in order to override this value for an individual run. +- It is now possible to set both `tags` and a custom `execution_fn` on a `ScheduleDefinition`. Schedule `tags` are intended to annotate the definition and can be used to search and filter in the UI. They will not be attached to run requests emitted from the schedule if a custom `execution_fn` is provided. If no custom `execution_fn` is provided, then for back-compatibility the tags will also be automatically attached to run requests emitted from the schedule. +- `SensorDefinition` and all of its variants/decorators now accept a `tags` parameter.
The tags annotate the definition and can be used to search and filter in the UI. +- Added the `dagster definitions validate` command to the Dagster CLI. This command validates if Dagster definitions are loadable. +- [dagster-databricks] Databricks Pipes now allow running tasks in existing clusters. + +### Bugfixes + +- Fixed an issue where calling `build_op_context` in a unit test would sometimes raise a `TypeError: signal handler must be signal.SIG_IGN, signal.SIG_DFL, or a callable object` exception on process shutdown. +- [dagster-webserver] Fixed an issue where the incorrect sensor/schedule state would appear when using `DefaultScheduleStatus.STOPPED` / `DefaultSensorStatus.STOPPED` after performing a reset. + +### Documentation + +- [dagster-pipes] Fixed inconsistencies in the k8s pipes example. +- [dagster-pandas-pyspark] Fixed example in the Spark/Pandas SDA guide. + +### Dagster Plus + +- Fixed an issue where users with Launcher permissions for a particular code location were not able to cancel backfills targeting only assets in that code location. +- Fixed an issue preventing long-running alerts from being sent when there was a quick subsequent run. + +## 1.8.8 (core) / 0.24.8 (libraries) + +### New + +- Added `--partition-range` option to `dagster asset materialize` CLI. This option only works for assets with single-run Backfill Policies. +- Added a new `.without()` method to `AutomationCondition.eager()`, `AutomationCondition.on_cron()`, and `AutomationCondition.on_missing()` which allows sub-conditions to be removed, e.g. `AutomationCondition.eager().without(AutomationCondition.in_latest_time_window())` (see the sketch below). +- Added `AutomationCondition.on_missing()`, which materializes an asset partition as soon as all of its parent partitions are filled in. +- `pyproject.toml` can now load multiple Python modules as individual Code Locations. Thanks, [@bdart](https://github.com/bdart)! +- [ui] If a code location has errors, a button will be shown to view the error on any page in the UI. +- [dagster-adls2] The `ADLS2PickleIOManager` now accepts `lease_duration` configuration. Thanks, [@0xfabioo](https://github.com/0xfabioo)! +- [dagster-embedded-elt] Added an option to fetch row count metadata after running a Sling sync by calling `sling.replicate(...).fetch_row_count()`. +- [dagster-fivetran] The dagster-fivetran integration will now automatically pull and attach column schema metadata after each sync. + +### Bugfixes + +- Fixed an issue which could cause errors when using `AutomationCondition.any_downstream_condition()` with downstream `AutoMaterializePolicy` objects. +- Fixed an issue where `process_config_and_initialize` did not properly handle processing nested resource config. +- [ui] Fixed an issue that would cause some AutomationCondition evaluations to be labeled `DepConditionWrapperCondition` instead of the key that they were evaluated against. +- [dagster-webserver] Fixed an issue with code locations appearing in fluctuating incorrect state in deployments with multiple webserver processes. +- [dagster-embedded-elt] Fixed an issue where Sling column lineage did not correctly resolve in the Dagster UI. +- [dagster-k8s] The `wait_for_pod` check now waits until all pods are available, rather than erroneously returning after the first pod becomes available. Thanks [@easontm](https://github.com/easontm)! + +### Dagster Plus + +- Backfill daemon logs are now available in the "Coordinator Logs" tab in a backfill details page.
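To make the two `AutomationCondition` additions above concrete, here is a minimal sketch, assuming the `automation_condition` parameter of the standard `@asset` decorator; the asset bodies are hypothetical placeholders:

```python
from dagster import AutomationCondition, asset

# Eager automation with one sub-condition removed, per the new `.without()` method.
@asset(
    automation_condition=AutomationCondition.eager().without(
        AutomationCondition.in_latest_time_window()
    )
)
def eagerly_updated_asset() -> None: ...

# Materialize a partition as soon as all of its parent partitions are filled in.
@asset(automation_condition=AutomationCondition.on_missing())
def gap_filling_asset() -> None: ...
```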
+- Users without proper code location permissions can no longer edit sensor cursors. + +## 1.8.7 (core) / 0.24.7 (libraries) + +### New + +- The `AssetSpec` constructor now raises an error if an invalid group name is provided, instead of an error being raised when constructing the `Definitions` object. +- `dagster/relation_identifier` metadata is now automatically attached to assets which are stored using a DbIOManager. +- [ui] Streamlined the code location list view. +- [ui] The “group by” selection on the Timeline Overview page is now part of the query parameters, meaning it will be retained when linked to directly or when navigating between pages. +- [dagster-dbt] When instantiating `DbtCliResource`, the `project_dir` argument will now override the `DBT_PROJECT_DIR` environment variable if it exists in the local environment (thanks, [@marijncv](https://github.com/marijncv)!). +- [dagster-embedded-elt] dlt assets now generate `rows_loaded` metadata (thanks, [@kristianandre](https://github.com/kristianandre)!). +- Added support for pydantic version 1.9.0. + +### Bugfixes + +- Fixed a bug where setting `asset_selection=[]` on `RunRequest` objects yielded from sensors using `asset_selection` would select all assets instead of none. +- Fixed a bug where the tick status filter for batch-fetched graphql sensors was not being respected. +- [examples] Fixed missing assets in `assets_dbt_python` example. +- [dagster-airbyte] Updated the op names generated for Airbyte assets to include the full connection ID, avoiding name collisions. +- [dagster-dbt] Fixed issue causing dagster-dbt to be unable to load dbt projects where the adapter did not have a `database` field set (thanks, [@dargmuesli](https://github.com/dargmuesli)!) +- [dagster-dbt] Removed a warning about not being able to load the `dbt.adapters.duckdb` module when loading dbt assets without that package installed. + +### Documentation + +- Fixed a typo on the automation concepts page (thanks, [@oedokumaci](https://github.com/oedokumaci)!) + +### Dagster Plus + +- You may now wipe specific asset partitions directly from the execution context in user code by calling `DagsterInstance.wipe_asset_partitions`. +- Dagster+ users with a "Viewer" role can now create private catalog views. +- Fixed an issue where the default IOManager used by Dagster+ Serverless did not respect setting `allow_missing_partitions` as metadata on a downstream asset. + +## 1.8.6 (core) / 0.24.6 (libraries) + +### Bugfixes + +- Fixed an issue where runs in Dagster+ Serverless that materialized partitioned assets would sometimes fail with an `object has no attribute '_base_path'` error. +- [dagster-graphql] Fixed an issue where the `statuses` filter argument to the `sensorsOrError` GraphQL field was sometimes ignored when querying GraphQL for multiple sensors at the same time. + +## 1.8.5 (core) / 0.24.5 (libraries) + +### New + +- Updated multi-asset sensor definition to be less likely to time out queries against the asset history storage. +- Consolidated the `CapturedLogManager` and `ComputeLogManager` APIs into a single base class. +- [ui] Added an option under user settings to clear client-side IndexedDB caches as an escape hatch for caching-related bugs. +- [dagster-aws, dagster-pipes] Added a new `PipesECSClient` to allow Dagster to interface with ECS tasks. +- [dagster-dbt] Increased the default timeout when terminating a run that is running a `dbt` subprocess to wait 25 seconds for the subprocess to cleanly terminate. Previously, it would only wait 2 seconds.
+- [dagster-sdf] Increased the default timeout when terminating a run that is running an `sdf` subprocess to wait 25 seconds for the subprocess to cleanly terminate. Previously, it would only wait 2 seconds. +- [dagster-sdf] Added support for caching and asset selection (Thanks, [akbog](https://github.com/akbog)!) +- [dagster-dlt] Added support for `AutomationCondition` using `DagsterDltTranslator.get_automation_condition()` (Thanks, [aksestok](https://github.com/aksestok)!) +- [dagster-k8s] Added support for setting `dagsterDaemon.runRetries.retryOnAssetOrOpFailure` to False in the Dagster Helm chart to [prevent op retries and run retries from simultaneously firing on the same failure.](https://docs.dagster.io/deployment/run-retries#combining-op-and-run-retries) +- [dagster-wandb] Removed usage of deprecated `recursive` parameter (Thanks, [chrishiste](https://github.com/chrishiste)!) + +### Bugfixes + +- [ui] Fixed a bug where in-progress runs from a backfill could not be terminated from the backfill UI. +- [ui] Fixed a bug that caused an "Asset must be part of at least one job" error when clicking on an external asset in the asset graph UI. +- Fixed an issue where viewing run logs with the latest 5.0 release of the watchdog package raised an exception. +- [ui] Fixed issue causing the “filter to group” action in the lineage graph to have no effect. +- [ui] Fixed case sensitivity when searching for partitions in the launchpad. +- [ui] Fixed a bug which would redirect to the events tab for an asset if you loaded the partitions tab directly. +- [ui] Fixed issue causing runs to get skipped when paging through the runs list (Thanks, [@HynekBlaha](https://github.com/HynekBlaha)!) +- [ui] Fixed a bug where the asset catalog list view for a particular group would show all assets. +- [dagster-dbt] Fixed a bug where empty newlines in raw dbt logs were not being handled correctly. +- [dagster-k8s, dagster-celery-k8s] Correctly set `dagster/image` label when image is provided from `user_defined_k8s_config`. (Thanks, [@HynekBlaha](https://github.com/HynekBlaha)!) +- [dagster-duckdb] Fixed an issue for DuckDB versions older than 1.0.0 where an unsupported configuration option, `custom_user_agent`, was provided by default. +- [dagster-k8s] Fixed an issue where Kubernetes Pipes failed to create a pod if the op name contained capital or non-alphanumeric characters. +- [dagster-embedded-elt] Fixed an issue where dbt assets downstream of Sling were skipped. + +### Deprecations + +- [dagster-aws] Direct AWS API arguments in `PipesGlueClient.run` have been deprecated and will be removed in `1.9.0`. The new `params` argument should be used instead (see the sketch below). + +### Dagster Plus + +- Fixed a bug that caused an error when loading the launchpad for a partition, when using Dagster+ with an agent with version below 1.8.2. +- Fixed an issue where terminating a Dagster+ Serverless run wouldn’t forward the termination signal to the job to allow it to cleanly terminate. + +## 1.8.4 (core) / 0.24.4 (libraries) + +### Bugfixes + +- Fixed an issue where viewing run logs with the latest 5.0 release of the watchdog package raised an exception. +- Fixed a bug that caused an "Asset must be part of at least one job" error when clicking on an external asset in the asset graph UI. + +### Dagster Plus + +- The default io_manager on Serverless now supports the `allow_missing_partitions` configuration option.
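A hedged sketch of the `params` migration named in the deprecation above. The resource wiring is omitted, and the Glue job name and argument keys are hypothetical, mirroring boto3's `start_job_run` parameters:

```python
from dagster import AssetExecutionContext, asset
from dagster_aws.pipes import PipesGlueClient

@asset
def glue_pipes_asset(context: AssetExecutionContext, pipes_glue_client: PipesGlueClient):
    # Pass AWS API arguments through the new `params` argument
    # instead of as direct keyword arguments to run().
    return pipes_glue_client.run(
        context=context,
        params={"JobName": "my-glue-job", "Arguments": {"--input": "s3://bucket/in"}},
    ).get_materialize_result()
```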
+- Fixed a bug that caused an error when loading the launchpad for a partition, when using Dagster+ with an agent with a version below 1.8.2. + +## 1.8.3 (core) / 0.24.3 (libraries) (YANKED - This version of Dagster resulted in errors when trying to launch runs that target individual asset partitions) + +### New + +- When different assets within a code location have different `PartitionsDefinition`s, there will no longer be an implicit asset job `__ASSET_JOB_...` for each `PartitionsDefinition`; there will just be one with all the assets. This reduces the time it takes to load code locations with assets with many different `PartitionsDefinition`s. + +## 1.8.2 (core) / 0.24.2 (libraries) + +### New + +- [ui] Improved performance of the Automation history view for partitioned assets. +- [ui] You can now delete dynamic partitions for an asset from the UI. +- [dagster-sdf] Added support for quoted table identifiers (Thanks, [@akbog](https://github.com/akbog)!) +- [dagster-openai] Added additional configuration options for the `OpenAIResource` (Thanks, [@chasleslr](https://github.com/chasleslr)!) +- [dagster-fivetran] Fivetran assets now have relation identifier metadata. + +### Bugfixes + +- [ui] Fixed a collection of broken links pointing to renamed Declarative Automation pages. +- [dagster-dbt] Fixed issue preventing usage of `MultiPartitionMapping` with `@dbt_assets` (Thanks, [@arookieds](https://github.com/arookieds)!) +- [dagster-azure] Fixed issue that would cause an error when configuring an `AzureBlobComputeLogManager` without a `secret_key` (Thanks, [@ion-elgreco](https://github.com/ion-elgreco) and [@HynekBlaha](https://github.com/HynekBlaha)!) + +### Documentation + +- Added API docs for `AutomationCondition` and associated static constructors. +- [dagster-deltalake] Corrected some typos in the integration reference (Thanks, [@dargmuesli](https://github.com/dargmuesli)!) +- [dagster-aws] Added API docs for the new `PipesCloudWatchMessageReader` + +# 1.8.1 (core) / 0.24.1 (libraries) + +### New + +- If the sensor daemon fails while submitting runs, it will now checkpoint its progress and attempt to submit the remaining runs on the next evaluation. +- `build_op_context` and `build_asset_context` now accept a `run_tags` argument. +- Nested partially configured resources can now be used outside of `Definitions`. +- [ui] Replaced GraphQL Explorer with GraphiQL. +- [ui] The run timeline can now be grouped by job or by automation. +- [ui] For users in the experimental navigation flag, schedules and sensors are now in a single merged automations table. +- [ui] Logs can now be filtered by metadata keys and values. +- [ui] Logs for `RUN_CANCELED` events now display relevant error messages. +- [dagster-aws] The new `PipesCloudWatchMessageReader` can consume logs from CloudWatch as pipes messages. +- [dagster-aws] Glue jobs launched via pipes can be automatically canceled if Dagster receives a termination signal. +- [dagster-azure] `AzureBlobComputeLogManager` now supports service principals, thanks @[ion-elgreco](https://github.com/ion-elgreco)! +- [dagster-databricks] `dagster-databricks` now supports `databricks-sdk<=0.17.0`. +- [dagster-datahub] `dagster-datahub` now allows pydantic versions below 3.0.0, thanks @[kevin-longe-unmind](https://github.com/kevin-longe-unmind)! +- [dagster-dbt] The `DagsterDbtTranslator` class now supports modifying the `AutomationCondition` for dbt models by overriding `get_automation_condition` (see the sketch below). +- [dagster-pandera] `dagster-pandera` now supports `polars`.
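A minimal sketch of the `get_automation_condition` override mentioned above; the exact parameter name and return annotation are assumed from the usual dagster-dbt translator pattern (a mapping of dbt resource properties):

```python
from typing import Any, Mapping, Optional

from dagster import AutomationCondition
from dagster_dbt import DagsterDbtTranslator

class EagerDbtTranslator(DagsterDbtTranslator):
    # Attach an eager automation condition to every dbt model.
    def get_automation_condition(
        self, dbt_resource_props: Mapping[str, Any]
    ) -> Optional[AutomationCondition]:
        return AutomationCondition.eager()
```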
+- [dagster-sdf] Table and column tests can now be used as asset checks. +- [dagster-embedded-elt] Column metadata and lineage can be fetched on Sling assets by chaining the new `replicate(...).fetch_column_metadata()` method. +- [dagster-embedded-elt] dlt resource docstrings will now be used to populate asset descriptions, by default. +- [dagster-embedded-elt] dlt assets now generate column metadata. +- [dagster-embedded-elt] dlt transformers now refer to the base resource as upstream asset. +- [dagster-openai] `OpenAIResource` now supports `organization`, `project` and `base_url` for configuring the OpenAI client, thanks @[chasleslr](https://github.com/chasleslr)! +- [dagster-pandas][dagster-pandera][dagster-wandb] These libraries no longer pin `numpy<2`, thanks @[judahrand](https://github.com/judahrand)! + +### Bugfixes + +- Fixed a bug where job backfills using backfill policies that materialized multiple partitions in a single run would be launched multiple times. +- Fixed an issue where runs would sometimes move into a FAILURE state rather than a CANCELED state if an error occurred after a run termination request was started. +- [ui] Fixed a bug where an incorrect dialog was shown when canceling a backfill. +- [ui] Fixed the asset page header breadcrumbs for assets with very long key path elements. +- [ui] Fixed the run timeline time markers for users in timezones that have off-hour offsets. +- [ui] Fixed bar chart tooltips to use correct timezone for timestamp display. +- [ui] Fixed an issue introduced in the 1.8.0 release where some jobs created from graph-backed assets were missing the “View as Asset Graph” toggle in the Dagster UI. + +### Breaking Changes + +- [dagster-airbyte] `AirbyteCloudResource` now supports `client_id` and `client_secret` for authentication - the `api_key` approach is no longer supported. This is motivated by the [deprecation of portal.airbyte.com](https://reference.airbyte.com/reference/portalairbytecom-deprecation) on August 15, 2024. + +### Deprecations + +- [dagster-databricks] Removed deprecated authentication clients provided by `databricks-cli` and `databricks_api`. +- [dagster-embedded-elt] Removed deprecated Sling resources `SlingSourceConnection`, `SlingTargetConnection`. +- [dagster-embedded-elt] Removed deprecated Sling methods `build_sling_assets` and `sync`. + +### Documentation + +- The Integrating Snowflake & dbt with Dagster+ Insights guide no longer erroneously references BigQuery, thanks @[dnxie12](https://github.com/dnxie12)! + +# 1.8.0 (core) / 0.24.0 (libraries) + +## Major changes since 1.7.0 (core) / 0.22.0 (libraries) + +### Core definition APIs + +- You can now pass `AssetSpec` objects to the `assets` argument of `Definitions`, to let Dagster know about assets without associated materialization functions. This replaces the experimental `external_assets_from_specs` API, as well as `SourceAsset`s, which are now deprecated. Unlike `SourceAsset`s, `AssetSpec`s can be used for non-materializable assets with dependencies on Dagster assets, such as BI dashboards that live downstream of warehouse tables that are orchestrated by Dagster. [[docs](https://docs.dagster.io/concepts/assets/external-assets)]. +- [Experimental] You can now merge `Definitions` objects together into a single larger `Definitions` object, using the new `Definitions.merge` API ([doc](https://docs.dagster.io/_apidocs/definitions#dagster.Definitions.merge)).
This makes it easier to structure large Dagster projects, as you can construct a `Definitions` object for each sub-domain and then merge them together at the top level. + +### Partitions and backfills + +- `BackfillPolicy`s assigned to assets are now respected for backfills launched from jobs that target those assets. +- You can now wipe materializations for individual asset partitions. + +### Automation + +- [Experimental] You can now add `AutomationCondition`s to your assets to have them automatically executed in response to specific conditions ([docs](https://docs.dagster.io/concepts/automation/declarative-automation)). These serve as a drop-in replacement and improvement over the `AutoMaterializePolicy` system, which is being marked as deprecated. +- [Experimental] Sensors and schedules can now directly target assets, via the new `target` parameter, instead of needing to construct a job. +- [Experimental] The Timeline page can now be grouped by job or automation. When grouped by automation, all runs launched by a sensor responsible for evaluating automation conditions will get bucketed to that sensor in the timeline instead of the "Ad-hoc materializations" row. Enable this by opting in to the `Experimental navigation` feature flag in user settings. + +### Catalog + +- The Asset Details page now prominently displays row count and relation identifier (table name, schema, database), when corresponding asset metadata values are provided. For more information, see the [metadata and tags docs](https://docs.dagster.io/concepts/metadata-tags#metadata--tags). +- Introduced code reference metadata which can be used to open local files in your editor, or files in source control in your browser. Dagster can automatically attach code references to your assets’ Python source. For more information, see the [docs](https://docs.dagster.io/guides/dagster/code-references). + +### Data quality and reliability + +- [Experimental] Metadata bounds checks – The new `build_metadata_bounds_checks` API [[doc](https://docs.dagster.io/_apidocs/asset-checks#dagster.build_metadata_bounds_checks)] enables easily defining asset checks that fail if a numeric asset metadata value falls outside given bounds. +- [Experimental] Freshness checks from dbt config - Freshness checks can now be set on dbt assets, straight from dbt. Check out the API docs for [build_freshness_checks_from_dbt_assets](https://docs.dagster.io/_apidocs/libraries/dagster-dbt#dagster_dbt.build_freshness_checks_from_dbt_assets) for more. + +### Integrations + +- Dagster Pipes (`PipesSubprocessClient`) and its integrations with Lambda (`PipesLambdaClient`), Kubernetes (`PipesK8sClient`), and Databricks (`PipesDatabricksClient`) are no longer experimental. +- The new `DbtProject` class ([docs](https://docs.dagster.io/_apidocs/libraries/dagster-dbt#dagster_dbt.DbtProject)) makes it simpler to define dbt assets that can be constructed in both development and production. `DbtProject.prepare_if_dev()` eliminates boilerplate for local development, and the `dagster-dbt project prepare-and-package` CLI can help pull deps and generate the manifest at build time (see the sketch below). +- [Experimental] The `dagster-looker` package can be used to define a set of Dagster assets from a Looker project that is defined in LookML and is backed by git. See the [GitHub discussion](https://github.com/dagster-io/dagster/discussions/23479) for more details.
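A short sketch of the `DbtProject` pattern described above; the project directory name is a hypothetical placeholder:

```python
from pathlib import Path

from dagster_dbt import DbtProject

my_project = DbtProject(
    project_dir=Path(__file__).joinpath("..", "my_dbt_project").resolve(),
)
# In development, generates the manifest and pulls dbt deps; in production this
# is a no-op, with `dagster-dbt project prepare-and-package` run at build time.
my_project.prepare_if_dev()
```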
+ ### Dagster Plus + +- Catalog views — In Dagster+, selections into the catalog can now be saved and shared across an organization as catalog views. Catalog views have a name and description, and can be applied to scope the catalog, asset health, and global asset lineage pages against the view’s saved selection. +- Code location history — Dagster+ now stores a history of code location deploys, including the ability to revert to a previously deployed configuration. + +## Changes since 1.7.16 (core) / 0.22.16 (libraries) + +### New + +- The target of both schedules and sensors can now be set using an experimental `target` parameter that accepts an `AssetSelection` or list of assets. Any assets passed this way will also be included automatically in the `assets` list of the containing `Definitions` object. +- `ScheduleDefinition` and `SensorDefinition` now have a `target` argument that can accept an `AssetSelection`. +- You can now wipe materializations for individual asset partitions. +- `AssetSpec` now has a `partitions_def` attribute. All the `AssetSpec`s provided to a `@multi_asset` must have the same `partitions_def`. +- The `assets` argument on `materialize` now accepts `AssetSpec`s. +- The `assets` argument on `Definitions` now accepts `AssetSpec`s. +- The new `merge` method on `Definitions` enables combining multiple `Definitions` objects into a single larger `Definitions` object with their combined contents (see the sketch below). +- Runs requested through the Declarative Automation system now have a `dagster/from_automation_condition: true` tag applied to them. +- Changed the run tags query to be more performant. Thanks [@egordm](https://github.com/egordm)! +- Dagster Pipes and its integrations with Lambda, Kubernetes, and Databricks are no longer experimental. +- The `Definitions` constructor will no longer raise errors when the provided definitions aren’t mutually resolve-able – e.g. when there are conflicting definitions with the same name, unsatisfied resource dependencies, etc. These errors will still be raised at code location load time. The new `Definitions.validate_loadable` static method also allows performing the validation steps that used to occur in the constructor. +- `AssetsDefinition` objects provided to a `Definitions` object will now be deduped by reference equality. That is, the following will now work: + + ```python + from dagster import asset, Definitions + + @asset + def my_asset(): ... + + defs = Definitions(assets=[my_asset, my_asset]) # Deduped into just one AssetsDefinition. + ``` + +- [dagster-embedded-elt] Added translator options for the dlt integration to override the auto-materialize policy, group name, owners, and tags. +- [dagster-sdf] Introduced the dagster-sdf integration for data modeling and transformations powered by sdf. +- [dagster-dbt] Added a new `with_insights()` method which can be used to more easily attach Dagster+ Insights metrics to dbt executions: `dbt.cli(...).stream().with_insights()`. + +### Bugfixes + +- Dagster now raises an error when an op yields an output corresponding to an unselected asset. +- Fixed a bug that caused downstream ops within a graph-backed asset to be skipped when they were downstream of assets within the graph-backed assets that aren’t part of the selection for the current run. +- Fixed a bug where code references did not work properly for self-hosted GitLab instances. Thanks [@cooperellidge](https://github.com/cooperellidge)! +- [ui] When engine events with errors appear in run logs, their metadata entries are now rendered correctly.
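A sketch of the `merge` and `validate_loadable` entries above; the per-domain `Definitions` objects are hypothetical stand-ins:

```python
from dagster import Definitions

ingestion_defs = Definitions()       # stand-ins for per-domain definitions
transformation_defs = Definitions()

defs = Definitions.merge(ingestion_defs, transformation_defs)
# Surface conflicts or unsatisfied resource dependencies now,
# rather than at code location load time.
Definitions.validate_loadable(defs)
```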
+- [ui] The asset catalog greeting now uses your first name from your identity provider. +- [ui] The create alert modal now links to the alerting documentation, and links to the documentation have been updated. +- [ui] Fixed an issue introduced in the 1.7.13 release where some asset jobs were only displaying their ops in the Dagster UI instead of their assets. +- Fixed an issue where terminating a run while it was using the Snowflake python connector would sometimes move it into a FAILURE state instead of a CANCELED state. +- Fixed an issue where backfills would sometimes move into a FAILURE state instead of a CANCELED state when the backfill was canceled. + +### Breaking Changes + +- The experimental and deprecated `build_asset_with_blocking_check` has been removed. Use the `blocking` argument on `@asset_check` instead. +- Users with `mypy` and `pydantic` 1 may now experience a “metaclass conflict” error when using `Config`. Previously this would occur when using pydantic 2. +- `AutoMaterializeSensorDefinition` has been renamed `AutomationConditionSensorDefinition`. +- The deprecated methods of the `ComputeLogManager` have been removed. Custom `ComputeLogManager` implementations must also implement the `CapturedLogManager` interface. This will not affect any of the core implementations available in the core `dagster` package or the library packages. +- By default, an `AutomationConditionSensorDefinition` with the name `“default_automation_condition_sensor”` will be constructed for each code location, and will handle evaluating and launching runs for all `AutomationConditions` and `AutoMaterializePolicies` within that code location. You can restore the previous behavior by setting: + ```yaml + auto_materialize: + use_sensors: False + ``` + in your dagster.yaml file. +- [dagster-dbt] Support for `dbt-core==1.6.*` has been removed because the version is now end-of-life. +- [dagster-dbt] The following deprecated APIs have been removed: + - `KeyPrefixDagsterDbtTranslator` has been removed. To modify the asset keys for a set of dbt assets, implement `DagsterDbtTranslator.get_asset_key()` instead. + - Support for setting freshness policies through dbt metadata on field `+meta.dagster_freshness_policy` has been removed. Use `+meta.dagster.freshness_policy` instead. + - Support for setting auto-materialize policies through dbt metadata on field `+meta.dagster_auto_materialize_policy` has been removed. Use `+meta.dagster.auto_materialize_policy` instead. + - Support for `load_assets_from_dbt_project`, `load_assets_from_dbt_manifest`, and `dbt_cli_resource` has been removed. Use `@dbt_assets`, `DbtCliResource`, and `DbtProject` instead to define how to load dbt assets from a dbt project and to execute them. + - Support for rebuilt ops like `dbt_run_op`, `dbt_compile_op`, etc. has been removed. Use `@op` and `DbtCliResource` directly to execute dbt commands in an op. +- Properties on `AssetExecutionContext`, `OpExecutionContext`, and `ScheduleExecutionContext` that include `datetime`s now return standard Python `datetime` objects instead of [Pendulum datetimes](https://pendulum.eustace.io/docs/). The types in the public API for these properties have always been `datetime` and this change should not be breaking in the majority of cases, but Pendulum datetimes include some additional methods that are not present on standard Python `datetime`s, and any code that was using those methods will need to be updated to either no longer use those methods or transform the `datetime` into a Pendulum datetime.
See the 1.8 migration guide for more information and examples. +- `MemoizableIOManager`, `VersionStrategy`, `SourceHashVersionStrategy`, `OpVersionContext`, `ResourceVersionContext`, and `MEMOIZED_RUN_TAG`, which have been deprecated and experimental since pre-1.0, have been removed. + +### Deprecations + +- The Run Status column of the Backfills page has been removed. This column was only populated for backfills of jobs. To see the run statuses for job backfills, click on the backfill ID to get to the Backfill Details page. +- The experimental `external_assets_from_specs` API has been deprecated. Instead, you can directly pass `AssetSpec` objects to the `assets` argument of the `Definitions` constructor. +- `AutoMaterializePolicy` has been marked as deprecated in favor of `AutomationCondition`, which provides a significantly more flexible and customizable interface for expressing when an asset should be executed. More details on how to migrate your `AutoMaterializePolicies` can be found in the Migration Guide. +- `SourceAsset` has been deprecated. See the major changes section and migration guide for more details. +- The `asset_partition_key_for_output`, `asset_partition_keys_for_output`, `asset_partition_key_range_for_output`, and `asset_partitions_time_window_for_output` methods on `OpExecutionContext` have been deprecated. Instead, use the corresponding property: `partition_key`, `partition_keys`, `partition_key_range`, or `partition_time_window` (see the sketch below). +- The `partitions_def` parameter on `define_asset_job` is now deprecated. The `partitions_def` for an asset job is determined from the `partitions_def` attributes on the assets it targets, so this parameter is redundant. +- [dagster-shell] `create_shell_command_op` and `create_shell_script_op` have been marked as deprecated in favor of `PipesSubprocessClient` (see details in [Dagster Pipes subprocess reference](https://docs.dagster.io/concepts/dagster-pipes/subprocess/reference)) +- [dagster-airbyte] `load_assets_from_airbyte_project` is now deprecated, because the Octavia CLI that it relies on is an experimental feature that is no longer supported. Use `build_airbyte_assets` or `load_assets_from_airbyte_instance` instead. + +### Documentation + +- The Asset Checks concept overview page now includes a table with all the built-in asset checks. +- The Asset Metadata concept page now includes a table with all the standard “dagster/” metadata keys. +- Fixed a typo in the documentation for `MonthlyPartitionsDefinition`. Thanks [@zero_stroke](https://github.com/zero_stroke)! +- Added a new page about Declarative Automation and a guide about customizing automation conditions. +- Fixed a link in the Limiting concurrency guide. + +### Dagster Plus + +- In Dagster+, selections into the catalog can now be saved and shared across an organization as catalog views. Catalog views have a name and description, and can be applied to scope the catalog, asset health, and global asset lineage pages against the view’s saved selection. +- In Dagster+ run alerts, if you are running Dagster 1.8 or greater in your user code, you will now receive exception-level information in the alert body. + +# 1.7.16 (core) / 0.23.16 (libraries) + +### Experimental + +- [pipes] PipesGlueClient, an AWS Glue pipes client, has been added to `dagster_aws`.
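A minimal sketch of the property-based replacements named in the deprecation above; the asset and partitions definition are hypothetical:

```python
from dagster import AssetExecutionContext, DailyPartitionsDefinition, asset

@asset(partitions_def=DailyPartitionsDefinition(start_date="2024-01-01"))
def my_partitioned_asset(context: AssetExecutionContext) -> None:
    # Use the context properties instead of the deprecated *_for_output methods.
    context.log.info(f"key: {context.partition_key}")
    context.log.info(f"range: {context.partition_key_range}")
```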
+ +# 1.7.15 (core) / 0.23.15 (libraries) + +### New + +- [dagster-celery-k8s] Added a `per_step_k8s_config` configuration option to the `celery_k8s_job_executor`, allowing the k8s configuration of individual steps to be configured at run launch time. Thanks [@alekseik1](https://github.com/alekseik1)! +- [dagster-dbt] Deprecated the `log_column_level_metadata` macro in favor of the new `with_column_metadata` API. +- [dagster-airbyte] Deprecated `load_assets_from_airbyte_project` as the Octavia CLI has been deprecated. + +### Bugfixes + +- [ui] Fixed global search to find matches on very long strings. +- Fixed an issue introduced in the 1.7.14 release where multi-asset sensors would sometimes raise an error about fetching too many event records. +- Fixed an issue introduced in 1.7.13 where type-checkers interpreted the return type of `RunRequest(...)` as `None`. +- [dagster-aws] Fixed an issue where the `EcsRunLauncher` would sometimes fail to launch runs when the `include_sidecars` option was set to `True`. +- [dagster-dbt] Fixed an issue where errors would not propagate through deferred metadata fetches. + +### Dagster Plus + +- On June 20, 2024, AWS changed the AWS CloudMap CreateService API to allow resource-level permissions. The Dagster+ ECS Agent uses this API to launch code locations. We’ve updated the Dagster+ ECS Agent CloudFormation template to accommodate this change for new users. Existing users have until October 14, 2024 to add the new permissions and should have already received similar communication directly from AWS. +- Fixed a bug with BigQuery cost tracking in Dagster+ insights, where some runs would fail if there were null values for either `total_byte_billed` or `total_slot_ms` in the BigQuery `INFORMATION_SCHEMA.JOBS` table. +- Fixed an issue where code locations that failed to load with extremely large error messages or stack traces would sometimes cause errors with agent heartbeats until the code location was redeployed. + +# 1.7.14 (core) / 0.23.14 (libraries) + +### New + +- [blueprints] When specifying an asset key in `ShellCommandBlueprint`, you can now use slashes as a delimiter to generate an `AssetKey` with multiple path components. +- [community-contribution][mlflow] The mlflow resource now has a `mlflow_run_id` attribute (Thanks Joe Percivall!) +- [community-contribution][mlflow] The mlflow resource will now retry when it fails to fetch the mlflow run ID (Thanks Joe Percivall!) + +### Bugfixes + +- Fixed an issue introduced in the 1.7.13 release where Dagster would fail to load certain definitions when using Python 3.12.4. +- Fixed an issue where in-progress steps would continue running after an unexpected exception caused a run to fail. +- [dagster-dbt] Fixed an issue where column lineage was unable to be built in self-referential incremental models. +- Fixed an issue where `dagster dev` was logging unexpectedly without the `grpcio<1.65.0` pin. +- Fixed an issue where a `ContextVar was created in a different context` error was raised when executing an async asset. +- [community-contribution] `multi_asset` type-checker fix from @aksestok, thanks! +- [community-contribution][ui] Fix to use relative links for manifest/favicon files, thanks @aebrahim! + +### Documentation + +- [community-contribution] Fixed helm repo CLI command typo, thanks @fxd24! + +### Dagster Plus + +- [ui] The deployment settings yaml editor is now on a page with its own URL, instead of within a dialog.
+ +# 1.7.13 (core) / 0.23.13 (libraries) + +### New + +- The `InputContext` passed to an `IOManager`’s `load_input` function when invoking the `output_value` or `output_for_node` methods on `JobExecutionResult` now has the name `"dummy_input_name"` instead of `None`. +- [dagster-ui] Asset materializations can now be reported from the dropdown menu in the asset list view. +- [dagster-dbt] `DbtProject` is adopted and no longer experimental. Using `DbtProject` helps achieve a setup where the dbt manifest file and dbt dependencies are available and up-to-date, during development and in production. Check out the API docs for more: [https://docs.dagster.io/\_apidocs/libraries/dagster-dbt#dagster_dbt.DbtProject](https://docs.dagster.io/_apidocs/libraries/dagster-dbt#dagster_dbt.DbtProject). +- [dagster-dbt] The `--use-dbt-project` flag was introduced for the CLI command `dagster-dbt project scaffold`. Creating a Dagster project wrapping a dbt project using that flag will include a `DbtProject`. +- [dagster-ui] The Dagster UI now loads events in batches of 1000 in the run log viewer, instead of batches of 10000. This value can be adjusted by setting the `DAGSTER_UI_EVENT_LOAD_CHUNK_SIZE` environment variable on the Dagster webserver. +- Asset backfills will now retry if there is an unexpected exception raised in the middle of the backfill. Previously, they would only retry if there was a problem connecting to the code server while launching runs in the backfill. +- Added the ability to monitor jobs which have failed to start in time with the `RunFailureReason.START_TIMEOUT` run monitoring failure reason. Thanks @jobicarter! +- [experimental] Introduced the ability to attach code references to your assets, which allow you to view source code for an asset in your editor or in git source control. For more information, see the code references docs: [https://docs.dagster.io/guides/dagster/code-references](https://docs.dagster.io/guides/dagster/code-references). +- [ui] Performance improvements to loading the asset overview tab. +- [ui] Performance improvements for rendering gantt charts with 1000s of ops/steps. +- [dagster-celery] Introduced a new Dagster Celery runner, a more lightweight way to run Dagster jobs without an executor. Thanks, @egordm! + +### Bugfixes + +- Fixed a bug that caused tags added to `ObserveResult` objects to not be stored with the produced `AssetObservation` event. +- Fixed a bug which could cause `metadata` defined on `SourceAssets` to be unavailable when accessed in an IOManager. +- For subselections of graph-backed multi-assets, there are some situations where we used to unnecessarily execute some of the non-selected assets. Now, we no longer execute them in those situations. There are also some situations where we would skip execution of some ops that might be needed. More information on the particulars is available [here](https://github.com/dagster-io/dagster/pull/22733). +- Fixed the `@graph_asset` decorator overload missing an `owners` argument (see the sketch below), thanks @askvinni! +- Fixed behavior of passing custom image config to the K8sRunLauncher, thanks @[marchinho11](https://github.com/marchinho11)! +- [dagster-dbt] Fixed an issue with emitting column lineage when using BigQuery. +- [dagster-k8s] Added additional retries to `execute_k8s_job` when there was a transient failure while loading logs from the launched job. Thanks [@piotrmarczydlo](https://github.com/piotrmarczydlo)!
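A small sketch exercising the `owners` overload fix mentioned above; the op and owner handle are hypothetical:

```python
from dagster import graph_asset, op

@op
def compute() -> int:
    return 1

# `owners` accepts team handles and email addresses.
@graph_asset(owners=["team:data-eng"])
def owned_graph_asset() -> int:
    return compute()
```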
+- [dagster-fivetran] Fixed an issue where the Fivetran connector resource would sometimes hang if there was a networking issue while connecting to the Fivetran API. +- [dagster-aws] Fixed an issue where the EMR step launcher would sometimes fail due to multiple versions of the `dateutil` package being installed in the default EMR python environment. +- [ui] The “Create date” column in the runs table now correctly shows the time at which a run was created instead of the time when it started to execute. +- [ui] Fixed dark mode colors in run partitions graphs. +- [auto-materialize] Fixed an issue which could cause errors in the `AutoMaterializeRule.skip_on_parent_missing` rule when a parent asset had its `PartitionsDefinition` changed. +- [declarative-automation] Fixed an issue which could cause errors when viewing the evaluation history of assets with `AutomationConditions`. +- [declarative-automation] Previously, `AutomationCondition.newly_updated()` would trigger on any `ASSET_OBSERVATION` event. Now, it only triggers when the data version on that event changes. + +### Breaking Changes + +- [dagster-dbt] The CLI command `dagster-dbt project prepare-for-deployment` has been replaced by `dagster-dbt project prepare-and-package`. +- [dagster-dbt] During development, `DbtProject` no longer prepares the dbt manifest file and dbt dependencies in its constructor during initialization. This process has been moved to `prepare_if_dev()`, which can be called on the `DbtProject` instance after initialization. Check out the API docs for more: [https://docs.dagster.io/\_apidocs/libraries/dagster-dbt#dagster_dbt.DbtProject.prepare_if_dev](https://docs.dagster.io/_apidocs/libraries/dagster-dbt#dagster_dbt.DbtProject.prepare_if_dev). + +### Deprecations + +- Passing `GraphDefinition` as the `job` argument to schedules and sensors is deprecated. Derive a job from the `GraphDefinition` using `graph_def.to_job()` and pass this instead. + +### Documentation + +- Added some additional copy, headings, and other formatting to the [dbt quickstart](https://docs.dagster.io/integrations/dbt/quickstart). +- Added information about asset checks to the [Testing assets guide](https://docs.dagster.io/guides/dagster/testing-assets). +- Updated `dagster-plus CLI` in the sidenav to correctly be `dagster-cloud CLI`. +- Thanks to Tim Nordenfur and Dimitar Vanguelov for fixing a few typos! +- Introduced guides to migrate Airflow pipelines to Dagster that leverage the TaskFlow API or are containerized and executed with an operator like the KubernetesPodOperator. +- Fixed instructions on setting secrets in Kubernetes Dagster deployments, thanks @abhinavDhulipala! + +### Dagster Plus + +- A history of code location deploys can now be viewed on the Code Locations tab under the Deployment view. Previously deployed versions can now be restored from history. +- [ui] Various improvements have been made to the asset health dashboard, which is now no longer experimental. +- [ui] Fixed issues in per-event asset insights where bar charts incorrectly displayed events in reverse order, and with UTC timestamps. +- Fixed a recent regression where creating an alert that notifies asset owners that are teams raised an error.
+
+# 1.7.12 (core) / 0.23.12 (libraries)
+
+### Bugfixes
+
+- [ui] Fixed behavior issues with jobs and asset pages introduced in 1.7.11.
+
+# 1.7.11 (core) / 0.23.11 (libraries)
+
+### New
+
+- [ui] Improved performance for loading assets that are part of big asset graphs.
+- [ui] Improved performance for loading job backfills that have thousands of partitions.
+- [ui] The code location page can now be filtered by status.
+- [agent] The K8s and ECS agent main loop now writes a sentinel file that can be used for liveness checks.
+- [agent][experimental] ECS CloudFormation template with private IP addresses using NAT Gateways, security groups, IAM role separation, tighter permissions requirements, and improved documentation.
+- Ephemeral asset jobs are now supported in run status sensors (thanks [@the4thamigo-uk](https://github.com/the4thamigo-uk))!
+
+### Bugfixes
+
+- In `AssetsDefinition` construction, enforce a single key per output name.
+- Fixed a bug where freshness checks on assets with both observations and materializations would incorrectly miss a materialization if there’s no observation with `dagster/last_updated_timestamp`.
+- Fixed a bug with anomaly detection freshness checks where a “not enough records” result would cause the sensor to crash loop.
+- Fixed a bug that could cause errors in the Asset Daemon if an asset using the `AutoMaterializeRule.skip_on_not_all_parents_updated_since_cron()` rule gained a new dependency with a different PartitionsDefinition.
+- [ui] Fixed an issue that caused the backfill page not to be scrollable.
+- [ui] Fixed an issue where filtering by partition on the Runs page wouldn’t work if fetching all of your partitions timed out.
+- [dagster-dlt] Fixed a bug with the dlt integration in which partitioned assets would change the file name when using the filesystem destination.
+- [ui] Fixed an issue where an erroring code location would cause multiple toast popups.
+- Allowed a string to be provided for the `source_key_prefix` arg of `load_assets_from_modules` (thanks [@drjlin](https://github.com/drjlin))!
+- Added a missing debug-level log message when loading partitions with polars (thanks [Daniel Gafni](https://github.com/danielgafni))!
+- Set the Postgres timeout via statement, which improves storage-layer compatibility with Amazon RDS (thanks [@james lewis](https://github.com/jameslewisfaculty))!
+- In the dbt integration, quoted table identifiers to handle cases where table names require quotes due to special characters (thanks [@alex launi](https://github.com/lamalex))!
+- Removed deprecated param usage in the dagster-wandb integration (thanks [@chris histe](https://github.com/chrishiste))!
+- Added the missing QUEUED state to `DatabricksRunLifeCycleState` (thanks [@gabor ratky](https://github.com/gaborratky-db))!
+- Fixed a bug with the dbt Cloud integration's subsetting implementation (thanks [@ivan tsarev](https://github.com/mudravrik))!
+
+### Breaking Changes
+
+- [dagster-airflow] `load_assets_from_airflow_dag` no longer allows multiple tasks to materialize the same asset.
+
+### Documentation
+
+- Added type hints to the backfills example.
+- Added syntax highlighting to some examples (thanks [@Niko](https://github.com/nikomancy))!
+- Fixed a broken link (thanks [@federico caselli](https://github.com/caselit))!
+
+### Dagster Plus
+
+- The `dagster-cloud ci init` CLI will now use the `--deployment` argument as the base deployment when creating a branch deployment. This base deployment will be used for Change Tracking.
+- The BigQuery dbt insights wrapper `dbt_with_bigquery_insights` now respects CLI arguments for profile configuration and also selects location / dataset from the profile when available.
+- [experimental feature] Fixed a recent regression where the UI errored upon attempting to create an insights metric alert.
+
+# 1.7.10 (core) / 0.23.10 (libraries)
+
+### New
+
+- Performance improvements when rendering the asset graph while runs are in progress.
+- Added a new API, `build_freshness_checks_for_dbt_assets`, which allows users to parameterize freshness checks entirely within dbt (see the sketch at the end of these release notes). Check out the API docs for more: https://docs.dagster.io/_apidocs/libraries/dagster-dbt#dbt-dagster-dbt.
+- Asset search results now display compute and storage kind icons.
+- Asset jobs where the underlying assets have multiple backfill policies will no longer fail at definition time. Instead, the backfill policy for the job will use the minimum `max_partitions_per_run` from the job’s constituent assets.
+- [dagstermill] `asset_tags` can now be specified when building dagstermill assets.
+- [dagster-embedded-elt] Custom asset tags can be applied to Sling assets via the `DagsterSlingTranslator`.
+- [dagster-embedded-elt] dlt assets now automatically have `dagster/storage_kind` tags attached.
+
+### Bugfixes
+
+- `tags` passed to `outs` in `graph_multi_asset` now get correctly propagated to the resulting assets.
+- [ui] Fixed an issue where, when multiple runs were started at the same time to materialize the same asset, the most recent one was not always shown as in progress in the asset graph in the Dagster UI.
+- The “newly updated” auto-materialize rule will now respond to either new observations or materializations for observable assets.
+- `build_metadata_bounds_checks` now no longer errors when targeting metadata keys that have special characters.
+
+### Documentation
+
+- The [Schedule concept docs](https://docs.dagster.io/concepts/automation/schedules) got a revamp! Specifically, we:
+  - Updated the Schedule concept page to be a “jumping off” point for all-things scheduling, including a high-level look at how schedules work, their benefits, and what you need to know before diving in
+  - Added some basic how-to guides for [automating assets](https://docs.dagster.io/concepts/automation/schedules/automating-assets-schedules-jobs) and [ops](https://docs.dagster.io/concepts/automation/schedules/automating-ops-schedules-jobs) using schedules
+  - Added a [reference of schedule-focused examples](https://docs.dagster.io/concepts/automation/schedules/examples)
+  - Added dedicated guides for common schedule uses, including creating [partitioned schedules](https://docs.dagster.io/concepts/automation/schedules/partitioned-schedules), [customizing executing timezones](https://docs.dagster.io/concepts/automation/schedules/customizing-executing-timezones), [testing](https://docs.dagster.io/concepts/automation/schedules/testing), and [troubleshooting](https://docs.dagster.io/concepts/automation/schedules/troubleshooting)
+
+### Dagster Plus
+
+- [experimental] The backfill daemon can now store logs and display them in the UI for increased visibility into the daemon’s behavior. Please contact Dagster Labs if you are interested in piloting this experimental feature.
+- Added a `--read-only` flag to the `dagster-cloud ci branch-deployment` CLI command, which returns the current branch deployment name for the current code repository branch without updating the status of the branch deployment.
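+
+As promised above, a minimal sketch of `build_freshness_checks_for_dbt_assets`; the manifest path and project directory are hypothetical, the `dbt_assets` keyword is an assumption, and per-model freshness parameters are expected to live in your dbt project's meta config:
+
+```python
+from dagster import Definitions
+from dagster_dbt import DbtCliResource, build_freshness_checks_for_dbt_assets, dbt_assets
+
+@dbt_assets(manifest="target/manifest.json")  # hypothetical manifest path
+def my_dbt_assets(context, dbt: DbtCliResource):
+    yield from dbt.cli(["build"], context=context).stream()
+
+# Builds freshness checks for dbt assets whose freshness is configured in dbt.
+freshness_checks = build_freshness_checks_for_dbt_assets(dbt_assets=[my_dbt_assets])
+
+defs = Definitions(
+    assets=[my_dbt_assets],
+    asset_checks=freshness_checks,
+    resources={"dbt": DbtCliResource(project_dir=".")},  # hypothetical project dir
+)
+```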
+
+# 1.7.9 (core) / 0.23.9 (libraries)
+
+### New
+
+- Dagster will now display a “storage kind” tag on assets in the UI, similar to the existing compute kind. To set the storage kind for an asset, set its `dagster/storage_kind` tag.
+- You can now set a retry policy on dbt assets, to enable coarse-grained retries with delay and jitter. For fine-grained partial retries, we still recommend invoking `dbt retry` within a try/except block to avoid unnecessary, duplicate work.
+- `AssetExecutionContext` now exposes a `has_partition_key_range` property.
+- The `owners`, `metadata`, `tags`, and `deps` properties on `AssetSpec` are no longer `Optional`. The `AssetSpec` constructor still accepts `None` values, which are coerced to empty collections of the relevant type.
+- The `docker_executor` and `k8s_job_executor` now consider at most 1000 events at a time when loading events from the current run to determine which steps should be launched. This value can be tuned by setting the `DAGSTER_EXECUTOR_POP_EVENTS_LIMIT` environment variable in the run process.
+- Added a `dagster/retry_on_asset_or_op_failure` tag that can be added to jobs to override run retry behavior for runs of specific jobs. See [the docs](https://docs.dagster.io/deployment/run-retries#combining-op-and-run-retries) for more information.
+- Improved the sensor produced by `build_sensor_for_freshness_checks` to describe when/why it skips evaluating freshness checks.
+- A new “Runs” tab on the backfill details page allows you to see list and timeline views of the runs launched by the backfill.
+- [dagster-dbt] dbt will now attach relation identifier metadata to asset materializations to indicate where the built model is materialized to.
+- [dagster-graphql] The GraphQL Python client will now include the HTTP error code in the exception when a query fails. Thanks [@yuvalgimmunai](https://github.com/yuvalgimmunai)!
+
+### Bugfixes
+
+- Fixed sensor logging behavior with the `@multi_asset_sensor`.
+- `ScheduleDefinition` now properly supports being passed a `RunConfig` object.
+- Previously, when an asset function returned a `MaterializeResult` but had no return type annotation, the I/O manager would still be invoked with a `None` value. Now, the I/O manager is not invoked.
+- The `AssetSpec` constructor now raises an error if an invalid owner string is passed to it.
+- When using the `graph_multi_asset` decorator, the `code_version` property on the `AssetOut`s passed in was previously ignored. It is now respected.
+- [dagster-deltalake] Fixed a GcsConfig import error and a type error for partitioned assets (Thanks [@thmswt](https://github.com/thmswt))
+- The asset graph and asset catalog now show the materialization status of external assets (when manually reported) rather than showing “Never observed”.
+
+### Documentation
+
+- The External Assets REST APIs now have their own [reference page](https://docs.dagster.io/apidocs/external-assets-rest).
+- Added details, updated copy, and improved formatting to the External Assets REST API docs.
+
+### Dagster Plus
+
+- The ability to set a custom base deployment when creating a branch deployment has been enabled for all organizations.
+- When a code location fails to deploy, the Kubernetes agent now includes any additional warning messages from the underlying replicaset in the failure message to aid with troubleshooting.
+- Serverless deployments now support using a requirements.txt with [hashes](https://pip.pypa.io/en/stable/topics/secure-installs/#secure-installs).
+- Fixed an issue where the `dagster-cloud job launch` command did not support specifying asset keys with prefixes in the `--asset-key` argument.
+- [catalog UI] Catalog search now allows filtering by type, i.e. `group:`, `code location:`, `tag:`, `owner:`.
+- New Dagster+ accounts will now start with two default alert policies: one to alert if the default free credit budget for your plan is exceeded, and one to alert if a single run goes over 24 hours. These alerts will be sent to the email address with which the account was initially created.
+
+# 1.7.8 (core) / 0.23.8 (libraries)
+
+### New
+
+- Backfills created via GraphQL can have a custom title and description.
+- `Definitions` now has a `get_all_asset_specs` method, which allows iterating over properties of the defined assets.
+- [ui] In filter dropdowns, it’s now possible to submit before all the suggestions have been loaded (thanks [@bmalehorn](https://github.com/bmalehorn)!)
+- [ui] Performance improvements when loading the Dagster UI for asset graphs with thousands of partition keys.
+- [dagster-dbt] dbt asset checks now emit execution duration and the number of failing rows as metadata.
+- [dagster-embedded-elt] Added support for partitioning in dlt assets (thanks [@edsoncezar16](https://github.com/edsoncezar16)!)
+- [dagster-embedded-elt] Added the ability to set custom metadata on dlt assets (thanks [@edsoncezar16](https://github.com/edsoncezar16)!)
+- [dagster-graphql] Added a `terminate_runs` method to the Python GraphQL Client (thanks [@baumann-t](https://github.com/baumann-t)!)
+- [dagster-polars] dagster-polars I/O managers now emit `dagster/row_count` metadata (thanks [@danielgafni](https://github.com/danielgafni)!)
+- [dagster-dbt] `DbtCliInvocation` now has a `.get_error()` method that can be useful when using `dbt.cli(..., raise_on_error=False)`.
+
+### Bugfixes
+
+- Fixed a bug with the legacy `DynamicPartitionsDefinition` (using `partitions_fn`) that caused a crash during job backfills.
+- [ui] On the asset graph, filtering to one or more code locations via the Filter dropdown now works as expected.
+- [ui] On the asset overview page, viewing an asset with no definition in a loaded code location no longer renders a clipped empty state.
+
+### Experimental
+
+- The new `build_metadata_bounds_checks` API creates asset checks which verify that numeric metadata values on asset materializations fall within min or max values. See the [documentation](https://docs.dagster.io/_apidocs/asset-checks#dagster.build_metadata_bounds_checks) for more information.
+
+### Documentation
+
+- Added details and links to the [Schedules and Sensors API documentation](https://docs.dagster.io/_apidocs/schedules-sensors).
+- Removed a leftover mention of Dagster Cloud from the [Dagster+ Hybrid architecture documentation](https://docs.dagster.io/dagster-plus/deployment/hybrid).
+
+### Dagster Plus
+
+- Fixed an incompatibility between `build_sensor_for_freshness_checks` and Dagster Plus. This API should now work when used with Dagster Plus.
+- [ui] Billing / usage charts no longer appear black-on-black in Dagster’s dark mode.
+- [ui] The asset catalog is now available for Teams plans.
+- [ui] Fixed a bug where the alert policy editor would misinterpret the threshold on a long-running job alert.
+- [kubernetes] Added a `dagsterCloudAgent.additionalPodSpecConfig` setting to the Kubernetes agent Helm chart, allowing arbitrary pod configuration to be applied to the agent pod.
+- [ECS] Fixed an issue where the ECS agent would sometimes raise a “Too many concurrent attempts to create a new revision of the specified family” exception when using agent replicas.
+
+# 1.7.7 (core) / 0.23.7 (libraries)
+
+### New
+
+- [ui] Command-clicking on nodes in the asset lineage tab will now open them in a separate tab. Same with external asset links in the asset graph.
+- Added support for setting a custom job namespace in user code deployments. (thanks [@tmatthews0020](https://github.com/tmatthews0020)!)
+- Removed warnings due to use of `datetime.utcfromtimestamp` (thanks [@dbrtly](https://github.com/dbrtly)!)
+- A custom SMTP user can now be used for e-mail alerts (thanks [@edsoncezar16](https://github.com/edsoncezar16)!)
+- [dagster-dbt] Added support for `dbt-core==1.8.*`.
+- [dagster-embedded-elt] Failed dlt pipelines are now accurately reflected on the asset materialization (thanks [@edsoncezar16](https://github.com/edsoncezar16)!)
+
+### Bugfixes
+
+- Fixed spurious errors in logs due to module shadowing.
+- Fixed an issue in the Backfill Daemon where, if the assets to be materialized had different `BackfillPolicy`s, each asset would get materialized in its own run, rather than grouping assets together into a single run.
+- Fixed an issue that could cause the Asset Daemon to lose information in its cursor about an asset if that asset’s code location was temporarily unavailable.
+- [dagster-dbt] Mitigated issues with CLI length limits by only listing specific dbt tests as needed when the tests aren’t included via indirect selection, rather than listing all tests.
+
+### Documentation
+
+- Markdoc tags can now be used in place of MDX components (thanks [@nikomancy](https://github.com/nikomancy))
+
+# 1.7.6 (core) / 0.23.6 (libraries)
+
+### New
+
+- The backfill daemon now has additional logging to document the progression through each tick and why assets are and are not materialized during each evaluation of a backfill.
+- Made performance improvements in both calculating and storing data versions for assets, especially for assets with a large fan-in.
+- Standardized table row count metadata output by various integrations to `dagster/row_count` (see the sketch after the Documentation section below).
+- [dagster-aws][community-contribution] Additional parameters can now be passed to the following resources: `CloudwatchLogsHandler`, `ECRPublicClient`, `SecretsManagerResource`, `SSMResource`, thanks `@jacob-white-simplisafe`!
+- Added additional frontend telemetry. See https://docs.dagster.io/about/telemetry for more information.
+
+### Bugfixes
+
+- Fixed an issue that could cause runs to fail if they targeted any assets which had a metadata value of type `TableMetadataValue`, `TableSchemaMetadataValue`, or `TableColumnLineageMetadataValue` defined.
+- Fixed an issue which could cause evaluations produced via the Auto-materialize system to not render the “skip”-type rules.
+- Backfills of asset jobs now correctly use the `BackfillPolicy` of the underlying assets in the job.
+- [dagster-databricks][community-contribution] `databricks-sdk` version bumped to `0.17.0`, thanks `@lamalex`!
+- [helm][community-contribution] Resolved incorrect comments about `dagster code-server start`, thanks `@SanjaySiddharth`!
+
+### Documentation
+
+- Added section headings to Pipes API references, along with explanatory copy and links to relevant pages.
+- Added a guide for subsetting asset checks.
+- Added more detailed steps for transitioning from Serverless to Hybrid.
+- [community-contribution] Corrected asset selection syntax, thanks `@JonathanLai2004`!
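+
+As referenced in the 1.7.6 notes above, integrations now standardize on the `dagster/row_count` metadata key. A minimal sketch of emitting it from your own asset (the asset and its data are hypothetical):
+
+```python
+from dagster import MaterializeResult, asset
+
+@asset
+def orders():
+    rows = [("a", 1), ("b", 2)]  # hypothetical data load
+    # Emit the standardized row-count metadata key.
+    return MaterializeResult(metadata={"dagster/row_count": len(rows)})
+```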
+
+### Dagster Plus
+
+- Fixed an issue where Dagster Cloud agents would wait longer than necessary when multiple code locations were timing out during a deployment.
+
+# 1.7.5 (core) / 0.23.5 (libraries)
+
+### New
+
+- The Asset > Checks tab now allows you to view plots of numeric metadata emitted by your checks.
+- The Asset > Events tab now supports infinite-scrolling, making it possible to view all historical materialization and observation events.
+- When constructing a `MaterializeResult`, `ObserveResult`, or `Output`, you can now include tags that will be attached to the corresponding `AssetMaterialization` or `AssetObservation` event. These tags will be rendered on these events in the UI.
+
+### Bugfixes
+
+- Fixed an issue where backfills would sometimes fail if a partition definition was changed in the middle of the backfill.
+- Fixed an issue where, if the code server became unavailable during the first tick of a backfill, the backfill would stall and be unable to submit runs once the code server became available.
+- Fixed an issue where the status of an external asset would not get updated correctly.
+- Fixed an issue where run status sensors would sometimes fall behind in deployments with large numbers of runs.
+- The descriptions and metadata on the experimental `build_last_update_freshness_checks` and `build_time_partition_freshness_checks` APIs have been updated to be clearer.
+- The headers of tables no longer become misaligned when a scrollbar is present in some scenarios.
+- The sensor type, instigation type, and backfill status filters on their respective pages are now saved to the URL, so sharing the view or reloading the page preserves your filters.
+- Typing a `%` into the asset graph’s query selector no longer crashes the UI.
+- “Materializing” states on the asset graph animate properly in both light and dark themes.
+- Thanks to [@lautaro79](https://github.com/lautaro79) for fixing a Helm chart issue.
+
+### Breaking Changes
+
+- Subclasses of `MetadataValue` have been changed from `NamedTuple`s to Pydantic models. `NamedTuple` functionality on these classes was not part of Dagster’s stable public API, but usages relying on their tuple-ness may break. For example: calling `json.dumps` on collections that include them.
+
+### Deprecations
+
+- [dagster-dbt] Support for `dbt-core==1.5.*` has been removed, as it reached [end of life in April 2024](https://docs.getdbt.com/docs/dbt-versions/core).
+
+### Dagster Plus
+
+- Fixed an issue in the `dagster-cloud` CLI where the `--deployment` argument was ignored when the `DAGSTER_CLOUD_URL` environment variable was set.
+- Fixed an issue where the `dagster-cloud-cli` package wouldn’t work unless the `dagster-cloud` package was installed as well.
+- A new “budget alerts” feature has launched for users on self-serve plans. This feature will alert you when you hit your credit limit.
+- The experimental asset health overview now allows you to group assets by compute kind, tag, and tag value.
+- The concurrency and locations pages in settings correctly show Dagster Plus-specific options when experimental navigation is enabled.
+
+# 1.7.4 (core) / 0.23.4 (libraries)
+
+### New
+
+- `TimeWindowPartitionMapping` now supports the `start_offset` and `end_offset` parameters even when the upstream `PartitionsDefinition` is different from the downstream `PartitionsDefinition`.
The offset is expressed in units of downstream partitions, so `TimeWindowPartitionMapping(start_offset=-1)` between an hourly upstream and a daily downstream would map each downstream partition to 48 upstream partitions: those for the same and the preceding day.
+
+### Bugfixes
+
+- Fixed an issue where certain exceptions in the Dagster daemon would immediately retry instead of waiting for a fixed interval before retrying.
+- Fixed a bug with asset checks in complex asset graphs that include cycles in the underlying nodes.
+- Fixed an issue that would cause unnecessary failures on FIPS-enabled systems due to the use of md5 hashes in non-security-related contexts (thanks [@jlloyd-widen](https://github.com/jlloyd-widen)!)
+- Removed `path` metadata from `UPathIOManager` inputs. This eliminates the creation of `ASSET_OBSERVATION` events for every input on every step for the default I/O manager.
+- Added support for defining `owners` on `@graph_asset`.
+- Fixed an issue where having multiple partitions definitions in a location with the same start date but differing end dates could lead to a `DagsterInvalidSubsetError` when trying to launch runs.
+
+### Documentation
+
+- Fixed a few issues with broken pages as a result of the Dagster+ rename.
+- Renamed a few instances of Dagster Cloud to Dagster+.
+- Added a note about external asset + alert incompatibility to the Dagster+ alerting docs.
+- Fixed references to outdated APIs in the freshness checks docs.
+
+### Dagster Plus
+
+- When creating a Branch Deployment via GraphQL or the `dagster-cloud branch-deployment` CLI, you can now specify the base deployment. The base deployment will be used for comparing assets for Change Tracking. For example, to set the base deployment to a deployment named `staging`: `dagster-cloud branch-deployment create-or-update --base-deployment-name staging ...`. Note that once a Branch Deployment is created, the base deployment cannot be changed.
+- Fixed an issue where agents serving many branch deployments simultaneously would sometimes raise a `413: Request Entity Too Large` error when uploading a heartbeat to the Dagster Plus servers.
+
+# 1.7.3 (core) / 0.23.3 (libraries)
+
+### New
+
+- `@graph_asset` now accepts a `tags` argument.
+- [ui] For users whose light/dark mode theme setting is set to match their system setting, the theme will update automatically when the system changes modes (e.g. based on time of day), with no page reload required.
+- [ui] We have introduced the typefaces Geist and Geist Mono as our new default fonts throughout the Dagster app, with the goal of improving legibility, consistency, and maintainability.
+- [ui] [experimental] We have begun experimenting with a [new navigation structure](https://github.com/dagster-io/dagster/discussions/21370) for the Dagster UI. The change can be enabled via User Settings.
+- [ui] [experimental] Made performance improvements to the Concurrency settings page.
+- [dagster-azure] [community-contribution] The ADLS2 I/O manager now supports a custom timeout. Thanks @tomas-gajarsky!
+- [dagster-fivetran] [community-contribution] It’s now possible to specify destination IDs in `load_asset_defs_from_fivetran_instance`. Thanks @lamalex!
+
+### Bugfixes
+
+- Fixed an issue where pressing the “Reset sensor status” button in the UI would also reset the sensor’s cursor.
+- Fixed a bug that caused input loading time not to be included in the reported step duration.
+- Pydantic warnings are no longer raised when importing Dagster with Pydantic 2.0+.
+- Fixed an issue which would cause incorrect behavior when auto-materializing partitioned assets based on updates to a parent asset in a different code location.
+- Fixed an issue which would cause every tick of the auto-materialize sensor to produce an evaluation for each asset, even if nothing had changed from the previous tick.
+- [dagster-dbt] Fixed a bug that could raise `Duplicate check specs` errors with singular tests ingested as asset checks.
+- [embedded-elt] Resolved an issue where a subset of resources was not recognized when using `source.with_resources(...)`.
+- [ui] Fixed an issue where a sensor that targeted an invalid set of asset keys could cause the asset catalog to fail to load.
+- [ui] Fixed an issue in which runs in the Timeline that should have been considered overlapping were not correctly grouped together, leading to visual bugs.
+- [ui] On the asset overview page, job tags no longer render poorly when an asset appears in several jobs.
+- [ui] On the asset overview page, hovering over the timestamp tags in the metadata table explains where each entry originated.
+- [ui] Right-clicking the background of the asset graph now consistently shows a context menu, and the lineage view supports vertical as well as horizontal layout.
+
+### Documentation
+
+- Sidebar navigation now appropriately handles command-click and middle-click to open links in a new tab.
+- Added a section for asset checks to the [Testing guide](https://docs.dagster.io/concepts/testing#testing-asset-checks).
+- Added a guide about [Column-level lineage for assets](https://docs.dagster.io/concepts/metadata-tags/asset-metadata/column-level-lineage).
+- Lots of updates to examples to reflect the new opt-in approach to I/O managers.
+
+### Dagster+
+
+- [ui] [experimental] A new Overview > Asset Health page provides visibility into failed and missing materializations, check warnings, and check errors.
+- [ui] You can now share feedback with the Dagster team directly from the app. Open the Help menu in the top nav, then “Share feedback”. Bugs and feature requests are submitted directly to the Dagster team.
+- [ui] When editing a team, the list of team members is now virtualized, allowing the UI to scale better for very large team sizes.
+- [ui] Fixed dark mode for billing components.
+
+# 1.7.2 (core) / 0.23.2 (libraries)
+
+### New
+
+- Performance improvements when loading large asset graphs in the Dagster UI.
+- `@asset_check` functions can now be invoked directly for unit testing.
+- The `dagster-embedded-elt` dlt resource `DagsterDltResource` can now be used from `@op` definitions in addition to assets.
+- `UPathIOManager.load_partitions` has been added to help `UPathIOManager` subclasses deal with serialization formats that support partitioning. Thanks `@danielgafni`!
+- [dagster-polars] now supports data types other than string for partitioning columns. Also, `PolarsDeltaIOManager` now supports `MultiPartitionsDefinition` with `DeltaLake` native partitioning. The metadata value `"partition_by": {"dim_1": "col_1", "dim_2": "col_2"}` should be specified to enable this feature. Thanks `@danielgafni`!
+
+### Bugfixes
+
+- [dagster-airbyte] Auto-materialization policies passed to `load_assets_from_airbyte_instance` and `load_assets_from_airbyte_project` will now be properly propagated to the created assets.
+- Fixed an issue where deleting a run that was intended to materialize a partitioned asset would sometimes leave the status of that asset as “Materializing” in the Dagster UI.
+- Fixed an issue with `build_time_partition_freshness_checks` where it would incorrectly intuit that an asset was not fresh in certain cases.
+- [dagster-k8s] Fixed an error on transient ‘none’ responses for pod waiting reasons. Thanks [@piotrmarczydlo](https://github.com/piotrmarczydlo)!
+- [dagster-dbt] Failing to build column schema metadata will now result in a warning rather than an error.
+- Fixed an issue where incorrect asset keys would cause a backfill to fail loudly.
+- Fixed an issue where syncing unmaterialized assets could include source assets.
+
+### Breaking Changes
+
+- [dagster-polars] `PolarsDeltaIOManager` no longer supports loading natively partitioned DeltaLake tables as dictionaries. They should be loaded as a single `pl.DataFrame`/`pl.LazyFrame` instead.
+
+### Documentation
+
+- Renamed `Dagster Cloud` to `Dagster+` all over the docs.
+- Added a page about [Change Tracking](https://docs.dagster.io/dagster-plus/managing-deployments/branch-deployments/change-tracking) in Dagster+ branch deployments.
+- Added a section about [user-defined metrics](https://docs.dagster.io/concepts/metadata-tags/asset-metadata#asset-owners) to the Dagster+ Insights docs.
+- Added a section about [Asset owners](https://docs.dagster.io/concepts/metadata-tags/asset-metadata#asset-owners) to the asset metadata docs.
+
+### Dagster Cloud
+
+- Branch deployments now have Change Tracking. Assets in each branch deployment will be compared to the main deployment. New assets and changes to code version, dependencies, partitions definitions, tags, and metadata will be marked in the UI of the branch deployment.
+- PagerDuty alerting is now supported with Pro plans. See the [documentation](https://docs.dagster.io/dagster-cloud/managing-deployments/alerts/pagerduty) for more info.
+- Asset metadata is now included in the insights metrics for jobs materializing those assets.
+- Per-run Insights are now available on individual assets.
+- Previously, the `before_storage_id` / `after_storage_id` values in the `AssetRecordsFilter` class were ignored. This has been fixed.
+- Updated the output of `dagster-cloud deployment alert-policies list` to match the format of `sync`.
+- Fixed an issue where Dagster Cloud agents with many code locations would sometimes leave code servers running after the agent shut down.
+
+# 1.7.1 (core) / 0.23.1 (libraries)
+
+### New
+
+- [dagster-dbt][experimental] A new CLI command `dagster-dbt project prepare-for-deployment` has been added in conjunction with `DbtProject` for managing the behavior of rebuilding the manifest during development and preparing a pre-built one for production.
+
+### Bugfixes
+
+- Fixed an issue with duplicate asset check keys when loading checks from a package.
+- Fixed a bug with the new `build_last_update_freshness_checks` and `build_time_partition_freshness_checks` APIs where `multi_asset` checks passed in would not be executable (see the sketch below).
+- [dagster-dbt] Fixed some issues with building column lineage for incremental models, models with implicit column aliases, and models with columns that have multiple dependencies on the same upstream column.
+
+### Breaking Changes
+
+- [dagster-dbt] The experimental `DbtArtifacts` class has been replaced by `DbtProject`.
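+
+For reference, a minimal sketch of the freshness-check APIs mentioned in the bugfix above; the asset name and threshold are hypothetical:
+
+```python
+from datetime import timedelta
+
+from dagster import asset, build_last_update_freshness_checks
+
+@asset
+def daily_report(): ...
+
+# Checks fail if the asset has not been updated within the last 25 hours.
+freshness_checks = build_last_update_freshness_checks(
+    assets=[daily_report],
+    lower_bound_delta=timedelta(hours=25),
+)
+```
+
+The resulting checks can then be included in your `Definitions` via `asset_checks`, alongside the assets themselves.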
+
+### Documentation
+
+- Added a dedicated concept page for all things [metadata and tags](https://docs.dagster.io/concepts/metadata-tags)
+- Moved asset metadata content to a dedicated concept page: [Asset metadata](https://docs.dagster.io/concepts/metadata-tags/asset-metadata)
+- Added section headings to the [Software-defined Assets API reference](https://docs.dagster.io/_apidocs/assets), which groups APIs by asset type or use
+- Added a guide about [user settings in the Dagster UI](https://docs.dagster.io/concepts/webserver/ui-user-settings)
+- Added `AssetObservation` to the Software-defined Assets API reference
+- Renamed Dagster Cloud GitHub workflow files to the new, consolidated `dagster-cloud-deploy.yml`
+- Miscellaneous formatting and copy updates
+- [community-contribution] [dagster-embedded-elt] Fixed `get_asset_key` API documentation (thanks @aksestok!)
+- [community-contribution] Updated the Python version in the contributing documentation (thanks @piotrmarczydlo!)
+- [community-contribution] Fixed a typo in the README (thanks @MiConnell!)
+
+### Dagster Cloud
+
+- Fixed a bug where an incorrect value was being emitted for BigQuery bytes billed in Insights.
+
+# 1.7.0 (core) / 0.23.0 (libraries)
+
+## Major Changes since 1.6.0 (core) / 0.22.0 (libraries)
+
+- Asset definitions can now have tags, via the `tags` argument on `@asset`, `AssetSpec`, and `AssetOut`. [Tags](https://docs.dagster.io/concepts/metadata-tags/tags) are meant to be used for organizing, filtering, and searching for assets.
+- The Asset Details page has been revamped to include an “Overview” tab that centralizes the most important information about the asset, such as current status, description, and columns, in a single place.
+- Assets can now be assigned owners.
+- Asset checks are now considered generally available and will no longer raise experimental warnings when used.
+- Asset checks can now be marked `blocking`, which causes downstream assets in the same run to be skipped if the check fails with ERROR-level severity.
+- The new `@multi_asset_check` decorator enables defining a single op that executes multiple asset checks.
+- The new `build_last_update_freshness_checks` and `build_time_partition_freshness_checks` APIs allow defining asset checks that error or warn when an asset is overdue for an update. Refer to the [Freshness checks guide](https://docs.dagster.io/concepts/assets/asset-checks/checking-for-data-freshness) for more info.
+- The new `build_column_schema_change_checks` API allows defining asset checks that warn when an asset’s columns have changed since its latest materialization.
+- In the asset graph UI, the “Upstream data”, “Code version changed”, and “Upstream code version” statuses have been collapsed into a single “Unsynced” status. Clicking on “Unsynced” displays more detailed information.
+- I/O managers are now optional. This enhances flexibility for scenarios where they are not necessary. For guidance, see [When to use I/O managers](https://docs.dagster.io/concepts/io-management/io-managers#when-to-use-io-managers).
+  - Assets with `None` or `MaterializeResult` return type annotations won't use I/O managers; dependencies for these assets can be set using the `deps` parameter in the `@asset` decorator.
+- [dagster-dbt] Dagster’s dbt integration can now be configured to automatically collect [metadata about column schema and column lineage](https://docs.dagster.io/integrations/dbt/reference#emit-column-level-metadata-as-materialization-metadata-).
+- [dagster-dbt] dbt tests are now pulled in as Dagster asset checks by default.
+- [dagster-dbt] dbt resource tags are now automatically pulled in as Dagster asset tags.
+- [dagster-snowflake] [dagster-gcp] The dagster-snowflake and dagster-gcp packages now both expose a `fetch_last_updated_timestamps` API, which makes it straightforward to collect data freshness information in source asset observation functions.
+
+## Changes since 1.6.14 (core) / 0.22.14 (libraries)
+
+### New
+
+- Metadata attached during asset or op execution can now be accessed in the I/O manager using `OutputContext.output_metadata`.
+- [experimental] Single-run backfills now support batched inserts of asset materialization events. This is a major performance improvement for large single-run backfills that have database writes as a bottleneck. The feature is off by default and can be enabled by setting the `DAGSTER_EVENT_BATCH_SIZE` environment variable in a code server to an integer (25 recommended, 50 max). It is only currently supported in Dagster Cloud and OSS deployments with a Postgres backend.
+- [ui] The new Asset Details page is now enabled for new users by default. To turn this feature off, you can toggle the feature in the User Settings.
+- [ui] Queued runs now display a link to view all the potential reasons why a run might remain queued.
+- [ui] Starting a run status sensor with a stale cursor will now warn you in the UI that it will resume from the point that it was paused.
+- [asset-checks] Asset checks now support asset names that include `.`, which can occur when checks are ingested from dbt tests.
+- [dagster-dbt] The env var `DBT_INDIRECT_SELECTION` will no longer be set to `empty` when executing dbt tests as asset checks, unless specific asset checks are excluded. `dagster-dbt` will no longer explicitly select all dbt tests with the dbt CLI, which had caused argument length issues.
+- [dagster-dbt] Singular tests with a single dependency are now ingested as asset checks.
+- [dagster-dbt] Singular tests with multiple dependencies must have their primary dependency specified using dbt meta, as shown here (`<ref_name>` is a placeholder for the name of the primary ref):
+
+```sql
+{{
+  config(
+    meta={
+      'dagster': {
+        'ref': {
+          'name': <ref_name>,
+          'package': ... # Optional, if included in the ref.
+          'version': ... # Optional, if included in the ref.
+        },
+      }
+    }
+  )
+}}
+
+...
+```
+
+- [dagster-dbt] Column lineage metadata can now be emitted when invoking dbt. See the [documentation for details](https://docs.dagster.io/integrations/dbt/reference#emit-column-level-metadata-as-materialization-metadata-).
+- [experimental][dagster-embedded-elt] Added the data load tool (dlt) integration for easily building and integrating dlt ingestion pipelines with Dagster.
+- [dagster-dbt][community-contribution] You can now specify a custom schedule name for schedules created with `build_schedule_from_dbt_selection` (see the sketch below). Thanks [@dragos-pop](https://github.com/dragos-pop)!
+- [helm][community-contribution] You can now specify a custom job namespace for your user code deployments. Thanks [@tmatthews0020](https://github.com/tmatthews0020)!
+- [dagster-polars][community-contribution] Column schema metadata is now integrated using the dagster-specific metadata key in `dagster_polars`. Thanks [@danielgafni](https://github.com/danielgafni)!
+- [dagster-datadog][community-contribution] Added the `datadog.api` module to the `DatadogClient` resource, enabling direct access to API methods. Thanks [@shivgupta](https://github.com/shivonchain)!
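+
+A minimal sketch of the custom schedule name addition noted above; the selection string and names are hypothetical, and `my_dbt_assets` is assumed to be an existing `@dbt_assets` definition:
+
+```python
+from dagster_dbt import build_schedule_from_dbt_selection
+
+daily_dbt_schedule = build_schedule_from_dbt_selection(
+    [my_dbt_assets],  # an existing @dbt_assets definition
+    job_name="materialize_dbt_models",
+    cron_schedule="@daily",
+    dbt_select="tag:daily",
+    schedule_name="my_custom_dbt_schedule",  # the new community-contributed argument
+)
+```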
+
+### Bugfixes
+
+- Fixed a bug where run status sensors configured to monitor a specific job would trigger for jobs with the same name in other code locations.
+- Fixed a bug where multi-line asset check result descriptions were collapsed into a single line.
+- Fixed a bug that caused a value to show up under “Target materialization” in the asset check UI even when an asset had had observations but never been materialized.
+- Changed the type hint of the `metadata` argument on `multi_asset` and `AssetSpec` to `Mapping[str, Any]`.
+- [dagster-snowflake-pandas] Fixed a bug introduced in 0.22.4 where column names were not using quote identifiers correctly. Column names will now be quoted.
+- [dagster-aws] Fixed a race condition where simultaneously materializing the same asset more than once would sometimes raise an exception when using the `s3_io_manager`.
+- [ui] Fixed a bug where resizable panels could inadvertently be hidden and never recovered, for instance the right panel on the global asset graph.
+- [ui] Fixed a bug where opening a run with an op selection in the Launchpad could lose the op selection setting for the subsequently launched run. The op selection is now correctly preserved.
+- [community-contribution] Fixed `dagster-polars` tests by excluding `Decimal` types. Thanks [@ion-elgreco](https://github.com/ion-elgreco)!
+- [community-contribution] Fixed a bug where auto-materialize rule evaluation would error on FIPS-compliant machines. Thanks [@jlloyd-widen](https://github.com/jlloyd-widen)!
+- [community-contribution] Fixed an issue where an excessive DeprecationWarning was being issued for a `ScheduleDefinition` passed into the `Definitions` object. Thanks [@2Ryan09](https://github.com/2Ryan09)!
+
+### Breaking Changes
+
+- Creating a run with a custom non-UUID `run_id` was previously private and only used for testing. It will now raise an exception.
+- [community-contribution] Previously, calling `get_partition_keys_in_range` on a `MultiPartitionsDefinition` would erroneously return partition keys that were within the one-dimensional range of alphabetically-sorted partition keys for the definition. Now, this method returns the Cartesian product of partition keys within each dimension’s range. Thanks, [@mst](https://github.com/mst)!
+- Added `AssetCheckExecutionContext` to replace `AssetExecutionContext` as the type of the `context` param passed in to `@asset_check` functions. `@asset_check` was an experimental decorator.
+- [experimental] `@classmethod` decorators have been removed from the `dagster-embedded-elt.sling` `DagsterSlingTranslator`.
+- [dagster-dbt] `@classmethod` decorators have been removed from `DagsterDbtTranslator`.
+- [dagster-k8s] The default merge behavior when raw Kubernetes config is supplied at multiple scopes (for example, at the instance level and for a particular job) has been changed to be more consistent. Previously, configuration was merged shallowly by default, with fields replacing other fields instead of appending or merging. Now, it is merged deeply by default, with lists appended to each other and dictionaries merged, in order to be more consistent with how Kubernetes configuration is combined in all other places. See [the docs](https://docs.dagster.io/deployment/guides/kubernetes/customizing-your-deployment#precedence-rules) for more information, including how to restore the previous default merge behavior.
+
+### Deprecations
+
+- `AssetSelection.keys()` has been deprecated.
Instead, you can now supply asset key arguments to `AssetSelection.assets()` (see the sketch at the end of these release notes).
+- Run tag keys with long lengths and certain characters are now deprecated. For consistency with asset tags, run tag keys are expected to only contain alphanumeric characters, dashes, underscores, and periods. Run tag keys can also contain a prefix section, separated with a slash. The main section and prefix section of a run tag are limited to 63 characters.
+- `AssetExecutionContext` has been simplified. Op-related methods and methods with existing access paths have been marked deprecated. For a full list of deprecated methods see this [GitHub Discussion](https://github.com/dagster-io/dagster/discussions/20974).
+- The `metadata` property on `InputContext` and `OutputContext` has been deprecated and renamed to `definition_metadata`.
+- `FreshnessPolicy` is now deprecated. For monitoring freshness, use freshness checks instead. If you are using `AutoMaterializePolicy.lazy()`, `FreshnessPolicy` is still recommended, and will continue to be supported until an alternative is provided.
+
+### Documentation
+
+- Lots of updates to examples to reflect the recent opt-in nature of I/O managers
+- [Dagster Cloud alert guides](https://docs.dagster.io/dagster-cloud/managing-deployments/alerts) have been split up by alert type:
+  - [Managing alerts in the Dagster Cloud UI](https://docs.dagster.io/dagster-cloud/managing-deployments/alerts/managing-alerts-in-ui)
+  - [Managing alerts using the dagster-cloud CLI](https://docs.dagster.io/dagster-cloud/managing-deployments/alerts/managing-alerts-cli)
+  - [Email alerts](https://docs.dagster.io/dagster-cloud/managing-deployments/alerts/email)
+  - [Microsoft Teams alerts](https://docs.dagster.io/dagster-cloud/managing-deployments/alerts/microsoft-teams)
+  - [Slack alerts](https://docs.dagster.io/dagster-cloud/managing-deployments/alerts/slack)
+- Added info about asset check-based alerts to the Dagster Cloud [alerting docs](https://docs.dagster.io/dagster-cloud/managing-deployments/alerts)
+- The [Asset checks documentation](https://docs.dagster.io/concepts/asset-checks) got a facelift: info about defining and executing asset checks [now lives in its own guide](https://docs.dagster.io/concepts/assets/asset-checks/define-execute-asset-checks)
+- Added a new guide for [using freshness checks](https://docs.dagster.io/concepts/assets/asset-checks/checking-for-data-freshness) to the Asset checks documentation
+- Cleaned up the [Getting help guide](https://docs.dagster.io/getting-started/getting-help): it now includes a high-level summary of all Dagster support resources, making it easier to skim!
+- [community-contribution] Fixed the indentation level of a code snippet in the `dagster-polars` documentation. Thanks [@danielgafni](https://github.com/danielgafni)!
+
+### Dagster Cloud
+
+- The Dagster Cloud agent will now monitor the code servers that it spins up to detect whether they have stopped serving requests, and will automatically redeploy the code server if it has stopped responding for an extended period of time.
+- New additions and bugfixes in Insights:
+  - Added per-metric cost estimation. Estimates can be added via the “Insights settings” button, and will appear in the table and chart for that metric.
+  - Branch deployments are now included in the deployment filter control.
+  - In the Deployments view, fixed deployment links in the data table.
+  - Added support for BigQuery cost metrics.
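+
+As flagged in the deprecation note above, a minimal sketch of migrating from `AssetSelection.keys()`; the asset names are hypothetical:
+
+```python
+from dagster import AssetKey, AssetSelection
+
+# Previously:
+# selection = AssetSelection.keys("my_asset", AssetKey(["prefix", "other_asset"]))
+
+# Now, supply asset key arguments to AssetSelection.assets() directly:
+selection = AssetSelection.assets("my_asset", AssetKey(["prefix", "other_asset"]))
+```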
+
+# 1.6.14 (core) / 0.22.14 (libraries)
+
+### Bugfixes
+
+- [dagster-dbt] Fixed some issues with building column lineage metadata.
+
+# 1.6.13 (core) / 0.22.13 (libraries)
+
+### Bugfixes
+
+- Fixed a bug where an asset with a dependency on a subset of the keys of a parent multi-asset could sometimes crash asset job construction.
+- Fixed a bug where a Definitions object containing assets having integrated asset checks and multiple partitions definitions could not be loaded.
+
+# 1.6.12 (core) / 0.22.12 (libraries)
+
+### New
+
+- `AssetCheckResult` now has a text `description` property. Check evaluation descriptions are shown in the Checks tab on the asset details page.
+- Introduced `TimestampMetadataValue`. Timestamp metadata values are represented internally as seconds since the Unix epoch. They can be constructed using `MetadataValue.timestamp`. In the UI, they’re rendered in the local timezone, like other timestamps in the UI.
+- `AssetSelection.checks` can now accept `AssetCheckKeys` as well as `AssetChecksDefinition`.
+- [community-contribution] Metadata attached to an output at runtime (via either `add_output_metadata` or by passing to `Output`) is now available on `HookContext` under the `op_output_metadata` property. Thanks [@JYoussouf](https://github.com/JYoussouf)!
+- [experimental] `@asset`, `AssetSpec`, and `AssetOut` now accept a `tags` property. Tags are key-value pairs meant to be used for organizing asset definitions. If `"__dagster_no_value"` is set as the value, only the key will be rendered in the UI. `AssetSelection.tag` allows selecting assets that have a particular tag.
+- [experimental] Asset tags can be used in asset CLI selections, e.g. `dagster asset materialize --select tag:department=marketing`.
+- [experimental][dagster-dbt] Tags can now be configured on dbt assets, using `DagsterDbtTranslator.get_tags`. By default, we take the dbt tags configured on your dbt models, seeds, and snapshots.
+- [dagster-gcp] Added the `get_gcs_keys` sensor helper function.
+
+### Bugfixes
+
+- Fixed a bug that prevented external assets with dependencies from displaying properly in the Dagster UI.
+- Fixed a performance regression in loading code locations with large multi-assets.
+- [community-contribution] [dagster-databricks] Fixed a bug with the `DatabricksJobRunner` that led to an inability to use dagster-databricks with Databricks instance pools. Thanks [@smats0n](https://github.com/smats0n)!
+- [community-contribution] Fixed a bug that caused a crash when external assets had hyphens in their `AssetKey`. Thanks [@maxfirman](https://github.com/maxfirman)!
+- [community-contribution] Fixed a bug with `load_assets_from_package_module` that would cause a crash when any submodule had the same directory name as a dependency. Thanks [@CSRessel](https://github.com/CSRessel)!
+- [community-contribution] Fixed a mypy type error, thanks @parthshyara!
+- [community-contribution][dagster-embedded-elt] Fixed an issue where Sling assets would not properly read group and description metadata from replication config, thanks @jvyoralek!
+- [community-contribution] Ensured annotations from the Helm chart properly propagate to Kubernetes run pods, thanks @maxfirman!
+
+### Dagster Cloud
+
+- Fixed an issue in Dagster Cloud Serverless runs where multiple runs simultaneously materializing the same asset would sometimes raise a “Key not found” exception.
+- Fixed an issue when using [agent replicas](https://docs.dagster.io/dagster-cloud/deployment/agents/running-multiple-agents#running-multiple-agents-in-the-same-environment) where one replica would sporadically remove a code server created by another replica due to a race condition, leading to a “code server not found” or “Deployment not found” exception.
+- [experimental] The metadata key for specifying column schema that will be rendered prominently on the new Overview tab of the asset details page has been changed from `"columns"` to `"dagster/column_schema"`. Materializations using the old metadata key will no longer result in the Columns section of the tab being filled out.
+- [ui] Fixed an Insights bug where loading a view filtered to a specific code location would not preserve that filter on pageload.
+
+# 1.6.11 (core) / 0.22.11 (libraries)
+
+### Bugfixes
+
+- Fixed an issue where `dagster dev` or the Dagster UI would display an error when loading jobs created with op or asset selections.
+
+# 1.6.10 (core) / 0.22.10 (libraries)
+
+### New
+
+- Latency improvements to the scheduler when running many simultaneous schedules.
+
+### Bugfixes
+
+- The performance of loading the Definitions snapshot from a code server when large `@multi_asset`s are in use has been drastically improved.
+- The Snowflake quickstart example project now renames the “by” column to avoid reserved Snowflake names. Thanks [@jcampbell](https://github.com/jcampbell)!
+- The existing group name (if any) for an asset is now retained if `the_asset.with_attributes` is called without providing a group name. Previously, the existing group name was erroneously dropped. Thanks [@ion-elgreco](https://github.com/ion-elgreco)!
+- [dagster-dbt] Fixed an issue where Dagster events could not be streamed from `dbt source freshness`.
+- [dagster university] Removed redundant use of `MetadataValue` in the Essentials course. Thanks [@stianthaulow](https://github.com/stianthaulow)!
+- [ui] Increased the max number of plots on the asset plots page to 100.
+
+### Breaking Changes
+
+- The `tag_keys` argument on `DagsterInstance.get_run_tags` is no longer optional. This has been done to remove an easy way of accidentally executing an extremely expensive database operation.
+
+### Dagster Cloud
+
+- The maximum number of concurrent runs across all branch deployments is now configurable. This setting can now be set via GraphQL or the CLI.
+- [ui] In Insights, fixed the display of table rows with zero change in value from the previous time period.
+- [ui] Added deployment-level Insights.
+- [ui] Fixed an issue causing void invoices to show up as “overdue” on the billing page.
+- [experimental] Branch deployments can now indicate the new and modified assets in the branch deployment as compared to the main deployment. To enable this feature, turn on the “Enable experimental branch deployment asset graph diffing” user setting.
+
+# 1.6.9 (core) / 0.22.9 (libraries)
+
+### New
+
+- [ui] When viewing logs for a run, the date for a single log row is now shown in the tooltip on the timestamp. This helps when viewing a run that takes place over more than one date.
+- Added suggestions to the error message when selecting asset keys that do not exist as an upstream asset or in an `AssetSelection`.
+- Improved error messages when trying to materialize a subset of a multi-asset which cannot be subset.
+- [dagster-snowflake] `dagster-snowflake` now requires `snowflake-connector-python>=3.4.0`.
+- [embedded-elt] `@sling_assets` accepts an optional `name` parameter for the underlying op.
+- [dagster-openai] The `dagster-openai` library is now available.
+- [dagster-dbt] Added a new setting on `DagsterDbtTranslatorSettings` called `enable_duplicate_source_asset_keys` that allows users to set duplicate asset keys for their dbt sources. Thanks @hello-world-bfree!
+- Log messages in the Dagster daemon for unloadable sensors and schedules have been removed.
+- [ui] Search now uses a cache that persists across pageloads, which should greatly improve search performance for very large orgs.
+- [ui] Groups and code locations in the asset graph’s sidebar are now sorted alphabetically.
+
+### Bugfixes
+
+- Fixed an issue where the input/output schemas of configurable IOManagers could be ignored when providing explicit input/output run config.
+- Fixed an issue where enum values could not properly have a default value set in a `ConfigurableResource`.
+- Fixed an issue where graph-backed assets would sometimes lose user-provided descriptions due to a bug in internal copying.
+- [auto-materialize] Fixed an issue introduced in 1.6.7 where updates to ExternalAssets would be ignored when using AutoMaterializePolicies which depended on parent updates.
+- [asset checks] Fixed a bug with asset checks in step launchers.
+- [embedded-elt] Fixed a bug when creating a `SlingConnectionResource` where a blank keyword argument would be emitted as an environment variable.
+- [dagster-dbt] Fixed a bug where emitting events from `dbt source freshness` would cause an error.
+- [ui] Fixed a bug where using the “Terminate all runs” button with filters selected would not apply the filters to the action.
+- [ui] Fixed an issue where typing a search query into the search box before the search data was fetched would yield “No results” even after the data was fetched.
+
+### Community Contributions
+
+- [docs] Fixed a typo in embedded-elt.mdx (thanks [@cameronmartin](https://github.com/cameronmartin))!
+- [dagster-databricks] The URL for the run of a Databricks job is now logged (thanks [@smats0n](https://github.com/smats0n))!
+- Fixed a missing partition property (thanks [@christeefy](https://github.com/christeefy))!
+- Added `op_tags` to the `@observable_source_asset` decorator (thanks [@maxfirman](https://github.com/maxfirman))!
+- [docs] Fixed a typo in the `MultiPartitionMapping` docs (thanks [@dschafer](https://github.com/dschafer))
+- Allowed GitHub Actions to check out branches from forked repos for docs changes (CI fix) (thanks [@hainenber](https://github.com/hainenber))!
+
+### Experimental
+
+- [asset checks] UI performance of asset check-related pages has been improved.
+- [dagster-dbt] The class `DbtArtifacts` has been added for managing the behavior of rebuilding the manifest during development but expecting a pre-built one in production.
+
+### Documentation
+
+- Added an example of writing compute logs to AWS S3 when customizing agent configuration.
+- "Hello, Dagster" is now "Dagster Quickstart", with the option to use a GitHub Codespace to explore Dagster.
+- Improved the guides and reference docs for running multiple isolated agents with separate queues on ECS.
+
+### Dagster Cloud
+
+- Microsoft Teams is now supported for alerts.
[Documentation](https://docs.dagster.io/dagster-cloud/managing-deployments/setting-up-alerts)
+- A “Send sample alert” button now exists on both the alert policies page and in the alert policies editor to make it easier to debug and configure alerts without having to wait for an event to kick them off.
+
+# 1.6.8 (core) / 0.22.8 (libraries)
+
+### Bugfixes
+
+- [dagster-embedded-elt] Fixed a bug in the `SlingConnectionResource` that raised an error when connecting to a database.
+
+### Experimental
+
+- [asset checks] `graph_multi_assets` with `check_specs` now support subsetting.
+
+# 1.6.7 (core) / 0.22.7 (libraries)
+
+### New
+
+- Added a new `run_retries.retry_on_op_or_asset_failures` setting that can be set to false to make run retries only occur when there is an unexpected failure that crashes the run, allowing run-level retries to co-exist more naturally with op or asset retries. See [the docs](https://docs.dagster.io/deployment/run-retries#combining-op-and-run-retries) for more information.
+- `dagster dev` now sets the environment variable `DAGSTER_IS_DEV_CLI`, allowing subprocesses to know that they were launched in a development context.
+- [ui] The Asset Checks page has been updated to show more information on the page itself rather than in a dialog.
+
+### Bugfixes
+
+- [ui] Fixed an issue where the UI disallowed creating a dynamic partition if its name contained the “|” pipe character.
+- `AssetSpec` previously dropped the `metadata` and `code_version` fields, resulting in them not being attached to the corresponding asset. This has been fixed.
+
+### Experimental
+
+- The new `@multi_observable_source_asset` decorator enables defining a set of assets that can be observed together with the same function.
+- [dagster-embedded-elt] A new asset decorator `@sling_assets` and resource `SlingConnectionResource` have been added for the `dagster-embedded-elt.sling` package. `build_sling_asset`, `SlingSourceConnection`, and `SlingTargetConnection` have been deprecated.
+- Added support for op-concurrency-aware run dequeuing for the `QueuedRunCoordinator`.
+
+### Documentation
+
+- Fixed reference documentation for isolated agents in ECS.
+- Corrected an example in the Airbyte Cloud documentation.
+- Added API links to the OSS Helm deployment guide.
+- Fixed in-line pragmas showing up in the documentation.
+
+### Dagster Cloud
+
+- Alerts now support Microsoft Teams.
+- [ECS] Fixed an issue where code locations could be left undeleted.
+- [ECS] ECS agents now support setting multiple replicas per code server.
+- [Insights] You can now toggle the visibility of a row in the chart by clicking on the dot for the row in the table.
+- [Users] Added a new column “Licensed role” that shows the user's most permissive role.
+
+# 1.6.6 (core) / 0.22.6 (libraries)
+
+### New
+
+- Dagster officially supports Python 3.12.
+- `dagster-polars` has been added as an integration. Thanks @danielgafni!
+- [dagster-dbt] `@dbt_assets` now supports loading projects with semantic models.
+- [dagster-dbt] `@dbt_assets` now supports loading projects with model versions.
+- [dagster-dbt] `get_asset_key_for_model` now supports retrieving asset keys for seeds and snapshots (see the sketch below). Thanks @aksestok!
+- [dagster-duckdb] The Dagster DuckDB integration supports DuckDB version 0.10.0.
+- [UPath I/O manager] If a non-partitioned asset is updated to have partitions, the file containing the non-partitioned asset data will be deleted when the partitioned asset is materialized, rather than raising an error.
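+
+A minimal sketch of the expanded `get_asset_key_for_model` support noted above; `my_dbt_assets` is assumed to be an existing `@dbt_assets` definition, and the seed name is hypothetical:
+
+```python
+from dagster_dbt import get_asset_key_for_model
+
+# Works for models and, as of this release, also for seeds and snapshots.
+seed_asset_key = get_asset_key_for_model([my_dbt_assets], "my_seed")
+```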
+
+### Bugfixes
+
+- Fixed an issue where creating a backfill of assets with dynamic partitions and a backfill policy would sometimes fail with an exception.
+- Fixed an issue with the type annotations on the `@asset` decorator causing a false positive in Pyright strict mode. Thanks @tylershunt!
+- [ui] On the asset graph, nodes are slightly wider, allowing more text to be displayed, and group names are no longer truncated.
+- [ui] Fixed an issue where the groups in the asset graph would not update after an asset was switched between groups.
+- [dagster-k8s] Fixed an issue where setting the `security_context` field on the `k8s_job_executor` didn't correctly set the security context on the launched step pods. Thanks @krgn!
+
+### Experimental
+
+- Observable source assets can now yield `ObserveResult`s with no `data_version`.
+- You can now include `FreshnessPolicy`s on observable source assets. These assets will be considered “Overdue” when the latest value of the “dagster/data_time” metadata entry is older than what’s allowed by the freshness policy.
+- [ui] In Dagster Cloud, a new feature flag allows you to enable an overhauled asset overview page with a high-level stakeholder view of the asset’s health, properties, and column schema.
+
+### Documentation
+
+- Updated docs to reflect newly-added support for Python 3.12.
+
+### Dagster Cloud
+
+- [kubernetes] Fixed an issue where the Kubernetes agent would sometimes leave dangling Kubernetes services if the agent was interrupted in the middle of being terminated.
+
+## 1.6.5 (core) / 0.22.5 (libraries)
+
+### New
+
+- Within a backfill or within auto-materialize, when submitting runs for partitions of the same assets, runs are now submitted in lexicographical order of partition key, instead of in an unpredictable order.
+- [dagster-k8s] Include k8s pod debug info in run worker failure messages.
+- [dagster-dbt] Events emitted by `DbtCliResource` now include metadata from the dbt adapter response. This includes fields like `rows_affected`, `query_id` from the Snowflake adapter, or `bytes_processed` from the BigQuery adapter.
+
+### Bugfixes
+
+- A previous change prevented asset backfills from grouping multiple assets into the same run when using BackfillPolicies under certain conditions. While the backfills would still execute in the proper order, this could lead to more individual runs than necessary. This has been fixed.
+- [dagster-k8s] Fixed an issue introduced in the 1.6.4 release where upgrading the Helm chart without upgrading the Dagster version used by user code caused failures in jobs using the `k8s_job_executor`.
+- [instigator-tick-logs] Fixed an issue where invoking `context.log.exception` in a sensor or schedule did not properly capture exception information.
+- [asset-checks] Fixed an issue where additional dependencies for dbt tests modeled as Dagster asset checks were not properly being deduplicated.
+- [dagster-dbt] Fixed an issue where dbt model, seed, or snapshot names with periods were not supported.
+
+### Experimental
+
+- `@observable_source_asset`-decorated functions can now return an `ObserveResult`. This allows including metadata on the observation, in addition to a data version. This is currently only supported for non-partitioned assets.
+- [auto-materialize] A new `AutoMaterializeRule.skip_on_not_all_parents_updated_since_cron` class allows you to construct `AutoMaterializePolicy`s which wait for all parents to be updated after the latest tick of a given cron schedule.
+- [Global op/asset concurrency] Ops and assets now take run priority into account when claiming global op/asset concurrency slots.
+
+### Documentation
+
+- Fixed an error in our asset checks docs. Thanks [@vaharoni](https://github.com/vaharoni)!
+- Fixed an error in our Dagster Pipes Kubernetes docs. Thanks [@cameronmartin](https://github.com/cameronmartin)!
+- Fixed an issue on the Hello Dagster! guide that prevented it from loading.
+- Added the specific capabilities of the Airflow integration to the Airflow integration page.
+- Rearranged sections in the I/O manager concept page to make info about using I/O managers versus resources more prominent.
+
+## 1.6.4 (core) / 0.22.4 (libraries)
+
+### New
+
+- `build_schedule_from_partitioned_job` now supports creating a schedule from a static-partitioned job (Thanks `@craustin`!). A short sketch follows the Bugfixes list below.
+- [dagster-pipes] `PipesK8sClient` will now autodetect the namespace when using in-cluster config. (Thanks `@aignas`!)
+- [dagster-pipes] `PipesK8sClient` can now inject the context into multiple containers. (Thanks `@aignas`!)
+- [dagster-snowflake] The Snowflake Pandas I/O manager now uses the `write_pandas` method to load Pandas DataFrames in Snowflake. To support this change, the database connector was switched from `SqlDbConnection` to `SnowflakeConnection`.
+- [ui] On the overview sensors page, you can now filter sensors by type.
+- [dagster-deltalake-polars] Added LazyFrame support. (Thanks `@ion-elgreco`!)
+- [dagster-dbt] When using `@dbt_assets` and multiple dbt resources produce the same `AssetKey`, we now display an exception message that highlights the file paths of the misconfigured dbt resources in your dbt project.
+- [dagster-k8s] The debug info reported upon failure has been improved to include additional information from the Job. (Thanks `@jblawatt`!)
+- [dagster-k8s] Changed the Dagster Helm chart to apply `automountServiceAccountToken: false` to the default service account used by the Helm chart, in order to better comply with security policies. (Thanks `@MattyKuzyk`!)
+
+### Bugfixes
+
+- An unnecessary thread lock has been removed from the sensor daemon. This should improve sensor throughput for users with many sensors who have enabled threading.
+- Retry-from-failure behavior has been improved for cases where dynamic steps were interrupted.
+- Previously, when backfilling a set of assets which shared a BackfillPolicy and PartitionsDefinition, but had a non-default partition mapping between them, a run for the downstream asset could be launched at the same time as a separate run for the upstream asset, resulting in inconsistent partition ordering. Now, the downstream asset will only execute after the parents complete. (Thanks `@ruizh22`!)
+- Previously, asset backfills would raise an exception if the code server became unreachable mid-iteration. Now, the backfill will pause until the next evaluation.
+- Fixed a bug that was causing ranged backfills over dynamically partitioned assets to fail.
+- [dagster-pipes] `PipesK8sClient` has improved handling for init containers and additional containers. (Thanks `@aignas`!)
+- Fixed the `last_sensor_start_time` property of the `SensorEvaluationContext`, which would get cleared on ticks after the first tick after the sensor starts.
+- [dagster-mysql] Fixed the optional `dagster instance migrate --bigint-migration`, which caused some operational errors on MySQL storages.
+- [dagster-dbt] Fixed a bug introduced in 1.6.3 that caused errors when ingesting asset checks with multiple dependencies.
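+
+A minimal sketch of the static-partitioned `build_schedule_from_partitioned_job` support added above. The partitions, asset, and cron string are invented for illustration:
+
+```python
+from dagster import (
+    StaticPartitionsDefinition,
+    asset,
+    build_schedule_from_partitioned_job,
+    define_asset_job,
+)
+
+# Hypothetical static partitions and asset, purely for illustration.
+regions = StaticPartitionsDefinition(["us", "eu", "apac"])
+
+@asset(partitions_def=regions)
+def regional_report() -> None: ...
+
+regional_report_job = define_asset_job("regional_report_job", partitions_def=regions)
+
+# For a static-partitioned job, the cron schedule is passed explicitly, since
+# one can't be derived from the partitions themselves.
+regional_schedule = build_schedule_from_partitioned_job(
+    regional_report_job, cron_schedule="0 0 * * *"
+)
+```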
+
+### Deprecations
+
+- The following methods on `AssetExecutionContext` have been marked deprecated, with their suggested replacements in parentheses:
+  - `context.op_config` (`context.op_execution_context.op_config`)
+  - `context.node_handle` (`context.op_execution_context.node_handle`)
+  - `context.op_handle` (`context.op_execution_context.op_handle`)
+  - `context.op` (`context.op_execution_context.op`)
+  - `context.get_mapping_key` (`context.op_execution_context.get_mapping_key`)
+  - `context.selected_output_names` (`context.op_execution_context.selected_output_names`)
+  - `context.dagster_run` (`context.run`)
+  - `context.run_id` (`context.run.run_id`)
+  - `context.run_config` (`context.run.run_config`)
+  - `context.run_tags` (`context.run.tags`)
+  - `context.has_tag` (`key in context.run.tags`)
+  - `context.get_tag` (`context.run.tags.get(key)`)
+  - `context.get_op_execution_context` (`context.op_execution_context`)
+  - `context.asset_partition_key_for_output` (`context.partition_key`)
+  - `context.asset_partition_keys_for_output` (`context.partition_keys`)
+  - `context.asset_partitions_time_window_for_output` (`context.partition_time_window`)
+  - `context.asset_partition_key_range_for_output` (`context.partition_key_range`)
+
+### Experimental
+
+- [asset checks] `@asset_check` now has a `blocking` parameter. When this is enabled, if the check fails with severity `ERROR`, then any downstream assets in the same run won’t execute.
+
+### Documentation
+
+- The Branch Deployment docs have been updated to reflect support for backfills.
+- Added Dagster’s maximum supported Python version (3.11) to Dagster University and relevant docs.
+- Added documentation for recommended partition limits (a maximum of 25K per asset).
+- References to the Enterprise plan have been renamed to Pro, to reflect recent plan name changes.
+- Added a syntax example for setting environment variables in PowerShell to our dbt with Dagster tutorial.
+- [Dagster University] Updated Dagster Essentials to Dagster v1.6, and introduced the usage of `MaterializeResult`.
+- [Dagster University] Fixed a typo in the Dagster University section on adding partitions to an asset. (Thanks Brandon Peebles!)
+- [Dagster University] Corrected the lesson where sensors are covered. (Thanks onefloid!)
+
+### Dagster Cloud
+
+- Agent tokens can now be locked down to particular deployments. Agents will not be able to run any jobs scheduled for deployments that they are not permitted to access. By default, agent tokens have access to all deployments in an organization. Use the `Edit` button next to an agent token on the `Tokens` tab in `Org Settings` to configure permissions for a particular token. You must be an Organization Admin to edit agent token permissions.
+
 # 1.6.3 (core) / 0.22.3 (libraries)
 
 ### New
 
@@ -1279,7 +2551,7 @@ def my_op(context: OpExecutionContext):
   - `dagit` → `dagsterWebserver`
   - `ingress.dagit` → `ingress.dagsterWebserver`
   - `ingress.readOnlyDagit` → `ingress.readOnlyDagsterWebserver`
-- [Dagster Cloud ECS Agent] We've introduced performance improvements that rely on the [AWS Resource Groups Tagging API](https://docs.aws.amazon.com/resourcegroupstagging/latest/APIReference/overview.html). To enable, grant your agent's IAM policy permission to `tag:DescribeResources`. Without this policy, the ECS Agent will log a deprecation warning and fall back to its old behavior (listing all ECS services in the cluster and then listing each service's tags).
+- [Dagster Cloud ECS Agent] We've introduced performance improvements that rely on the [AWS Resource Groups Tagging API](https://docs.aws.amazon.com/resourcegroupstagging/latest/APIReference/overview.html). To enable, grant your agent's IAM policy permission to `tag:GetResources`. Without this policy, the ECS Agent will log a deprecation warning and fall back to its old behavior (listing all ECS services in the cluster and then listing each service's tags). - `DbtCliClientResource`, `dbt_cli_resource` and `DbtCliOutput` are now being deprecated in favor of `DbtCliResource`. - A number of arguments on `load_assets_from_dbt_project` and `load_assets_from_dbt_manifest` are now deprecated in favor of other options. See the migration for details. @@ -3170,14 +4442,14 @@ nux: ### New - Tags can now be provided to an asset reconciliation sensor and will be applied to all RunRequests returned by the sensor. -- If you don’t explicitly specify a DagsterType on a graph input, but all the inner inputs that the graph input maps to have the same DagsterType, the graph input’s DagsterType will be set to the the DagsterType of the inner inputs. +- If you don’t explicitly specify a DagsterType on a graph input, but all the inner inputs that the graph input maps to have the same DagsterType, the graph input’s DagsterType will be set to the DagsterType of the inner inputs. - [dagster-airbyte] `load_assets_from_airbyte_project` now caches the project data generated at repo load time so it does not have to be regenerated in subprocesses. - [dagster-airbyte] Output table schema metadata is now generated at asset definition time when using `load_assets_from_airbyte_instance` or `load_assets_from_airbyte_project`. - [dagit] The run timeline now groups all jobs by repository. You can collapse or expand each repository in this view by clicking the repository name. This state will be preserved locally. You can also hold `Shift` while clicking the repository name, and all repository groups will be collapsed or expanded accordingly. - [dagit] In the launchpad view, a “Remove all” button is now available once you have accrued three or more tabs for that job, to make it easier to clear stale configuration tabs from view. - [dagit] When scrolling through the asset catalog, the toolbar is now sticky. This makes it simpler to select multiple assets and materialize them without requiring you to scroll back to the top of the page. - [dagit] A “Materialize” option has been added to the action menu on individual rows in the asset catalog view. -- [dagster-aws] The `EcsRunLauncher` now allows you to pass in a dictionary in the `task_definition` config field that specifies configuration for the task definition of the launched run, including role ARNs and a list of sidecar containers to include. Previously, the task definition could only be configured by passing in a task definition ARN or by basing the the task definition off of the task definition of the ECS task launching the run. See the [docs](https://docs.dagster.io/_apidocs/libraries/dagster-aws#dagster_aws.ecs.EcsRunLauncher) for the full set of available config. +- [dagster-aws] The `EcsRunLauncher` now allows you to pass in a dictionary in the `task_definition` config field that specifies configuration for the task definition of the launched run, including role ARNs and a list of sidecar containers to include. 
Previously, the task definition could only be configured by passing in a task definition ARN or by basing the task definition off of the task definition of the ECS task launching the run. See the [docs](https://docs.dagster.io/_apidocs/libraries/dagster-aws#dagster_aws.ecs.EcsRunLauncher) for the full set of available config.
 
 ### Bugfixes
 
diff --git a/MIGRATION.md b/MIGRATION.md
index 0c13e4e68de3b..519920c737cdd 100644
--- a/MIGRATION.md
+++ b/MIGRATION.md
@@ -2,6 +2,101 @@
 
 When new releases include breaking changes or deprecations, this document describes how to migrate.
 
+## Migrating to 1.8.0
+
+### Notable behavior changes
+
+- The `Definitions` constructor will no longer raise errors when the provided definitions aren’t mutually resolvable (e.g., when there are conflicting definitions with the same name, unsatisfied resource dependencies, etc.). These errors will still be raised at code location load time. The new `Definitions.validate_loadable` static method also allows performing the validation steps that used to occur in the constructor.
+- The “Unsynced” label on an asset is no longer transitive, i.e. it no longer displays purely on account of a parent asset being labeled “Unsynced”. This helps avoid “Unsynced label fatigue”, where huge portions of the graph often have the label because of a distant ancestor. It also helps the asset graph UI load faster.
+- The Run Status column on the Backfills page has been removed. This column was only filled out for backfills of jobs. Users should instead click on the backfill to see the status of each run.
+- The default behavior for evaluating `AutoMaterializePolicy` and `AutomationCondition` objects has changed. Previously, all assets were evaluated in a single process on the `AssetDaemon`, and evaluation history would show up in the UI in a special-purpose tab. Now, a default `AutomationConditionSensorDefinition` with the name `"default_automation_condition_sensor"` will be constructed for each code location, and a history of evaluations can be accessed by navigating to the page of that sensor. These changes are intended to provide a consistent UI/UX for interacting with automation concepts, and the sensor-based APIs allow for greater isolation between separate sets of assets.
+  - The core work of these sensors is still handled by the `AssetDaemon`, so this will need to continue running for your deployment.
+  - If desired, you can retain the previous (daemon-only) behavior by setting the following in your `dagster.yaml` file:
+    ```yaml
+    auto_materialize:
+      use_sensors: false
+    ```
+- The `datetime` objects that are exposed in Dagster public APIs are now standard Python `datetime.datetime` objects with timezones, instead of [Pendulum](https://pendulum.eustace.io/docs/) `datetime` objects. Technically, this is not a breaking change, since Dagster’s public APIs were already typed as `datetime.datetime`, but Pendulum datetimes expose some methods (like `add` and `subtract`) that are not available on standard `datetime.datetime` objects. If your code was using methods that are only available on Pendulum datetimes, you can transform your `datetime`s back to Pendulum datetimes before using them.
+
+  - For example, an asset like this:
+
+    ```python
+    from dagster import asset, AssetExecutionContext
+
+    @asset
+    def my_asset(context: AssetExecutionContext):
+        window_start, window_end = context.partition_time_window
+        in_an_hour = window_start.add(hours=1)  # will break, since add() is only defined on pendulum datetimes
+    ```
+
+  - could be changed to this in order to continue using pendulum datetimes:
+
+    ```python
+    from dagster import asset, AssetExecutionContext
+    import pendulum
+
+    @asset
+    def my_asset(context: AssetExecutionContext):
+        window_start, window_end = context.partition_time_window
+        window_start = pendulum.instance(window_start)  # transform to a pendulum datetime
+
+        in_an_hour = window_start.add(hours=1)  # will continue working
+    ```
+
+### Breaking changes
+
+- `AutoMaterializeSensorDefinition` has been renamed to `AutomationConditionSensorDefinition`. All other functionality is identical.
+- “Op job versioning and memoization”, an experimental and deprecated pre-1.0 feature, has been removed. This feature has long been superseded by `code_version`, data versions, and automation conditions. `MemoizableIOManager`, `VersionStrategy`, `SourceHashVersionStrategy`, `OpVersionContext`, `ResourceVersionContext`, and `MEMOIZED_RUN_TAG` have been removed.
+- The experimental and deprecated `build_asset_with_blocking_check` has been removed. Use the `blocking` argument on `@asset_check` instead.
+- [dagster-dbt] Support for setting freshness policies through dbt metadata on field `+meta.dagster_freshness_policy` has been removed. Use `+meta.dagster.freshness_policy` instead.
+- [dagster-dbt] `KeyPrefixDagsterDbtTranslator` has been removed. To modify the asset keys for a set of dbt assets, implement `DagsterDbtTranslator.get_asset_key()` instead.
+- [dagster-dbt] Support for setting auto-materialize policies through dbt metadata on field `+meta.dagster_auto_materialize_policy` has been removed. Use `+meta.dagster.auto_materialize_policy` instead.
+- [dagster-dbt] Support for `dbt-core==1.6.*` has been removed because the version is now end-of-life.
+- [dagster-dbt] Support for `load_assets_from_dbt_project`, `load_assets_from_dbt_manifest`, and `dbt_cli_resource` has been removed. Use `@dbt_assets`, `DbtCliResource`, and `DbtProject` instead to define how to load dbt assets from a dbt project and to execute them.
+- [dagster-dbt] Support for the pre-built ops like `dbt_run_op`, `dbt_compile_op`, etc. has been removed. Use `@op` and `DbtCliResource` directly to execute dbt commands in an op.
+
+### Deprecations
+
+- The experimental `external_assets_from_specs` API has been deprecated. Instead, you can directly pass `AssetSpec` objects to the `assets` argument of the `Definitions` constructor.
+- `AutoMaterializePolicy`, `AutoMaterializeRule`, and the `auto_materialize_policy` arguments to `@asset` and `AssetSpec` have been marked as deprecated, and the new `AutomationCondition` API and `automation_condition` argument should be used instead. These changes are intended to provide a more consistent, composable, and flexible experience for users interested in asset-focused automation. A full migration guide can be found [here](https://github.com/dagster-io/dagster/discussions/23495), and a more detailed explanation of the thought process behind these changes can be found in the [original RFC](https://github.com/dagster-io/dagster/discussions/22811).
+  - `AutoMaterializePolicy`s and `AutomationCondition`s can interoperate without issue, meaning you do not need to migrate all assets at the same time.
+- The `partitions_def` parameter on `define_asset_job` is now deprecated. The `partitions_def` for an asset job is determined from the `partitions_def` attributes on the assets it targets, so this parameter is redundant.
+- The `asset_partition_key_for_output`, `asset_partition_keys_for_output`, `asset_partition_key_range_for_output`, and `asset_partitions_time_window_for_output` methods on `OpExecutionContext` have been deprecated. Instead, use the corresponding property: `partition_key`, `partition_keys`, `partition_key_range`, or `partition_time_window`.
+- `SourceAsset` is deprecated in favor of `AssetSpec`. You can now use `AssetSpec`s in any of the places you could previously use `SourceAsset`s, including passing them to the `assets` argument of `Definitions`, passing them to the `assets` argument of `materialize`, and supplying them as inputs in op graphs. `AssetSpec` has all the properties that `SourceAsset` does, except for `io_manager_key`. To set an IO manager key on an `AssetSpec`, you can supply a metadata entry with the `"dagster/io_manager_key"` key:
+
+  ```python
+  # before
+  from dagster import SourceAsset
+
+  my_asset = SourceAsset("my_asset", io_manager_key="abc")
+
+  # after
+  from dagster import AssetSpec
+
+  my_asset = AssetSpec("my_asset", metadata={"dagster/io_manager_key": "abc"})
+  ```
+
+- [dagster-shell] The `dagster-shell` package, which exposes `create_shell_command_op` and `create_shell_script_op`, has been deprecated. Instead, use `PipesSubprocessClient` from the `dagster` package.
+- [dagster-airbyte] `load_assets_from_airbyte_project` is now deprecated, because the Octavia CLI that it relies on is an experimental feature that is no longer supported. Use `build_airbyte_assets` or `load_assets_from_airbyte_instance` instead.
+
+## Migrating to 1.7.0
+
+### Breaking Changes
+
+- Creating a run with a custom non-UUID `run_id` was previously private and only used for testing. It will now raise an exception.
+- [community-contribution] Previously, calling `get_partition_keys_in_range` on a `MultiPartitionsDefinition` would erroneously return partition keys that were within the one-dimensional range of alphabetically-sorted partition keys for the definition. Now, this method returns the cartesian product of partition keys within each dimension’s range. Thanks, [@mst](https://github.com/mst)!
+- Added `AssetCheckExecutionContext` to replace `AssetExecutionContext` as the type of the `context` param passed in to `@asset_check` functions. `@asset_check` was an experimental decorator.
+- [experimental] `@classmethod` decorators have been removed from `dagster-embedded-elt.sling`’s `DagsterSlingTranslator`.
+- [dagster-dbt] `@classmethod` decorators have been removed from `DagsterDbtTranslator`.
+- [dagster-k8s] The default merge behavior when raw Kubernetes config is supplied at multiple scopes (for example, at the instance level and for a particular job) has been changed to be more consistent. Previously, configuration was merged shallowly by default, with fields replacing other fields instead of appending or merging. Now, it is merged deeply by default, with lists appended to each other and dictionaries merged, in order to be more consistent with how Kubernetes configuration is combined in all other places. See [the docs](https://docs.dagster.io/deployment/guides/kubernetes/customizing-your-deployment#precedence-rules) for more information, including how to restore the previous default merge behavior.
+
+### Deprecations
+
+- `AssetSelection.keys()` has been deprecated. Instead, you can now supply asset key arguments to `AssetSelection.assets()`.
+- Run tag keys with long lengths and certain characters are now deprecated. For consistency with asset tags, run tag keys are expected to contain only alphanumeric characters, dashes, underscores, and periods. Run tag keys can also contain a prefix section, separated with a slash. The main section and prefix section of a run tag are limited to 63 characters.
+- `AssetExecutionContext` has been simplified. Op-related methods and methods with existing access paths have been marked deprecated. For a full list of deprecated methods, see this [GitHub Discussion](https://github.com/dagster-io/dagster/discussions/20974).
+- The `metadata` property on `InputContext` and `OutputContext` has been deprecated and renamed to `definition_metadata`.
+- `FreshnessPolicy` is now deprecated. For monitoring freshness, use freshness checks instead. If you are using `AutoMaterializePolicy.lazy()`, `FreshnessPolicy` is still recommended, and will continue to be supported until an alternative is provided.
+
 ## Migrating to 1.6.0
 
 ### Breaking changes
 
@@ -188,7 +283,7 @@
 def a_downstream_asset(): ...
 ```
 
-- [Dagster Cloud ECS Agent] We've introduced performance improvements that rely on the [AWS Resource Groups Tagging API](https://docs.aws.amazon.com/resourcegroupstagging/latest/APIReference/overview.html). To enable, grant your agent's IAM policy permission to `tag:DescribeResources`. Without this policy, the ECS Agent will log a deprecation warning and fall back to its old behavior (listing all ECS services in the cluster and then listing each service's tags).
+- [Dagster Cloud ECS Agent] We've introduced performance improvements that rely on the [AWS Resource Groups Tagging API](https://docs.aws.amazon.com/resourcegroupstagging/latest/APIReference/overview.html). To enable, grant your agent's IAM policy permission to `tag:GetResources`. Without this policy, the ECS Agent will log a deprecation warning and fall back to its old behavior (listing all ECS services in the cluster and then listing each service's tags).
 
 - [dagster-dbt] `DbtCliClientResource`, `dbt_cli_resource` and `DbtCliOutput` are now being deprecated in favor of `DbtCliResource`. `dagster-dbt` Asset APIs like `load_assets_from_dbt_manifest` and `load_assets_from_dbt_project` will continue to work if given either a `DbtCliClientResource` or `DbtCliResource`.
 
 ```python
diff --git a/Makefile b/Makefile
index 928b85572d26e..31adeb0c25445 100644
--- a/Makefile
+++ b/Makefile
@@ -9,14 +9,19 @@
 pyright:
 	python scripts/run-pyright.py --all
 
+install_prettier:
+	npm install -g prettier
+
 install_pyright:
-	pip install -e 'python_modules/dagster[pyright]'
+	pip install -e 'python_modules/dagster[pyright]' -e 'python_modules/dagster-pipes'
 
 rebuild_pyright:
 	python scripts/run-pyright.py --all --rebuild
 
+# Skip typecheck so that this can be used to test if all requirements can successfully be resolved
+# in CI independently of typechecking.
rebuild_pyright_pins: - python scripts/run-pyright.py --update-pins + python scripts/run-pyright.py --update-pins --skip-typecheck quick_pyright: python scripts/run-pyright.py --diff @@ -25,22 +30,22 @@ unannotated_pyright: python scripts/run-pyright.py --unannotated ruff: - -ruff --fix . + -ruff check --fix . ruff format . check_ruff: - ruff . + ruff check . ruff format --check . check_prettier: #NOTE: excludes README.md because it's a symlink - yarn exec --cwd js_modules/dagster-ui/packages/eslint-config -- prettier `git ls-files \ + prettier `git ls-files \ 'python_modules/*.yml' 'python_modules/*.yaml' 'helm/*.yml' 'helm/*.yaml' \ ':!:helm/**/templates/*.yml' ':!:helm/**/templates/*.yaml' '*.md' ':!:docs/*.md' \ ':!:README.md'` --check prettier: - yarn exec --cwd js_modules/dagster-ui/packages/eslint-config -- prettier `git ls-files \ + prettier `git ls-files \ 'python_modules/*.yml' 'python_modules/*.yaml' 'helm/*.yml' 'helm/*.yaml' \ ':!:helm/**/templates/*.yml' ':!:helm/**/templates/*.yaml' '*.md' ':!:docs/*.md' \ ':!:README.md'` --write diff --git a/azure-pipelines.yml b/azure-pipelines.yml index 547f386cba7f2..e8acffd0f9c5c 100644 --- a/azure-pipelines.yml +++ b/azure-pipelines.yml @@ -8,15 +8,15 @@ parameters: - name: py3_versions type: object default: - - "3.9" + - "3.10" - name: py3_env_suffixes type: object default: - api_tests - cli_tests - core_tests_pydantic1 - - core_tests_pydantic2 - general_tests + - launcher_tests - daemon_tests - daemon_sensor_tests - scheduler_tests @@ -38,11 +38,8 @@ jobs: inputs: versionSpec: "$(python.version)" architecture: "x64" - - script: choco install vcpython27 --yes - condition: eq(variables['python.version'], '2.7') - displayName: "Install vcpython27" - - script: pip install "tox<4.0.0" - displayName: "Install tox" + - script: pip install "tox<4.0.0" uv + displayName: "Install tox & uv" - script: cd python_modules\dagster && tox -e %TOXENV% && cd ..\.. displayName: "Run tests" - task: PublishTestResults@2 diff --git a/docs/.gitignore b/docs/.gitignore index e7d746ffcb4c2..51e706ab72d43 100644 --- a/docs/.gitignore +++ b/docs/.gitignore @@ -1,3 +1,6 @@ sphinx/_build/ .env next/.env +docs-beta/.docusaurus +*.duckdb +*.sqlite diff --git a/docs/.vale.ini b/docs/.vale.ini new file mode 100644 index 0000000000000..107a1d88d499b --- /dev/null +++ b/docs/.vale.ini @@ -0,0 +1,58 @@ +######################## +# ABOUT # +######################## + +# This file controls the Vale application, specifically what, where, and how it lints. 
+
+# Vale config reference: https://vale.sh/docs/topics/config
+# INI syntax: https://ini.unknwon.io/docs/intro
+
+########################
+#    CORE SETTINGS     #
+########################
+
+StylesPath = "vale/styles"
+MinAlertLevel = suggestion
+Vocab = Dagster
+
+########################
+# FORMAT ASSOCIATIONS  #
+########################
+
+[formats]
+mdx = md
+
+########################
+#   FORMAT-SPECIFIC    #
+########################
+
+[*.{md,mdx,rst}]
+BasedOnStyles = Dagster, Terms, Vale
+
+; References:
+; - https://vale.sh/docs/topics/scoping/#non-standard-markup
+; - https://github.com/errata-ai/vale/blob/871dafd1e24500cee9d8ad82b25d42a136bb2103/testdata/fixtures/patterns/_vale#L14
+
+; Pattern     : (\\\{[^\n]+\})
+; Regex101    : https://regex101.com/r/GOx8Z6/2
+; Description : Ignore heading anchor renames
+
+; Description : Ignore code snippets
+; Pattern     : (`[^\n^`]+`)
+; Regex101    : https://regex101.com/r/c5EE6S/1
+
+; Pattern     : \[.*\](\(.*\))
+; Regex101    : https://regex101.com/r/GOx8Z6/3
+; Description : Ignore link HREFs
+
+; Additionally, we include TokenIgnores of `<details>` and `<summary>` to strip these HTML elements,
+; because when these wrap markdown it causes the markdown linting to fail. For example, code blocks
+; within a tab item.
+
+TokenIgnores = (`[^`]*`), \
+    (\[.*\]\([^)]+\)), \
+    (\\\{[^}]+\}), \
+    (<\/?TabItem.*>), \
+    (<\/?Tabs.*>), \
+    (<\/?details>), \
+    (<summary>.*<\/summary>)
diff --git a/docs/Makefile b/docs/Makefile
index a51494d665fb1..4ea97283c3271 100644
--- a/docs/Makefile
+++ b/docs/Makefile
@@ -1,24 +1,35 @@
-docs_ruff:
-	-ruff --fix ../examples/docs_snippets
+.PHONY: help
+
+help:
+	@egrep -h '\s##\s' $(MAKEFILE_LIST) | awk 'BEGIN {FS = ":.*?## "}; {printf "\033[36m  %-30s\033[0m %s\n", $$1, $$2}'
+
+docs_ruff: ## Ruff linting and fixing on /examples/docs_snippets
+	-ruff check --fix ../examples/docs_snippets
 	ruff format ../examples/docs_snippets
 
-apidoc-build:
+apidoc-build: ## Build Sphinx docs
 	tox -e sphinx && python scripts/pack_json.py
 
-apidoc-watch-build:
+apidoc-watch-build: ## Watch build Sphinx docs
 	watchmedo shell-command \
 		--patterns='*.rst;conf.py;_ext/*.py' \
 		--command='make apidoc-build' \
 		--recursive \
 		sphinx
 
-next-dev-install:
+next-dev-install: ## Install Next.js dependencies
 	cd next; yarn install
 
-next-watch-build dev:
+next-watch-build: ## Run Next.js docs website in development mode
 	cd next; yarn dev
 
-mdx-format:
+mdx-format: ## Format mdx files
 	cd next; yarn mdx-format
 
 mdx-full-format: docs_ruff mdx-format
+
+mdx:
+	tox -e sphinx-mdx
+
+mdx_copy:
+	cp -rf sphinx/_build/mdx/sections/api/apidocs/* docs-beta/docs/api/
diff --git a/docs/README.md b/docs/README.md
index 99ab80db99ec7..ec0f70ac21609 100644
--- a/docs/README.md
+++ b/docs/README.md
@@ -1,143 +1,871 @@
-# Dagster Documentation Site
+# Running the docs, v2
 
 > **Submitting a pull request to update the docs?** If so, please verify that the updates follow the [docs style checklist](https://github.com/dagster-io/dagster/blob/master/docs/DOC_CHECKLIST.md).
 
 This directory contains the code for the Dagster documentation site at https://docs.dagster.io. The site is built with [NextJS](https://nextjs.org/), with the code for the site living in `next`. To serve the site locally in development mode (hot-reloading), run:
 
+---
+
+## Getting started
+
+To run the Dagster docs locally, you'll need to install [NodeJS](https://nodejs.org/en/download/). The version must be 12.13.0 or later.
+
+After NodeJS is installed:
+
+1. Clone the [`dagster-io/dagster` GitHub repository](https://github.com/dagster-io/dagster) to your local machine.
+2. Using the Terminal, navigate to the `/docs` directory in the repository and run the following to set up the site's environment:
+
+   ```bash
+   make next-dev-install
+   ```
+
+   **Note**: This is the only time you’ll need to run this command.
+
+3. Next, start the development server:
+
+   ```bash
+   make next-watch-build
+   ```
+
+   This command also enables hot-reloading, which automatically reloads `localhost:3001` when files in `/content` or `/docs/next` are modified.
+
+4. Navigate to [http://localhost:3001](http://localhost:3001/) to view a live, rendered version of the docs.
+
+---
+
+## About
+
+- [Page architecture][docs-page-architecture]
+- [Directory structure][docs-site-structure]
+- [Site technologies][docs-site-technologies]
+
+### Page architecture
+
+![docs-components.png](/docs/next/public/images/readme/docs-components.png)
+
+### Directory structure
+
+```yaml
+/
+├── /docs
+│   ├── /content
+│   │   ├── /api
+│   │   ├── /[other content folders]
+│   │   ├── _apidocs.mdx
+│   │   ├── _navigation.json
+│   │   └── [other-pages].mdx
+│   ├── /dagit-screenshot
+│   ├── /next
+│   │   ├── /components
+│   │   ├── /layouts
+│   │   ├── /pages
+│   │   ├── /public
+│   │   ├── /scripts
+│   │   ├── /styles
+│   │   └── /util
+│   ├── /node_modules
+│   ├── /screenshots
+│   ├── /scripts
+│   └── /sphinx
+└── /examples
+    └── /docs_snippets
+```
+
+The following table contains information about the folders you’ll most commonly work in when editing the docs. **Note**: This isn’t an exhaustive list; it’s unlikely you’ll need to touch folders and files that aren’t listed here.
+
+Click the links in the **Path** column to learn more about a specific folder, its contents, and what it’s used for:
+
+| Path | Description |
+| --- | --- |
+| [/content][docs-content] | All site content, including the site's navigation (`_navigation.json`). This folder primarily contains `.mdx` files, which are Markdown files with React components. To update doc content, directly edit these files. |
+| [/dagit-screenshot][dagit-screenshot] | The Python module for the `dagit-screenshot` CLI tool. Used for automating screenshots from a local Dagit instance. |
+| /next | The NextJS code that powers the site, including MDX components, redirects, images and other assets, etc. |
+| [/screenshots][dagit-screenshot] | YAML configuration files used by `dagit-screenshot`. |
+| [/sphinx][api-docs] | Specification files in ReStructured Text (`.rst`) used by Sphinx to generate API docs. |
+| [/examples/docs_snippets][docs-code-snippets] | A Python package containing the code snippets embedded throughout the docs. |
+
+### Site technologies
+
+| Name | Usage | Notes |
+| :--- | :--- | :--- |
+| Algolia | Search | |
+| Google Analytics | Analytics | |
+| MDX | Content formatting | A file format that embeds JSX into Markdown files, supporting the import of React components. |
+| NextJS | Site framework | A React framework used to build web applications. |
+| Prettier | Code formatter | Run as part of the [make mdx-format][mdx-format] CLI command, Prettier formats content in MDX files. |
+| Remark | Markdown processor | |
+| Sphinx | API docs | |
+| Tailwind | CSS | |
+| Vercel | CDN | Hosts the docs. Vercel automatically deploys every push by default, including pushes to `master` and other branches, such as PRs.<br/>• **Production branch**: `master`. Every push to `master` automatically deploys to [docs.dagster.io](https://docs.dagster.io).<br/>• **Preview branches**: For every push to a non-production branch, Vercel automatically builds and deploys a preview version of the docs. This is useful for debugging and previewing content changes. |
+| YAML | Page metadata | |
+| Yarn | TODO | |
+
+---
+
+## Development
+
+- [Local][docs-development-local]
+- [Pull requests][docs-pull-requests]
+
+### Local
+
+We apply CI checks to maintain the quality of our MDX files and code snippets. To ensure your code passes our CI checks, we recommend following this workflow when pushing changes:
+
+#### 1. Run make black isort
+
+Applicable only to code snippet changes in `/../examples/docs_snippets/docs_snippets`. Skip if no code snippet changes have been made.
+
+```bash
+# run in repo root
+make black isort
+```
+
+#### 2. Run yarn test
+
+This command runs internal and external link tests, failing when invalid links are detected.
+
+```bash
+cd /docs/next
+yarn test
+```
+
+#### 3. Run make mdx-format
+
+This command processes the full corpus of MDX files in `/content` and diffs them against the source, performing several transformations:
+
+- Format MDX markdown and React code using [Prettier](https://prettier.io/)
+- Transform Markdown images (`![]()`) and raw HTML `img` tags into special `Image` components. Refer to the [Images](#images) section for more info.
+- Resolve references to external code snippets and overwrite the corresponding code blocks with the external snippet. Refer to the [Code snippets](#code-snippets) section for more info.
+
+```bash
+cd /docs
+make mdx-format
+```
+
+If any discrepancies are found after the changes have been pushed, the associated CI step will fail.
+
+We recommend running this command after other commands and immediately before pushing MDX changes. This ensures that code snippet changes are correctly reflected in MDX files, avoiding a failed CI test.
+
+#### 4. Push
+
+After the previous steps are completed, you can push your changes and, if you haven’t already, open a pull request.
+
+### Pull requests
+
+For every push made to a pull request, Vercel automatically deploys a preview version of the docs. This is useful for debugging and previewing content changes.
+
+In the pull request, click the **Visit Preview** link to view the preview:
+
+![Vercel 'Visit preview' link](/docs/next/public/images/readme/vercel-visit-preview-link.png)
+
+When your pull request is ready for review, add @erinkcochran87 as a reviewer. You can also add other Dagster maintainers, but Erin is required for all documentation reviews.
+
+---
+
+## Authoring
+
+In this section, we’ll cover the different types of content in the Dagster docs and how to work with the site navigation.
+
+The Dagster docs have two types of doc content:
+
+- [General][docs-content], which are MDX files in the `/content` folder
+- [API][api-docs], which are auto-generated from Python docstrings in the Dagster codebase using Sphinx
+
+### General/MDX content
+
+<details><summary>Relevant CLI commands</summary>
+
+| Command | Run location | Usage | Description |
+| :--- | :--- | :--- | :--- |
+| make next-dev-install | /docs | Local dev | Sets up the site's environment. Only required after the initial install. |
+| make next-watch-build | /docs | Local dev | Runs the development server on `localhost:3001`. Watches MDX files in `/content` and site code in `/docs/next`. |
+| make mdx-format | /docs | Content | Runs [/docs/next/scripts/mdx-transform.ts][mdx-format-source], which formats MDX and React code. This includes standardizing MDX content, inserting code snippet content into code blocks, and transforming HTML `img`/Markdown images into React `Image` components. Run immediately prior to committing changes to prevent a Buildkite error.<br/><br/>**Note:** If you’ve edited code snippets, you should run `make black isort` before running this and committing changes. Our CI checks the contents of code snippets in MDX against their source files and fails if there’s a mismatch. If running `black` and `isort` makes changes to your code snippets, running `mdx-format` last ensures that there won’t be a mismatch due to formatting. |
+| make black | / | Code snippets | Runs [TODO]. If you've added or edited code snippets, run immediately prior to committing changes. |
+| make isort | / | Code snippets | Runs [TODO]. If you've added or edited code snippets, run immediately prior to committing changes. |
+| yarn test | /docs/next | Content | Runs some [testing scripts][link-tests], which test the validity of internal and external links. |
+
+</details>
+
+The `/content` directory contains the bulk of the docs’ content, such as tutorials, guides, references, and so on. **Note**: API documentation doesn’t live in this folder; API docs are managed differently than general doc pages. Refer to the [API docs section][api-docs] for more info.
+
+All general doc pages are saved as `.mdx` files, which are [Markdown](https://www.markdownguide.org/) files that contain YAML frontmatter and React components. MDX is a format that embeds JSX in Markdown files, allowing us to import [React components][next-components] and use them in Markdown content. For example:
+
+```markdown
+---
+title: "Getting started with Dagster Cloud | Dagster Docs"
+description: "Learn how to get up-and-running with Dagster Cloud."
+---
+
+# Page title
+
+Content here!
+```
+
+Each MDX file in `/content` directly corresponds to a single, rendered docs page, where the file path is used to construct the page’s final URL. For example:
 
 - [**Prose docs**](#prose-docs) make up the majority of the docs site: Tutorials, Concepts, Guides, etc. All prose docs live directly in the `content` folder as `mdx` files (markdown with React components). You should edit these files directly to update prose docs.
 - [**API docs**](#api-docs) contain the formal specification of Dagster's APIs. The built representation of the API docs consists of a few large JSON files in `content/api`. These files should not be edited directly-- they are the output of a build process that extracts the docstrings in Dagster source. The primary build tools are [Sphinx](https://www.sphinx-doc.org/en/master/) and its [autodoc extension](https://www.sphinx-doc.org/en/master/usage/extensions/autodoc.html). Sphinx processes a set of `rst` files (found in `sphinx/index.rst` and `sphinx/sections`) into JSON, which is then massaged into a (still-JSON) structure that NextJS can understand (`scripts/pack_json.py` script) and written to `content/api`. Note that there is little text in the `rst` files, because their main purpose is to invoke `autodoc` directives which pull in docstrings from all over the dagster codebase.
 - [**Code snippets**](#code-snippets) are embedded throughout the prose docs. All code snippets used in the docs site live (outside the `docs` folder) in `examples/docs_snippets`. This is just a regular python package, which makes it easy to test and specify dependencies for the snippets.
 - [**Screenshots**](#screenshots) are image files living in `next/public/images`, typically under a subdirectory corresponding to the page on which they appear. There's no need to manually manage the screenshots in this directory-- instead you can add a specification your screenshot to `screenshots` and run `dagster-ui-screenshot capture` to quasi-automatically generate the screenshot and place it in the appropriate subdirectory of `next/public/images`.
 - [**Navigation schema**](#navigation-schema) is a JSON file specifying the contents of the main nav for the site (found in the left sidebar). This typically needs to be updated to include a link when new prose doc is added.
+- [**Dagster University**](#dagster-university) is a separate NextJS site containing the content for Dagster University courses.
+
+Refer to the [Development section][docs-development] for info about committing and pushing MDX changes.
+
+### Page elements / components
+
+In [`/next/components/mdx/MDXComponents.tsx`][next-components-source], you’ll find the React components available for use in MDX files.
**Note**: Other components in `/next/components` can’t be used in MDX files. To use a component in an MDX file, **it must be exported from `MDXComponents.tsx`.**
+
+As MDX doesn’t support imports, components aren’t imported into the MDX file in which they’re used. Instead, the full set of `MDXComponents` components is injected into the build environment when the MDX is rendered to HTML, which happens in [`/next/pages/[...page].tsx`][next-page-tsx].
+
+> ‼️ **Making updates to `MDXComponents.tsx`**
+>
+> This file must be treated as a stable API, which means only **additive** changes are allowed. Breaking changes will break the docs site.
+>
+> This is due to how the site handles versioning: A single deployment of the docs site contains both the latest version **and** all older versions. Older versions are pulled from Amazon S3 at build time, meaning that the sole `MDXComponents.tsx` instance must be compatible across all doc versions. This means that even if all callsites in the current MDX files are updated after making a breaking change, there are still callsites in the archived versions on S3 that can’t be updated.
+>
+> TL;DR: No breaking changes to `MDXComponents.tsx`, or you’ll break the site. 😟
+
+The following sections contain info about the most commonly used components, how to use them, and the impact `make mdx-format` has when run.
+
+| Component | Description |
+| --- | --- |
+| [PyObject][component-pyobject] | Creates auto-linking references to API docs in MDX files. |
+| [Image][component-image] | Embeds images in MDX files. |
+| [Code snippet][component-code-snippet] | Embeds code snippets in MDX files. |
+| [Callout][component-callout] | Creates callout boxes in MDX files. |
+| [Article list][component-article-list] | Creates a two-column list of links in MDX files. |
+| [Tabs][component-tabs] | Creates a tabbed interface in MDX files. |
+
+#### PyObject
+
+The most commonly used component is the `PyObject` component. When used, this component automatically creates references to the API docs from MDX files.
+
+`PyObject` should be used whenever possible, as it:
+
+- Pulls object names directly from the code base, ensuring the correct name is always used
+- Simplifies linking to supporting API reference content
+
+The following table describes the properties accepted by `PyObject` and demonstrates how to use them:
+
+<table>
+  <tr>
+    <th>Name</th>
+    <th>Required?</th>
+    <th>Description</th>
+    <th>Example</th>
+  </tr>
+  <tr>
+    <td>object</td>
+    <td>Yes</td>
+    <td>The name of the Python object. By default, the object name is also used as the display text.</td>
+    <td>
+
+```
+Example:
+<PyObject object="Definitions" />
+
+Result:
+[Definitions](/_apidocs/definitions#Definitions)
+```
+
+    </td>
+  </tr>
 
-## Prose docs
 
-We use MDX, which is a format that embeds JSX in markdown documents. This lets us import React components and embed them in our doc pages written in markdown. All of our prose docs live in `content`, where they are grouped into a few directories corresponding to the categories of docs shown in the left sidebar: `concepts`, `guides`, etc. There is a 1-1 correspondence of MDX files to pages in these categories.
 
+  <tr>
+    <td>module</td>
+    <td>Yes*</td>
+    <td>Required if the object exists outside of the core Dagster module. The name of the Python module containing the object. For example, `dagster_airbyte`.</td>
+    <td>
+
+```
+Example:
+<PyObject module="dagster_dbt" object="DbtCliResource" />
+
+Result:
+[DbtCliResource](/_apidocs/libraries/dagster-dbt#dagster_dbt.DbtCliResource)
+```
+
+    </td>
+  </tr>
 
-Just as with code, we apply checks in CI to maintain the quality of our MDX files. Specifically, the full corpus of MDX is processed with `make mdx-format` and diffed against the source. If any discrepancies are found, the CI step will fail. Therefore you should always run `make mdx-format` before pushing MDX changes. `make mdx-format` processes all MDX files in `content`, performing several transformations (additional transformations can be added if needed):
 
-- Formats MDX markdown and React code using [Prettier](https://prettier.io/).
-- Transforms raw HTML `img` tags into special `Image` components (see [here](#embedding-images)).
-- Resolves references to external code snippets and overwrites the corresponding code blocks with the external snippet.
 
-See the subsections below for details.
 
-### React components
 
+  <tr>
+    <td>displayText</td>
+    <td>No</td>
+    <td>If provided, the value of `displayText` will be used as display text instead of the object’s name.</td>
+    <td>
+
+```
+Example:
+<PyObject object="Definitions" displayText="code location" />
+
+Result:
+[code location](/_apidocs/definitions#Definitions)
+```
+
+    </td>
+  </tr>
 
-The set of React components available to our MDX files is defined in `next/components/mdx/MDXComponents.tsx`. Note that the various other components defined in `next/components` _are not available to MDX_; if you want to use a component in MDX, it must be exported from `MDXComponents.tsx`.
 
-MDX doesn't support imports, so components aren't imported into the MDX file in which they are used. Instead, the full set of `MDXComponents` components is injected into the build environment when the MDX is rendered to HTML (this happens in `next/pages/[...page].js`).
 
+  <tr>
+    <td>pluralize</td>
+    <td>No</td>
+    <td>If provided, the object (or `displayText`, if provided) will be pluralized.</td>
+    <td>
+
+```
+Example:
+<PyObject object="Definitions" displayText="code location" pluralize />
+
+Result:
+[code locations](/_apidocs/definitions#Definitions)
+```
+
+    </td>
+  </tr>
 
-You should browse `MDXComponents.tsx` to get a sense of what's available, but
-the most commonly used component is the `<PyObject />` component. It is used to
-link to references in the API docs from MDX files:
 
+  <tr>
+    <td>method</td>
+    <td>No</td>
+    <td>If provided, the object value will be formatted as a Python method.</td>
+    <td></td>
+  </tr>
+  <tr>
+    <td>decorator</td>
+    <td>No</td>
+    <td>If provided, the value of object will be formatted as a Python decorator.</td>
+    <td>
+
+```
+Example:
+<PyObject object="asset" decorator />
+
+Result:
+[@asset](/_apidocs/assets#dagster.asset)
+```
+
+    </td>
+  </tr>
+</table>
 
-Here is an example of using PyObject:
 
-By default, we just display the object name. To override, use the `displayText` prop:
 
-Finally, a note on updates to `MDXComponents.tsx`-- this has to be treated as a stable API, which means additive changes to components are allowed but breaking changes are not. This is because a single deployment of the docs site contains both the latest and older versions of the docs. The older versions are pulled off of Amazon S3 at build time. This means the sole `MDXComponents` instance needs to be compatible with both the current set of MDX files _and_ much older versions. For this reason, even if you update all callsites in the current MDX files after making a breaking change to a component, you will still break the docs site-- there are also callsites in the older docs versions archived on S3 that you cannot update.
 
+### Images
+
+#### General
 
-### Embedding images
 
-NextJS provides a special `Image` component that optimizes image-loading in production. We shadow and wrap this component with our own `Image` component defined in `MDXComponents`, which (1) adjusts the source path to account for version (images that are referenced in older doc versions but no longer present in `next/public/images` need to be pulled from S3); (2) wraps the image in a `Zoom` component, allowing image zoom on click.
 
+Images:
+
+- Can be sourced from either remote URLs or the site’s static assets (`/docs/next/public/images`)
+- Are used in MDX files with either Markdown or HTML syntax
+- Should have descriptive `alt` text
+- Should have descriptive file names. For example: `dagit-asset-catalog-stale-assets.png`
+- Should be used sparingly and with intention, especially for UI screenshots (of Dagit or any other application). Being selective helps reduce the number of outdated and potentially confusing images in the docs.
 
-Images may be sourced from either remote URLs or the site's static assets, which live in `next/public`. Paths to static images are rooted at `next/public`, e.g. `/images/some-image.png` refers to `next/public/images/some-image.png`. The vast majority of images used in the site are stored in `next/public/images` (see [Images and screenshots](#images-and-screenshots)).
 
+Paths to static images are rooted at `/next/public`, e.g. `/images/some-image.png` refers to `/next/public/images/some-image.png`. The vast majority of images used in the site are stored in `/next/public/images`.
+
+Markdown or HTML syntax can be used to add images to MDX files. For example:
+
+```markdown
+## Markdown
+![Alt text](/images/some-image.png)
+
+## HTML
+<img src="/images/some-image.png" alt="Alt text" />
+```
 
-All images embedded in docs pages should use `MDXComponents.Image`, as opposed to a regular HTML `img` tag. Annoyingly, this component (due to the demands of NextJS `Image`) requires an explicit width and height to be set. Fortunately, we can automate the pulling of this information off the file. You can add simple `<img>` tags when writing docs, and they will be automatically converted to `MDXComponents.Image` when running `make mdx-format`. A `300x200` image stored in `next/public/images/some-image.png` could be referenced like this:
 
+#### make mdx-format transformation
+
+When `make mdx-format` is run, all images formatted using Markdown or HTML will be converted to a React `Image` component, defined in `MDXComponents.tsx`. This custom component wraps a special NextJS `Image` component that optimizes image loading in production, and does the following:
+
+1. Adjusts the source path to account for version. Specifically, images referenced in older doc versions but no longer present in `next/public/images` need to be pulled from S3.
+2. Wraps the image in a `Zoom` component, allowing image zoom on click.
+
+Additionally, our custom `Image` component (due to the demands of NextJS `Image`) requires explicit width and height to be set for every image. When `make mdx-format` is run, the script will detect image tags, pull image widths and heights from file information, and convert them to `Image` components.
+
+For example, the above example images would be transformed to:
 
-And `make mdx-format` would transform it into this:
 
+```markdown
+<Image alt="Alt text" src="/images/some-image.png" width={300} height={200} />
+```
+
+> ‼️ A limitation of `make mdx-format` is that width and height changes aren’t detected in `Image` components.
+> This means that if an image is replaced and its dimensions have changed, you’ll need to first format the image using Markdown or HTML and then run `make mdx-format` to pull the correct dimensions.
+
+### Code snippets
+
+<details><summary>Relevant CLI commands</summary>
+
+| Command | Run location | Usage | Description |
+| --- | --- | --- | --- |
+| make black | / | Code snippets | Runs [TODO]. If you've added or edited code snippets, run immediately prior to committing changes. |
+| make isort | / | Code snippets | Runs [TODO]. If you've added or edited code snippets, run immediately prior to committing changes. |
+| make mdx-format | /docs | Content | Runs [`/next/scripts/mdx-transform.ts`][mdx-format-source], which formats MDX and React code. This includes standardizing MDX content, inserting code snippet content into code blocks, and transforming HTML `img`/Markdown images into React `Image` components. Run immediately prior to committing changes to prevent a Buildkite error.<br/><br/>**Note**: If you’ve edited code snippets, you should run `make black isort` before running this and committing changes. Our CI checks the contents of code snippets in MDX against their source files and fails if there’s a mismatch. If running `black` and `isort` does make changes to your code snippets, running `mdx-format` last ensures that there won’t be a mismatch due to formatting. |
+
+</details>
+
+#### General
+
+Code snippets:
+
+- Live in `/../examples/docs_snippets`. This is a Python package, which allows us to test the snippets and specify dependencies for them.
+- **Should not be authored directly in MDX files.** Code in MDX files is inaccessible to tooling and the Python interpreter, which makes testing and maintenance difficult.
+- Should (generally) have an accompanying test in `/../examples/docs_snippets/docs_snippets_tests`. Our CI runs these tests against the code snippets in `/../examples/docs_snippets/docs_snippets` and will fail if errors occur.
+
+To reference a code snippet, create an empty code block:
+
+````
+```python file=/concepts/assets/asset_dependency.py startafter=start_marker endbefore=end_marker
+```
+````
+
+And provide the following properties:
+
+- `file` - The file path to the code snippet file, relative to `/../examples/docs_snippets/docs_snippets`
+- `startafter` - **Optional**. Useful if including multiple snippets in a single file. This property defines the starting point of the code snippet; everything between this marker and `endbefore` will be included.
+- `endbefore` - **Optional**. Useful if including multiple snippets in a single file. This property defines the ending point of the code snippet; everything between `startafter` and this marker will be included.
+- `trim` - **Optional**. If `false`, extra whitespace at the ends of the snippet will be preserved. Whitespace is removed by default unless otherwise specified.
+- `dedent` - **Optional**. Trims the specified number of leading spaces from each line in the snippet. For example, `dedent=4` would trim `4` spaces from each line in the snippet. This is useful for showing an isolated method (indented in a class body) as a snippet.
+
+The above example points to the following section of `/../examples/docs_snippets/docs_snippets/concepts/assets/asset_dependency.py`:
+
+https://github.com/dagster-io/dagster/blob/b2b5f563069d476f519da5f188b47ba14c014495/examples/docs_snippets/docs_snippets/concepts/assets/asset_dependency.py#L5-L16
+
+#### make mdx-format transformation
+
+When you run `make mdx-format`, the script will:
+
+- Inject the referenced snippet into the code block. **Note:** Existing code block content will be overwritten, so don’t iterate on your code in an MDX file, or you’ll lose it!
+- Remove extra whitespace from the ends of the snippet
+- Only inject the code between the `startafter` and `endbefore` markers, if those properties were provided
+
+### Callouts
+
+Callouts are useful for calling attention to small chunks of content. There are currently two types of callout components you can use: `Note` and `Warning`.
+
+`Note` is suitable for calling attention to info that is helpful or important but will not result in something breaking if ignored. For example, indicating a page is specific to Dagster Cloud.
+
+```markdown
+<Note>
+  Content goes here. Any formatting must be done with HTML.
+</Note>
+
+### Callouts
+
+Callouts are useful for calling attention to small chunks of content. There are currently two types of callout components you can use: `Note` and `Warning`.
+
+`Note` is suitable for calling attention to info that is helpful or important but will not result in something breaking if ignored. For example, indicating a page is specific to Dagster Cloud.
+
+```markdown
+<Note>
+  Content goes here. Any formatting must be done with HTML.
+</Note>
+```
+
+Unlike `Note`, `Warning` should be used sparingly and only when absolutely necessary. `Warning` is meant to convey a sense of urgency, highlighting information that the user must be aware of or they’ll have a bad time. For example, changing a setting in prod without testing.
+
+```markdown
+<Warning>
+  Don't pizza when you should french fry!
+</Warning>
+```
+
+### Article list
+
+The `ArticleList` component creates a two-column list of links, which is useful for displaying long link lists in a more tidy fashion. The `ArticleList` component accepts child `ArticleListItem` components, where each item is an individual link. For example:
+
+```markdown
+<ArticleList>
+  <ArticleListItem
+    title="Some article"
+    href="/path/to/some-article"
+  ></ArticleListItem>
+  <ArticleListItem
+    title="Another article"
+    href="#some-heading"
+  ></ArticleListItem>
+</ArticleList>
+```
+
+The `ArticleListItem` component accepts the following properties, **all of which are required**:
+
+- `title` - The display text for the link
+- `href` - A relative or external URL. Anchor links (`#some-heading`) are also acceptable.
+
+### Tabs
+
+The `TabGroup` component creates a tabbed interface, which is useful for condensing the display of lengthy content. It’s generally recommended to use tabs when presenting content that is “either-or”, **not sequential**. For example, different ways to load a code location using the `dagster` CLI.
+
+The `TabGroup` component accepts child `TabItem` components, where each item is a tab. For example:
+
+```markdown
+## Loading code locations locally
-The above points to a section of `examples/docs_snippets/docs_snippets/some/file/in/docs_snippets.py` delineated by line-comments specified by `startafter` and `endbefore` (note: both of these properties are optional):
+
+<TabGroup>
+  <TabItem name="...">
+
+... some content ...
+
+  </TabItem>
+  <TabItem name="...">
+
+... some more content ...
+
+  </TabItem>
-    ### examples/docs_snippets/docs_snippets/some/file/in/docs_snippets.py
+
+**Markdown content is supported**...
-    ... code
-    # a_start_marker
-    ... code between a `a_start_marker` and `an_end_marker` is included in the snippet
+
+But must be flush-left **and** have beginning and ending new lines to render correctly.
-    # an_end_marker
-    ... code
+
+</TabGroup>
+```
+
+### API docs
+
+#### General
+
Relevant CLI commands
+
+| Command | Run location | Usage | Description |
+| --- | --- | --- | --- |
+| make apidoc-build | /docs | Local API docs dev | Executes the Sphinx build and, if it succeeds, packs the output JSON for the docs site. |
+| make apidoc-watch-build | /docs | Local API docs dev | Re-runs `make apidoc-build` whenever an `.rst` file or the Sphinx configuration changes. |
+
+
+The API docs contain the formal specification of Dagster’s APIs. These docs are built with [Sphinx](https://www.sphinx-doc.org/en/master/) and its [autodoc extension](https://www.sphinx-doc.org/en/master/usage/extensions/autodoc.html), which we use to document Python methods and classes by parsing docstrings in the Dagster codebase.
-Note that by default, extra whitespace between the ends of your snippet and the markers will be removed before injection into the MDX code block. You can pass the additional property `trim=false` to preserve the whitespace. You can also pass `dedent=` to trim a leading number of spaces from each line in the snippet-- this is useful when you want to show e.g. an isolated method (indented in a class body) as a snippet.
+
+Updates to API docs are made to the `.rst` files in `/sphinx`. Each Dagster concept and library has its own `.rst` file. While there is some copy in these files, their main purpose is to invoke `autodoc` directives. These directives pull in docstrings from the Dagster codebase, which Sphinx uses to generate the corresponding API documentation. The end result is the JSON files in `/content/api`.
-Running `make mdx-format` will inject referenced snippets into your code blocks. This will process _all_ MDX files in `content` and overwrite any existing body of any code block with a snippet reference. So be careful not to iterate on your code blocks inline in MDX-- always edit them in `docs_snippets`.
+
+If you make updates to `.rst` files in `/sphinx`, you’ll need to rebuild the API docs to have your changes reflected on the docs site. Refer to the [Building the API docs section](#building-the-api-docs) for more info.
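+
+For instance, a docstring that `autodoc` can render might look like the following (a hypothetical sketch; the real docstrings live throughout the Dagster codebase):
+
+```python
+def load_table(name: str) -> list:
+    """Load the named table and return its rows.
+
+    Args:
+        name (str): The name of the table to load.
+
+    Returns:
+        list: The rows of the table.
+    """
+    ...
+```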
-## API docs
+
+### Formatting in reStructuredText
+
-The API documentation is authored in the `docs/sphinx` folder and built using [Sphinx](https://www.sphinx-doc.org/en/master/), a Python document generator. We use Sphinx because it has built-in functionality (the [autodoc extension](https://www.sphinx-doc.org/en/master/usage/extensions/autodoc.html)) to document Python methods and classes by parsing docstrings.
+While formatting in `.rst` (reStructuredText) files is similar to Markdown, there are some differences. Refer to the [reStructuredText formatting reference][reference-markdown-rst] for examples of commonly used formats, including bold, italics, code, and links.
+
-If you update the API documentation in the `docs/sphinx` folder, you need to rebuild the output from Sphinx in order to see your changes reflected in the documentation site. This is enforced by our CI system, which will both verify that the docs build successfully and that the build output matches what is checked into the repository. This prevents merging docs source changes without the corresponding build.
+### Building the API docs
+
-The build is a two-step process. First, `sphinx-build` builds JSON (written to `docs/sphinx/_build/json`) from the `rst` source files in `docs/sphinx` (`index.rst` and everything in `sections`) (note that this JSON merely wraps Sphinx' typical HTML output, so you can still use Sphinx extensions to transform HTML). Second, we run a supplemental script `scripts/pack_json.py` that does two things: (a) transforms the Sphinx JSON into a form legible to NextJS; (b) writes the transformed JSON to `docs/content/api`. The final output is three JSON files in `docs/content/api`: `modules.json`, `searchindex.json`, and `sections.json`.
+If you make changes to any `.rst` files in `/sphinx`, you’ll need to rebuild the API docs to have your changes reflected on the docs site. This is enforced by CI, which verifies both that the docs build successfully and that the build output matches what’s checked into the `dagster-io/dagster` repository. This prevents merging docs source changes without the corresponding build.
+
-The most convenient way to build the API docs is to run `make apidoc-build` (from `dagster/docs`). This will execute the Sphinx build step using the `sphinx` tox environment (see `tox.ini`) and then run `scripts/pack_json.py` (only if the sphinx build executed with no warnings/errors). Note that it is important to use `tox` to run Sphinx rather than your normal dev environment-- that's because our Sphinx configuration requires a different set of packages than are installed by `scripts/install_dev_python_modules.py`.
+The overall build process looks like the following, which is described in more detail following the diagram:
+
-If you are making changes to the API docs, you can use `make apidoc-watch-build` together with `make dev`. `apidoc-watch-build` uses the file watcher `watchmedo` (should be installed in your dev env from `watchdog`) to re-run `make apidoc-build` every time either RST source or the sphinx configuration file changes. If it completes successfully, the updated API doc JSON will be picked up by the NextJS file watcher for `make dev` and trigger live reload of the docs site. This is very useful for experimenting with sphinx options and extensions.
+![API docs workflow](/next/public/images/readme/api-docs-workflow.png)
+
+1. Python docstrings are added to the Dagster codebase. While not all updates to the API docs will include this step, we’re including it for completeness.
+2. Updates are made to `.rst` files in `/sphinx`.
+3. You run one of the following:
+   1. `make apidoc-build` - This command executes the Sphinx build step.
+   2. `make apidoc-watch-build` - This command is identical to `make apidoc-build`, but also starts the `watchmedo` file watcher. Every time an `.rst` file or the Sphinx configuration file is modified, `watchmedo` will automatically re-run `make apidoc-build`. If the build finishes successfully, NextJS will auto-reload your local docs server.
+4. Using the `sphinx` tox environment (`tox.ini`), Sphinx extracts docstrings from the Dagster codebase using `autodoc` directives in `.rst` files to build JSON (`/sphinx/_build/json`).
+5. Next, the `/scripts/pack_json.py` script is run. This script does two things:
+   1. Transforms the Sphinx JSON into a NextJS-friendly format
+   2. Writes the transformed JSON to `/content/api`, with the final output being three JSON files:
+      1. `modules.json` - Used to render individual API pages. Each page corresponds to a single `.rst` source file.
+      2. `searchindex.json` - Used to power the [`PyObject`][component-pyobject] component
+      3. `sections.json` - Used to render `/content/_apidocs.mdx`
 
 ### Mocking runtime dependencies
 
-The Python environment in which Sphinx runs requires all targeted modules to be available on `sys.path`.
+The Python environment in which Sphinx runs requires all targeted modules to be available on `sys.path`. This is because `autodoc` actually imports the modules during build, to access their docstrings. This means the Sphinx build process also imports anything else imported in the targeted modules. Thus, Sphinx requires the entire graph of imports discoverable from your targets to be available on `sys.path`.
+
+The simplest way to achieve this is to install all of the target packages with `pip` into the Sphinx build environment, just as you would for a runtime environment. The problem is that not all the packages you are targeting in the build can necessarily be installed into the same environment. That's often the case with our API docs: a single run of Sphinx builds the API docs for all Dagster packages, and some of those extensions can have conflicting requirements.
-The simplest way to achieve this is to just install all of the target packages with `pip` into the Sphinx build environment, just as you would for a runtime environment. The problem is that, not all the packages you are targeting in the build can necessarily be installed into the same environment. That's often the case with our API docs-- a single run of Sphinx builds the API docs for _all_ dagster packages, and some of those extensions can have conflicting requirements.
+
+However, the `autodoc_mock_imports` configuration option in `autodoc` can help in this scenario. Every package listed in `autodoc_mock_imports` will be mocked during the Sphinx build, meaning that all imports from that package will return fake objects. **Note:** `autodoc_mock_imports` shadows the actual build environment. For example, if you have `foo` installed in the environment but `foo` is also listed in `autodoc_mock_imports`, imports will return mocks.
-`autodoc` provides a configuration option that can help here: `autodoc_mock_imports`. Every package listed in `autodoc_mock_imports` will be mocked during the Sphinx build, meaning that all imports from that package will return fake objects. Note that `autodoc_mock_imports` shadows the actual build environment-- if you have `foo` installed in the environment but `foo` is also listed in `autodoc_mock_imports`, imports will return mocks.
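+
+As an illustrative sketch, the setting is just a list of module names in the Sphinx config (the entries below are hypothetical; see the real list in `/docs/sphinx/conf.py`):
+
+```python
+# docs/sphinx/conf.py (illustrative excerpt, not the actual list)
+# Every module named here is replaced with a mock object during the build,
+# so it does not need to be installed in the Sphinx environment.
+autodoc_mock_imports = [
+    "some_mocked_lib",
+    "another_heavy_dependency",
+]
+```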
+
+Ideally, we’d be able to put all dependencies of our build targets (Dagster packages) in `autodoc_mock_imports`, but this isn’t a foolproof solution. If a dependency is used at import time and it’s mocked, a Python error will occur during the Sphinx build. This is because your code will encounter a mocked object where it expects a different value from the library.
-In a simpler world, we could simply put all dependencies of our build targets (Dagster packages) in `autodoc_mock_imports`. Unfortunately, it doesn't always work. If a dependency is used at import time and it is mocked, you will get an obscure Python error during the Sphinx build, as your code encounters a mocked object where it expects a different value from the library. For example, this is likely to crash the build because the value of `SOME_CONSTANT` is actually being _used_ at import time:
+
+For example, this is likely to crash the build because the value of `SOME_CONSTANT` is actually being used at import time:
-```python
+```
 from some_mocked_lib import SOME_CONSTANT
 
 do_something(SOME_CONSTANT)
 ```
-Whereas if `SOME_CONSTANT` is merely imported, then we can get away with mocking `some_mocked_lib`.
+If `SOME_CONSTANT` were merely imported, then mocking `some_mocked_lib` would work.
-The solution used for our Sphinx environment is a compromise. We do a full editable install for any dagster package that uses any of its deps at build time. All other deps are mocked (see `autodoc_mock_imports` in `sphinx/conf.py`). This is not optimal in terms of having a lean build environment, because it brings in all other deps of a dagster package even if only one is used at import time. But it keeps our build environment simple, whereas a more targeted approach would have to cherry pick assorted deps and keep their version specifiers in sync with the corresponding dagster package.
+The solution used for our Sphinx environment is a compromise: any Dagster package that uses any of its dependencies at build time gets a full editable install, and all other dependencies are mocked. Refer to `autodoc_mock_imports` in `/docs/sphinx/conf.py` for more info.
-## Images and screenshots
+
+While not ideal for a lean build environment, as it brings in all other dependencies of a Dagster package even if only one is used at import time, this approach keeps the build environment simple. A more targeted approach would be cherry-picking assorted dependencies and keeping their version specifiers in sync with the corresponding Dagster package.
-All non-remotely-sourced images should be stored in `next/public/images`. This directory is organized in a hierarchy that matches the structure of the prose docs, which keeps clear which images are used in which docs. For instance, images used in the `concepts/ops-jobs-graphs` pages are typically stored in `images/concepts/ops-jobs-graphs`.
+
+### Site navigation
+
+The site navigation, which specifies the contents of the left sidebar, is maintained in [`/content/_navigation.json`][docs-navigation-source]. For example:
+
+https://github.com/dagster-io/dagster/blob/b2b5f563069d476f519da5f188b47ba14c014495/content/_navigation.json#L2-L25
+
+Generally, you should update the navigation when:
+
+- An MDX file is added or removed
+- An MDX file is moved to a new location
+- An API doc is added or removed
+- The title of a page substantially changes
+
+### Redirects
+
+Redirects are maintained in [`/next/util/redirectUrls.json`][docs-redirect-source]. Each entry contains a `source`, `destination`, and a `statusCode`. For example:
+
+https://github.com/dagster-io/dagster/blob/7854e62f861a80acbf3d093f3a614057c910c0f4/docs/next/util/redirectUrls.json#L2-L6
+
+When a visitor navigates to the `source` URL, they’ll automatically be redirected to the `destination` URL.
+
+Generally, you should add a redirect when:
+
+- An MDX file is moved to a new location
+- An MDX file is consolidated with another MDX file
+
+---
+
+## Releasing and versioning
+
+The docs are updated with every release, which currently happens weekly on Thursday. [docs.dagster.io/master](http://docs.dagster.io/master) is updated on every commit to `dagster-io/dagster`'s `master` branch.
+
+The difference exists because the docs use custom versioning logic to account for differences in Dagster versions:
-### Screenshots
+
+- The `master` version points to files in `/content`
+- Older versions render MDX files and images from a remote Amazon S3 bucket
+
+During every release, the newly-released content is uploaded to the S3 bucket. This results in the latest version of the docs (`docs.dagster.io`) updating on every release as opposed to every `master` push.
+
+---
+
+## Useful tools
+
+### Automating screenshots with dagit-screenshot
+
+Screenshots are image files living in `/next/public/images`, typically under a subdirectory corresponding to the page on which they appear.
+There’s no need to manually manage the screenshots in this directory. Instead, you can add a specification for your screenshot to `screenshots` and run `dagit-screenshot capture` to quasi-automatically generate the screenshot and place it in the appropriate subdirectory of `next/public/images`.
+
+All non-remotely-sourced images should be stored in `next/public/images`. This directory is organized in a hierarchy that matches the structure of the prose docs, which makes it clear which images are used in which docs. For instance, images used in the `concepts/ops-jobs-graphs` pages are typically stored in `images/concepts/ops-jobs-graphs`.
 
 Most of the site's images are screenshots of the Dagster UI. There is a semi-automated system in place to ease the generation and maintenance of screenshots. Screenshots are specified in "specs" which are stored in YAML files in the `screenshots` directory.
 
 This directory contains a "screenshot spec database", which is just a tree of YAML files that matches the hierarchical structure of `content`. These files are intended to be read by the command-line tool `dagster-ui-screenshot`, which generates and writes screenshot image files to `next/public/images`. See `dagster-ui-screenshot/README.md` for details.
-## Navigation schema
+
+---
+
+## References
+
+- [Markdown and reStructuredText formatting][reference-markdown-rst]
+- [CLI commands][reference-cli-commands]
+
+### Markdown and reStructuredText formatting
+
+The following table includes the most commonly-used formats in Markdown and reStructuredText (`.rst` files, used for API documentation). Refer to the official documentation for either language for full references: [Markdown](https://daringfireball.net/projects/markdown/syntax), [Sphinx](https://www.sphinx-doc.org/en/master/usage/restructuredtext/index.html).
+
+<table>
+  <thead>
+    <tr>
+      <th>Description</th>
+      <th>Markdown</th>
+      <th>reStructuredText</th>
+    </tr>
+  </thead>
+  <tbody>
+    <tr>
+      <td>
+
+[Italics](https://www.sphinx-doc.org/en/master/usage/restructuredtext/basics.html#inline-markup)
+
+      </td>
+      <td>
+
+`_Text_`
+
+      </td>
+      <td>
+
+`*Text*`
+
+      </td>
+    </tr>
+    <tr>
+      <td>
+
+[Bold](https://www.sphinx-doc.org/en/master/usage/restructuredtext/basics.html#inline-markup)
+
+      </td>
+      <td>
+
+`**Text**`
+
+      </td>
+      <td>
+
+`**Text**`
+
+      </td>
+    </tr>
+    <tr>
+      <td>
+
+[Code (inline)](https://www.sphinx-doc.org/en/master/usage/restructuredtext/basics.html#inline-markup)
+
+      </td>
+      <td>
+
+```
+`Text`
+```
+
+      </td>
+      <td>
+
+```
+``Text``
+```
+
+      </td>
+    </tr>
+    <tr>
+      <td>
+
+Code block
+
+      </td>
+      <td>
+
+````
+```language
+```
+````
+
+      </td>
+      <td>
+
+```
+.. code-block:: language
+```
+
+      </td>
+    </tr>
+    <tr>
+      <td>
+
+Link (relative)
+
+      </td>
+      <td>
+
+`[Link text](/path/to/page)`
+
+      </td>
+      <td>
+
+```
+`Link text </path/to/page>`_
+```
+
+      </td>
+    </tr>
-If you are adding a new prose page or want to update the navigation in the sidebar, update the `docs/content/_navigation.json` file. The structure is self-explanatory when looking at the sidebar.
+    <tr>
+      <td>
+
+Link (external)
+
+      </td>
+      <td>
+
+`[Link text](https://some-link.com)`
+
+      </td>
+      <td>
+
+```
+`Link text <https://some-link.com>`_
+```
+
+      </td>
+    </tr>
+    <tr>
+      <td>
+
+List (bullet)
+
+      </td>
+      <td>
+
+```
+- one
+- two
+  - nested list
+  - nested list 2
+- three
+```
+
+      </td>
+      <td>
+
+```
+* one
+* two
+
+  * nested list
+  * nested list 2
+
+* three
+```
+
+      </td>
+    </tr>
+    <tr>
+      <td>
+
+List (numbered)
+
+      </td>
+      <td>
+
+```
+1. one
+2. two
+```
+
+      </td>
+      <td>
+
+```
+1. one
+2. two
+```
+
+Or
+
+```
+#. one
+#. two
+```
+
+      </td>
+    </tr>
+    <tr>
+      <td>
+
+Quote
+
+      </td>
+      <td>
+
+```
+> Text text text
+```
+
+      </td>
+      <td>
+
+```
+   Text text text
+```
+
+      </td>
+    </tr>
+  </tbody>
+</table>
+
+
+### CLI commands
+
+| Command | Run location | Usage | Description |
+| :--- | :--- | :--- | :--- |
+| make next-dev-install | /docs | Local setup | Sets up the site's environment. Only required during initial setup and installation. |
+| make next-watch-build | /docs | Local dev | Runs the development server on `localhost:3001`. Watches MDX files in `/content` and site code in `/next`. |
+| make mdx-format | /docs | Content | Runs [/docs/next/scripts/mdx-transform.ts][mdx-format-source], which formats MDX and React code. This includes standardizing MDX content, inserting code snippet content into code blocks, and transforming HTML `img` tags and Markdown images into `Image` components. Run immediately prior to committing changes to prevent a Buildkite error.

**Note**: If you’ve edited code snippets, you should run `make black isort` before running this and committing changes. Our CI checks the contents of code snippets in MDX against their source files and fails if there’s a mismatch. If running `black` and `isort` does make changes to your code snippets, running `mdx-format` last ensures that there won’t be a mismatch due to formatting. |
+| make black | / | Code snippets | Runs Black, a code formatting CLI utility. Invokes `black` in `/Makefile`.
If you've added or edited code snippets, run it prior to running `make mdx-format`. Refer to the [Development section][docs-development] for more info. |
+| make isort | / | Code snippets | Runs `isort`, a Python code formatting utility. Invokes `isort` in `/Makefile`.
If you've added or edited code snippets, run it prior to running `make mdx-format`. Refer to the [Development section][docs-development] for more info. |
+| pytest <file>.py | | Code snippets | Runs `pytest` against the provided Python file. Used to test code snippets. |
+| yarn test | /docs/next | Content | Runs [internal test scripts][link-tests], which test the validity of internal and external links. |
+| make apidoc-build | /docs | API docs | Executes the Sphinx build and, if it succeeds, packs the output JSON for the docs site. |
+| make apidoc-watch-build | /docs | API docs | Re-runs `make apidoc-build` whenever an `.rst` file or the Sphinx configuration changes. |
+| sphinx-build | | API docs | The underlying Sphinx CLI, invoked via the `sphinx` tox environment by `make apidoc-build`. |
+
+---
+
+## Dagster University
+
+Refer to the Dagster University [README](https://github.com/dagster-io/dagster/tree/master/docs/dagster-university) for more info about working in this directory.
+
+---
 
 ## Troubleshooting
 
-### Problem: ModuleNotFoundError: No module named 'X'
+- [Error: ModuleNotFoundError: No module named 'X'](#error-modulenotfounderror-no-module-named-x)
+
+### Error: ModuleNotFoundError: No module named 'X'
+
+Related to the [API docs][api-docs].
+
+The following error occurs after a new library is added as a dependency in a Dagster package. It could have been done by you or in another commit. The issue is that the library isn't present in your `tox` environment.
-
-**Example stack trace**:
+Example stack trace:
 
 ```
 dagster/docs $ make apidoc-build
@@ -158,20 +886,84 @@
 File "/Users/jamie/dev/dagster/python_modules/dagster/dagster/_cli/workspace/cli
 ModuleNotFoundError: No module named 'tomli'
 ```
 
-This error would occur after a new library has been added as a dependency in a dagster package. It could be done by you, or by another commit. The issue is that the library is not present in your tox environment.
-
-**Solution**:
+To resolve the error, you need to rebuild your `tox` environment and bring in the missing dependency. To do this:
-
-You need to rebuild your tox environment to bring in the missing dependency.
To do this, run: + ```shell + tox -r + ``` -``` -dagster/docs $ tox -r -``` +- To rebuild and run only the Sphinx command, run the following in `/docs`: -to rebuild the environment for every tox command, or run: + ``` + tox -re sphinx + ``` + -``` -dagster/docs $ tox -re sphinx -``` + + +[docs-content]: #general-mdx-content +[docs-content-source]: https://github.com/dagster-io/dagster/tree/master/docs/content + +[docs-navigation]: #site-navigation +[docs-navigation-source]: https://github.com/dagster-io/dagster/blob/master/docs/content/_navigation.json + +[docs-redirect]: #redirects +[docs-redirect-source]: https://github.com/dagster-io/dagster/blob/master/docs/next/util/redirectUrls.json + +[docs-development]: #development +[docs-development-local]: #local +[docs-pull-requests]: #pull-requests + +[docs-page-architecture]: #page-architecture +[docs-site-structure]: #directory-structure + +[docs-code-snippets]: #code-snippets +[docs-site-technologies]: #site-technologies + + + +[api-docs]: #api-docs +[api-apidocs-source]: https://github.com/dagster-io/dagster/blob/master/docs/content/_apidocs.mdx +[api-folder]: https://github.com/dagster-io/dagster/tree/master/docs/content/api + + +[api-sphinx-source]: https://github.com/dagster-io/dagster/tree/master/docs/sphinx + + + + +[mdx-format-source]: https://github.com/dagster-io/dagster/blob/master/docs/next/scripts/mdx-transform.ts + +[next-folder]: https://github.com/dagster-io/dagster/tree/master/docs/next +[next-assets]: https://github.com/dagster-io/dagster/tree/master/docs/next/public + +[next-images]: #images +[next-images-source]: https://github.com/dagster-io/dagster/tree/master/docs/next/public/images + +[next-components]: #page-elements-components +[next-components-source]: https://github.com/dagster-io/dagster/blob/master/docs/next/components/mdx/MDXComponents.tsx + +[component-article-list]: #article-list +[component-callout]: #callouts +[component-code-snippet]: #code-snippets +[component-image]: #images +[component-pyobject]: #pyobject +[component-tab]: #tabs + +[next-page-tsx]: https://github.com/dagster-io/dagster/blob/master/docs/next/pages/%5B...page%5D.tsx +[next-versioned-content]: https://github.com/dagster-io/dagster/tree/master/docs/next/.versioned_content + +[link-tests]: https://github.com/dagster-io/dagster/tree/master/docs/next/__tests__ + + + + +[dagit-screenshot-source]: https://github.com/dagster-io/dagster/tree/master/docs/dagit-screenshot +[dagit-screenshot-config]: https://github.com/dagster-io/dagster/tree/master/docs/screenshots + + -to rebuild and run just the sphinx command +[reference-cli-commands]: #cli-commands +[reference-markdown-rst]: #markdown-and-restructuredtext-formatting \ No newline at end of file diff --git a/docs/content/_apidocs.mdx b/docs/content/_apidocs.mdx index 54d72b8d9383b..b89795bba5629 100644 --- a/docs/content/_apidocs.mdx +++ b/docs/content/_apidocs.mdx @@ -35,14 +35,11 @@ APIs from the core `dagster` package, divided roughly by topic: - Software-defined Assets + Asset definitions - APIs to define data asset's using Dagster's{" "} - - Software-defined Assets - - . + APIs to define data + assets. @@ -59,8 +56,7 @@ APIs from the core `dagster` package, divided roughly by topic: Schedules & Sensors - APIs to define{" "} - schedules{" "} + APIs to define schedules{" "} and sensors{" "} that initiate job execution, as well as some built-in helpers for common cases. 
@@ -209,7 +205,7 @@ APIs from the core `dagster` package, divided roughly by topic: APIs for working with the{" "} - Dagster Pipes protocol from the + Dagster Pipes protocol from the orchestration side. @@ -242,21 +238,6 @@ APIs from the core `dagster` package, divided roughly by topic: machinery, storage, schedulers. - - - Job-level versioning & memoization{" "} - - - - - Deprecated in favor of{" "} - - asset versioning - - - . Code versioning and memoization of previous outputs based upon that versioning. - - Repositories @@ -313,16 +294,6 @@ Dagster also provides a growing set of optional add-on libraries to integrate wi Dagster integrations to run Airbyte jobs. - - - Airflow ( - dagster-airflow) - - - Tools for compiling Dagster jobs to Airflow DAGs, and for ingesting - Airflow DAGs to represent them in Dagster. - - AWS ( @@ -352,8 +323,8 @@ Dagster also provides a growing set of optional add-on libraries to integrate wi - Celery+Docker ( - dagster-celery-docker) + Celery & Docker{" "} + (dagster-celery-docker) Provides an executor that lets Celery workers execute in Docker @@ -362,8 +333,8 @@ Dagster also provides a growing set of optional add-on libraries to integrate wi - Celery+Kubernetes ( - dagster-celery-k8s) + Celery & Kubernetes{" "} + (dagster-celery-k8s) {" "} @@ -439,25 +410,34 @@ Dagster also provides a growing set of optional add-on libraries to integrate wi - DuckDB+Pandas ( - dagster-duckdb-pandas) + DuckDB & Pandas{" "} + (dagster-duckdb-pandas) Provides support for storing Pandas DataFrames in DuckDB. - DuckDB+Polars ( - dagster-duckdb-polars) + DuckDB & Polars{" "} + (dagster-duckdb-polars) Provides support for storing Polars DataFrames in DuckDB. - DuckDB+PySpark{" "} + + DuckDB & PySpark + {" "} (dagster-duckdb-pyspark) Provides support for storing PySpark DataFrames in DuckDB. + + + Embedded ELT ( + dagster-embedded-elt) + + Provides support for running embedded ELT within Dagster + Fivetran ( @@ -484,7 +464,7 @@ Dagster also provides a growing set of optional add-on libraries to integrate wi - GCP+Pandas ( + GCP & Pandas ( dagster-gcp-pandas) @@ -494,7 +474,7 @@ Dagster also provides a growing set of optional add-on libraries to integrate wi - GCP+PySpark ( + GCP & PySpark ( dagster-gcp-pyspark) @@ -542,6 +522,16 @@ Dagster also provides a growing set of optional add-on libraries to integrate wi Provides components for deploying Dagster to Kubernetes. + + + Looker ( + dagster-looker) + + + Provides an integration to represent a Looker project as a graph of + assets. + + Microsoft Teams ( @@ -602,6 +592,15 @@ Dagster also provides a growing set of optional add-on libraries to integrate wi Provides support for sending Dagster logs to Papertrail. + + + Polars ( + dagster-polars) + + + Provides support for saving and loading Polars DataFrames in Dagster. 
+ + PostgreSQL ( @@ -649,7 +648,7 @@ Dagster also provides a growing set of optional add-on libraries to integrate wi - Snowflake+Pandas + Snowflake & Pandas {" "} (dagster-snowflake-pandas) @@ -658,7 +657,7 @@ Dagster also provides a growing set of optional add-on libraries to integrate wi - Snowflake+PySpark + Snowflake & PySpark {" "} (dagster-snowflake-pyspark) diff --git a/docs/content/_navigation.json b/docs/content/_navigation.json index de0750b36b4f2..452a95e02bffd 100644 --- a/docs/content/_navigation.json +++ b/docs/content/_navigation.json @@ -10,8 +10,8 @@ "path": "/getting-started/what-why-dagster" }, { - "title": "Hello, Dagster!", - "path": "/getting-started/hello-dagster" + "title": "Quickstart", + "path": "/getting-started/quickstart" }, { "title": "Installation", @@ -54,11 +54,7 @@ "path": "/tutorial/scheduling-your-pipeline" }, { - "title": "Part 6: Using Dagster to save your data", - "path": "/tutorial/saving-your-data" - }, - { - "title": "Part 7: Connecting to external services", + "title": "Part 6: Connecting to external services", "path": "/tutorial/connecting-to-external-services" }, { @@ -76,15 +72,15 @@ "title": "Assets", "children": [ { - "title": "Software-defined Assets", + "title": "Asset definitions", "path": "/concepts/assets/software-defined-assets" }, { - "title": "Graph-backed Assets", + "title": "Graph-backed asset definitions", "path": "/concepts/assets/graph-backed-assets" }, { - "title": "Multi-assets", + "title": "Multi-asset definitions", "path": "/concepts/assets/multi-assets" }, { @@ -100,11 +96,25 @@ "path": "/concepts/assets/asset-selection-syntax" }, { - "title": "Asset checks (Experimental)", - "path": "/concepts/assets/asset-checks" + "title": "Asset checks", + "path": "/concepts/assets/asset-checks", + "children": [ + { + "title": "Defining & executing checks", + "path": "/concepts/assets/asset-checks/define-execute-asset-checks" + }, + { + "title": "Subsetting asset checks", + "path": "/concepts/assets/asset-checks/subsetting-asset-checks" + }, + { + "title": "Checking data freshness", + "path": "/concepts/assets/asset-checks/checking-for-data-freshness" + } + ] }, { - "title": "External assets (Experimental)", + "title": "External assets", "path": "/concepts/assets/external-assets" } ] @@ -119,15 +129,58 @@ }, { "title": "Schedules", - "path": "/concepts/partitions-schedules-sensors/schedules" + "path": "/concepts/automation/schedules", + "children": [ + { + "title": "Overview", + "path": "/concepts/automation/schedules" + }, + { + "title": "Automating assets", + "path": "/concepts/automation/schedules/automating-assets-schedules-jobs" + }, + { + "title": "Automating ops", + "path": "/concepts/automation/schedules/automating-ops-schedules-jobs" + }, + { + "title": "Examples", + "path": "/concepts/automation/schedules/examples" + }, + { + "title": "Partitioned schedules", + "path": "/concepts/automation/schedules/partitioned-schedules" + }, + { + "title": "Customizing timezones", + "path": "/concepts/automation/schedules/customizing-executing-timezones" + }, + { + "title": "Testing", + "path": "/concepts/automation/schedules/testing" + }, + { + "title": "Troubleshooting", + "path": "/concepts/automation/schedules/troubleshooting" + } + ] }, { "title": "Sensors", "path": "/concepts/partitions-schedules-sensors/sensors" }, { - "title": "Auto-materialization policies", - "path": "/concepts/assets/asset-auto-execution" + "title": "Declarative Automation (Experimental)", + "children": [ + { + "title": "Overview", + "path": 
"/concepts/automation/declarative-automation" + }, + { + "title": "Customizing automation conditions", + "path": "/concepts/automation/declarative-automation/customizing-automation-conditions" + } + ] }, { "title": "Asset Sensors", @@ -194,7 +247,13 @@ }, { "title": "Dagster UI", - "path": "/concepts/webserver/ui" + "path": "/concepts/webserver/ui", + "children": [ + { + "title": "User settings & preferences", + "path": "/concepts/webserver/ui-user-settings" + } + ] }, { "title": "Logging", @@ -268,10 +327,80 @@ { "title": "Job execution", "path": "/concepts/ops-jobs-graphs/job-execution" + } + ] + }, + { + "title": "Metadata & tags", + "path": "/concepts/metadata-tags", + "children": [ + { + "title": "Asset metadata", + "path": "/concepts/metadata-tags/asset-metadata" + }, + { + "title": "Column-level lineage", + "path": "/concepts/metadata-tags/asset-metadata/column-level-lineage" }, { - "title": "Job metadata & tags", - "path": "/concepts/ops-jobs-graphs/metadata-tags" + "title": "Kind tags", + "path": "/concepts/metadata-tags/kind-tags" + }, + { + "title": "Op job metadata", + "path": "/concepts/metadata-tags/op-job-metadata" + }, + { + "title": "Tags", + "path": "/concepts/metadata-tags/tags" + } + ] + }, + { + "title": "Dagster Pipes", + "path": "/concepts/dagster-pipes", + "children": [ + { + "title": "Dagster Pipes tutorial", + "path": "/concepts/dagster-pipes/subprocess", + "children": [ + { + "title": "Part 1: Define a Dagster asset", + "path": "/concepts/dagster-pipes/subprocess/create-subprocess-asset" + }, + { + "title": "Part 2: Modify external code", + "path": "/concepts/dagster-pipes/subprocess/modify-external-code" + }, + { + "title": "Dagster Pipes + subprocess reference", + "path": "/concepts/dagster-pipes/subprocess/reference" + } + ] + }, + { + "title": "Dagster Pipes + AWS ECS", + "path": "/concepts/dagster-pipes/aws-ecs" + }, + { + "title": "Dagster Pipes + AWS Glue", + "path": "/concepts/dagster-pipes/aws-glue" + }, + { + "title": "Dagster Pipes + AWS Lambda", + "path": "/concepts/dagster-pipes/aws-lambda" + }, + { + "title": "Dagster Pipes + Databricks", + "path": "/concepts/dagster-pipes/databricks" + }, + { + "title": "Dagster Pipes + Kubernetes", + "path": "/concepts/dagster-pipes/kubernetes" + }, + { + "title": "Details and customization", + "path": "/concepts/dagster-pipes/dagster-pipes-details-and-customization" } ] }, @@ -314,360 +443,435 @@ ] }, { - "title": "Deployment", - "icon": "Deployment", - "path": "/deployment", + "title": "Dagster+", + "icon": "Plus", + "path": "/dagster-plus", "children": [ { - "title": "Open Source", - "path": "/deployment/open-source", + "title": "Getting started", + "path": "/dagster-plus/getting-started" + }, + { + "title": "Deployment types", + "path": "/dagster-plus/deployment", "children": [ { - "title": "Overview and architecture", - "path": "/deployment/overview" + "title": "Serverless", + "path": "/dagster-plus/deployment/serverless" }, { - "title": "Concepts", - "path": "/deployment/concepts", + "title": "Hybrid", + "path": "/dagster-plus/deployment/hybrid", "children": [ { - "title": "Dagster instance", - "path": "/deployment/dagster-instance" + "title": "All agents", + "path": "/dagster-plus/deployment/agents" + }, + { + "title": "Amazon ECS", + "path": "/dagster-plus/deployment/agents/amazon-ecs", + "children": [ + { + "title": "Setup in new VPC", + "path": "/dagster-plus/deployment/agents/amazon-ecs/creating-ecs-agent-new-vpc" + }, + { + "title": "Setup in existing VPC", + "path": 
"/dagster-plus/deployment/agents/amazon-ecs/creating-ecs-agent-existing-vpc" + }, + { + "title": "Manual provision", + "path": "/dagster-plus/deployment/agents/amazon-ecs/manually-provisioning-ecs-agent" + }, + { + "title": "Configuration", + "path": "/dagster-plus/deployment/agents/amazon-ecs/configuration-reference" + }, + { + "title": "Upgrade CloudFormation", + "path": "/dagster-plus/deployment/agents/amazon-ecs/upgrading-cloudformation-template" + } + ] }, { - "title": "Dagster daemon", - "path": "/deployment/dagster-daemon" + "title": "Docker", + "path": "/dagster-plus/deployment/agents/docker", + "children": [ + { + "title": "Setup", + "path": "/dagster-plus/deployment/agents/docker/configuring-running-docker-agent" + }, + { + "title": "Configuration", + "path": "/dagster-plus/deployment/agents/docker/configuration-reference" + } + ] }, { - "title": "Run launchers", - "path": "/deployment/run-launcher" + "title": "Kubernetes", + "path": "/dagster-plus/deployment/agents/kubernetes", + "children": [ + { + "title": "Setup", + "path": "/dagster-plus/deployment/agents/kubernetes/configuring-running-kubernetes-agent" + }, + { + "title": "Configuration", + "path": "/dagster-plus/deployment/agents/kubernetes/configuration-reference" + } + ] }, { - "title": "Executors", - "path": "/deployment/executors" + "title": "Local", + "path": "/dagster-plus/deployment/agents/local" }, { - "title": "Run coordinators", - "path": "/deployment/run-coordinator" + "title": "Custom configuration", + "path": "/dagster-plus/deployment/agents/customizing-configuration" }, { - "title": "Run monitoring", - "path": "/deployment/run-monitoring" + "title": "Multiple agents", + "path": "/dagster-plus/deployment/agents/running-multiple-agents" }, { - "title": "Run retries", - "path": "/deployment/run-retries" + "title": "Setting environment variables", + "path": "/dagster-plus/managing-deployments/setting-environment-variables-agents" } ] + } + ] + }, + { + "title": "Organization settings", + "path": "/dagster-plus/account", + "children": [ + { + "title": "Tokens", + "path": "/dagster-plus/account/managing-user-agent-tokens" + } + ] + }, + { + "title": "Authentication & users", + "path": "/dagster-plus/account/authentication", + "children": [ + { + "title": "Managing users", + "path": "/dagster-plus/account/managing-users" + }, + { + "title": "Managing teams", + "path": "/dagster-plus/account/managing-users/managing-teams" }, { - "title": "Guides", - "path": "/deployment/guides", + "title": "User roles & permissions", + "path": "/dagster-plus/account/managing-users/managing-user-roles-permissions" + }, + { + "title": "SSO and provisioning", "children": [ { - "title": "Running Dagster locally", - "path": "/guides/running-dagster-locally" + "title": "SCIM provisioning", + "path": "/dagster-plus/account/authentication/utilizing-scim-provisioning" + }, + { + "title": "SSO for Azure Active Directory", + "path": "/dagster-plus/account/authentication/setting-up-azure-ad-saml-sso" }, { - "title": "Running Dagster as a service", - "path": "/deployment/guides/service" + "title": "SSO for Google Workspace", + "path": "/dagster-plus/account/authentication/setting-up-google-workspace-saml-sso" }, { - "title": "Deploying with Helm", - "path": "/deployment/guides/kubernetes", + "title": "Okta", "children": [ { - "title": "Helm", - "path": "/deployment/guides/kubernetes/deploying-with-helm" - }, - { - "title": "Celery + Helm", - "path": "/deployment/guides/kubernetes/deploying-with-helm-advanced" + "title": "SSO", + "path": 
"/dagster-plus/account/authentication/okta/saml-sso" }, { - "title": "Customzing deployments", - "path": "/deployment/guides/kubernetes/customizing-your-deployment" - }, - { - "title": "Migrating instances while upgrading", - "path": "/deployment/guides/kubernetes/how-to-migrate-your-instance" + "title": "SCIM provisioning", + "path": "/dagster-plus/account/authentication/okta/scim-provisioning" } ] }, { - "title": "Deploying to Docker", - "path": "/deployment/guides/docker" + "title": "SSO for OneLogin", + "path": "/dagster-plus/account/authentication/setting-up-onelogin-saml-sso" }, { - "title": "Deploying to Amazon Web Services", - "path": "/deployment/guides/aws" - }, - { - "title": "Deploying to Google Cloud Platform", - "path": "/deployment/guides/gcp" - }, - { - "title": "Executing Dagster with Celery", - "path": "/deployment/guides/celery" - }, - { - "title": "Executing Dagster with Dask", - "path": "/deployment/guides/dask" + "title": "SSO for PingOne", + "path": "/dagster-plus/account/authentication/setting-up-pingone-saml-sso" } ] } ] }, { - "title": "Cloud", - "path": "/dagster-cloud", + "title": "Managing deployments", + "path": "/dagster-plus/managing-deployments", "children": [ { - "title": "Getting started", - "path": "/dagster-cloud/getting-started" + "title": "Management", + "path": "/dagster-plus/managing-deployments/managing-deployments" }, { - "title": "Deployment types", - "path": "/dagster-cloud/deployment", + "title": "Alerts", "children": [ { - "title": "Serverless", - "path": "/dagster-cloud/deployment/serverless" + "title": "Overview", + "path": "/dagster-plus/managing-deployments/alerts" }, { - "title": "Hybrid", - "path": "/dagster-cloud/deployment/hybrid", - "children": [ - { - "title": "All agents", - "path": "/dagster-cloud/deployment/agents" - }, - { - "title": "Amazon ECS", - "path": "/dagster-cloud/deployment/agents/amazon-ecs", - "children": [ - { - "title": "Setup in new VPC", - "path": "/dagster-cloud/deployment/agents/amazon-ecs/creating-ecs-agent-new-vpc" - }, - { - "title": "Setup in existing VPC", - "path": "/dagster-cloud/deployment/agents/amazon-ecs/creating-ecs-agent-existing-vpc" - }, - { - "title": "Manual provision", - "path": "/dagster-cloud/deployment/agents/amazon-ecs/manually-provisioning-ecs-agent" - }, - { - "title": "Configuration", - "path": "/dagster-cloud/deployment/agents/amazon-ecs/configuration-reference" - }, - { - "title": "Upgrade CloudFormation", - "path": "/dagster-cloud/deployment/agents/amazon-ecs/upgrading-cloudformation-template" - } - ] - }, - { - "title": "Docker", - "path": "/dagster-cloud/deployment/agents/docker", - "children": [ - { - "title": "Setup", - "path": "/dagster-cloud/deployment/agents/docker/configuring-running-docker-agent" - }, - { - "title": "Configuration", - "path": "/dagster-cloud/deployment/agents/docker/configuration-reference" - } - ] - }, - { - "title": "Kubernetes", - "path": "/dagster-cloud/deployment/agents/kubernetes", - "children": [ - { - "title": "Setup", - "path": "/dagster-cloud/deployment/agents/kubernetes/configuring-running-kubernetes-agent" - }, - { - "title": "Configuration", - "path": "/dagster-cloud/deployment/agents/kubernetes/configuration-reference" - } - ] - }, - { - "title": "Local", - "path": "/dagster-cloud/deployment/agents/local" - }, - { - "title": "Custom configuration", - "path": "/dagster-cloud/deployment/agents/customizing-configuration" - }, - { - "title": "Multiple agents", - "path": "/dagster-cloud/deployment/agents/running-multiple-agents" - }, - { - "title": 
"Setting environment variables", - "path": "/dagster-cloud/managing-deployments/setting-environment-variables-dagster-cloud-agents" - } - ] + "title": "Managing alerts in the UI", + "path": "/dagster-plus/managing-deployments/alerts/managing-alerts-in-ui" + }, + { + "title": "Managing alerts with the CLI", + "path": "/dagster-plus/managing-deployments/alerts/managing-alerts-cli" + }, + { + "title": "Email", + "path": "/dagster-plus/managing-deployments/alerts/email" + }, + { + "title": "Microsoft Teams", + "path": "/dagster-plus/managing-deployments/alerts/microsoft-teams" + }, + { + "title": "Slack", + "path": "/dagster-plus/managing-deployments/alerts/slack" + }, + { + "title": "PagerDuty", + "path": "/dagster-plus/managing-deployments/alerts/pagerduty" } ] }, { - "title": "Organization settings", - "path": "/dagster-cloud/account", + "title": "Environment variables & secrets", "children": [ { - "title": "Tokens", - "path": "/dagster-cloud/account/managing-user-agent-tokens" + "title": "Overview and setup", + "path": "/dagster-plus/managing-deployments/environment-variables-and-secrets" + }, + { + "title": "Setup for Hybrid agents", + "path": "/dagster-plus/managing-deployments/setting-environment-variables-agents" + }, + { + "title": "Reserved environment variables", + "path": "/dagster-plus/managing-deployments/reserved-environment-variables" } ] }, { - "title": "Authentication & users", - "path": "/dagster-cloud/account/authentication", + "title": "Settings", + "path": "/dagster-plus/managing-deployments/deployment-settings-reference" + }, + { + "title": "Code locations", + "path": "/dagster-plus/managing-deployments/code-locations", "children": [ { - "title": "Managing users", - "path": "/dagster-cloud/account/managing-users" + "title": "dagster_cloud.yaml", + "path": "/dagster-plus/managing-deployments/dagster-cloud-yaml" + } + ] + }, + { + "title": "Branch deployments (CI)", + "path": "/dagster-plus/managing-deployments/branch-deployments", + "children": [ + { + "title": "Overview", + "path": "/dagster-plus/managing-deployments/branch-deployments" }, { - "title": "Managing teams", - "path": "/dagster-cloud/account/managing-users/managing-teams" + "title": "Using with GitHub", + "path": "/dagster-plus/managing-deployments/branch-deployments/using-branch-deployments-with-github" }, { - "title": "User roles and permissions", - "path": "/dagster-cloud/account/managing-users/managing-user-roles-permissions" + "title": "Using with Gitlab", + "path": "/dagster-plus/managing-deployments/branch-deployments/using-branch-deployments-with-gitlab" }, { - "title": "SSO and provisioning", - "children": [ - { - "title": "SCIM provisioning", - "path": "/dagster-cloud/account/authentication/utilizing-scim-provisioning" - }, - { - "title": "SSO for Azure Active Directory", - "path": "/dagster-cloud/account/authentication/setting-up-azure-ad-saml-sso" - }, - { - "title": "SSO for Google Workspace", - "path": "/dagster-cloud/account/authentication/setting-up-google-workspace-saml-sso" - }, - { - "title": "Okta", - "children": [ - { - "title": "SSO", - "path": "/dagster-cloud/account/authentication/okta/saml-sso" - }, - { - "title": "SCIM provisioning", - "path": "/dagster-cloud/account/authentication/okta/scim-provisioning" - } - ] - }, - { - "title": "SSO for OneLogin", - "path": "/dagster-cloud/account/authentication/setting-up-onelogin-saml-sso" - }, - { - "title": "SSO for PingOne", - "path": "/dagster-cloud/account/authentication/setting-up-pingone-saml-sso" - } - ] + "title": "Using with 
dagster-plus CLI", + "path": "/dagster-plus/managing-deployments/branch-deployments/using-branch-deployments" + }, + { + "title": "Change Tracking", + "path": "/dagster-plus/managing-deployments/branch-deployments/change-tracking" } ] }, { - "title": "Managing deployments", - "path": "/dagster-cloud/managing-deployments", + "title": "Compute logs & error messages", + "path": "/dagster-plus/managing-deployments/controlling-logs" + } + ] + }, + { + "title": "Best practices", + "path": "/dagster-plus/best-practices", + "children": [ + { + "title": "Managing multiple projects & teams", + "path": "/dagster-plus/best-practices/managing-multiple-projects-and-teams" + } + ] + }, + { + "title": "Insights", + "path": "/dagster-plus/insights", + "children": [ + { + "title": "Integrating asset metadata", + "path": "/dagster-plus/insights/asset-metadata" + }, + { + "title": "Integrating external metrics", + "path": "/dagster-plus/insights/integrating-external-metrics", "children": [ { - "title": "Management", - "path": "/dagster-cloud/managing-deployments/managing-deployments" + "title": "Google BigQuery", + "path": "/dagster-plus/insights/integrating-bigquery" }, { - "title": "Alerts", - "path": "/dagster-cloud/managing-deployments/setting-up-alerts" + "title": "Google BigQuery & dbt", + "path": "/dagster-plus/insights/integrating-bigquery-and-dbt" }, { - "title": "Environment variables & secrets", - "children": [ - { - "title": "Overview and setup", - "path": "/dagster-cloud/managing-deployments/environment-variables-and-secrets" - }, - { - "title": "Setup for Hybrid agents", - "path": "/dagster-cloud/managing-deployments/setting-environment-variables-dagster-cloud-agents" - } - ] - }, - { - "title": "Settings", - "path": "/dagster-cloud/managing-deployments/deployment-settings-reference" - }, - { - "title": "Code locations", - "path": "/dagster-cloud/managing-deployments/code-locations", - "children": [ - { - "title": "dagster_cloud.yaml", - "path": "/dagster-cloud/managing-deployments/dagster-cloud-yaml" - } - ] + "title": "Snowflake", + "path": "/dagster-plus/insights/integrating-snowflake" }, { - "title": "Branch deployments (CI)", - "path": "/dagster-cloud/managing-deployments/branch-deployments", - "children": [ - { - "title": "Using with GitHub", - "path": "/dagster-cloud/managing-deployments/branch-deployments/using-branch-deployments-with-github" - }, - { - "title": "Using with Gitlab", - "path": "/dagster-cloud/managing-deployments/branch-deployments/using-branch-deployments-with-gitlab" - }, - { - "title": "Using with dagster-cloud CLI", - "path": "/dagster-cloud/managing-deployments/branch-deployments/using-branch-deployments" - } - ] + "title": "Snowflake & dbt", + "path": "/dagster-plus/insights/integrating-snowflake-and-dbt" } ] }, { - "title": "Insights", - "path": "/dagster-cloud/insights", + "title": "Exporting metrics", + "path": "/dagster-plus/insights/exporting-insights-metrics" + } + ] + }, + { + "title": "dagster-cloud CLI", + "path": "/dagster-plus/managing-deployments/dagster-plus-cli" + }, + { + "title": "References", + "children": [ + { + "title": "CI/CD files", + "path": "/dagster-plus/references/ci-cd-file-reference" + }, + { + "title": "dagster_cloud.yaml", + "path": "/dagster-plus/managing-deployments/dagster-cloud-yaml" + } + ] + } + ] + }, + { + "title": "Open Source Deployment", + "icon": "Deployment", + "path": "/deployment", + "children": [ + { + "title": "Overview and architecture", + "path": "/deployment/overview" + }, + { + "title": "Concepts", + "path": 
"/deployment/concepts", + "children": [ + { + "title": "Dagster instance", + "path": "/deployment/dagster-instance" + }, + { + "title": "Dagster daemon", + "path": "/deployment/dagster-daemon" + }, + { + "title": "Run launchers", + "path": "/deployment/run-launcher" + }, + { + "title": "Executors", + "path": "/deployment/executors" + }, + { + "title": "Run coordinators", + "path": "/deployment/run-coordinator" + }, + { + "title": "Run monitoring", + "path": "/deployment/run-monitoring" + }, + { + "title": "Run retries", + "path": "/deployment/run-retries" + } + ] + }, + { + "title": "Guides", + "path": "/deployment/guides", + "children": [ + { + "title": "Running Dagster locally", + "path": "/guides/running-dagster-locally" + }, + { + "title": "Running Dagster as a service", + "path": "/deployment/guides/service" + }, + { + "title": "Deploying with Helm", + "path": "/deployment/guides/kubernetes", "children": [ { - "title": "Integrating external metrics", - "path": "/dagster-cloud/insights/integrating-external-metrics", - "children": [ - { - "title": "Direct Snowflake queries", - "path": "/dagster-cloud/insights/integrating-snowflake" - }, - { - "title": "Snowflake + dbt", - "path": "/dagster-cloud/insights/integrating-snowflake-and-dbt" - } - ] + "title": "Helm", + "path": "/deployment/guides/kubernetes/deploying-with-helm" + }, + { + "title": "Celery & Helm", + "path": "/deployment/guides/kubernetes/deploying-with-helm-advanced" + }, + { + "title": "Customizing deployments", + "path": "/deployment/guides/kubernetes/customizing-your-deployment" }, { - "title": "Exporting metrics", - "path": "/dagster-cloud/insights/exporting-insights-metrics" + "title": "Migrating instances while upgrading", + "path": "/deployment/guides/kubernetes/how-to-migrate-your-instance" } ] }, { - "title": "dagster-cloud CLI", - "path": "/dagster-cloud/managing-deployments/dagster-cloud-cli" + "title": "Deploying to Docker", + "path": "/deployment/guides/docker" + }, + { + "title": "Deploying to Amazon Web Services", + "path": "/deployment/guides/aws" + }, + { + "title": "Deploying to Google Cloud Platform", + "path": "/deployment/guides/gcp" + }, + { + "title": "Executing Dagster with Celery", + "path": "/deployment/guides/celery" + }, + { + "title": "Executing Dagster with Dask", + "path": "/deployment/guides/dask" } ] } @@ -683,47 +887,33 @@ "path": "/integrations/airbyte", "children": [ { - "title": "Airbyte + Dagster", + "title": "Airbyte & Dagster", "path": "/integrations/airbyte" }, { - "title": "Airbyte Cloud + Dagster", + "title": "Airbyte Cloud & Dagster", "path": "/integrations/airbyte-cloud" - }, - { - "title": "Ingestion as code", - "path": "/guides/dagster/airbyte-ingestion-as-code" } ] }, { "title": "Airflow", - "path": "/integrations/airflow", - "children": [ - { - "title": "Learning Dagster from Airflow", - "path": "/integrations/airflow/from-airflow-to-dagster" - }, - { - "title": "Migrating from Airflow", - "path": "/integrations/airflow/migrating-to-dagster" - }, - { - "title": "Reference", - "path": "/integrations/airflow/reference" - } - ] + "path": "/integrations/airflow" }, { "title": "Databricks", - "path": "/integrations/databricks" + "path": "/concepts/dagster-pipes/databricks" }, { "title": "dbt", "path": "/integrations/dbt", "children": [ { - "title": "dbt + Dagster tutorial", + "title": "Quickstart", + "path": "/integrations/dbt/quickstart" + }, + { + "title": "dbt & Dagster tutorial", "path": "/integrations/dbt/using-dbt-with-dagster", "children": [ { @@ -744,6 +934,20 @@ } ] }, + { 
+ "title": "Using dbt with Dagster+", + "path": "/integrations/dbt/using-dbt-with-dagster-plus", + "children": [ + { + "title": "Using dbt with Serverless deployments in Dagster+", + "path": "/integrations/dbt/using-dbt-with-dagster-plus/serverless" + }, + { + "title": "Using dbt with Hybrid deployments in Dagster+", + "path": "/integrations/dbt/using-dbt-with-dagster-plus/hybrid" + } + ] + }, { "title": "Reference", "path": "/integrations/dbt/reference" @@ -759,7 +963,7 @@ "path": "/integrations/deltalake", "children": [ { - "title": "Delta Lake + Dagster tutorial", + "title": "Delta Lake & Dagster tutorial", "path": "/integrations/deltalake/using-deltalake-with-dagster" }, { @@ -773,7 +977,7 @@ "path": "/integrations/duckdb", "children": [ { - "title": "DuckDB + Dagster tutorial", + "title": "DuckDB & Dagster tutorial", "path": "/integrations/duckdb/using-duckdb-with-dagster" }, { @@ -784,22 +988,28 @@ }, { "title": "Embedded ELT", - "path": "/integrations/embedded-elt" + "path": "/integrations/embedded-elt", + "children": [ + { + "title": "dlt & Dagster", + "path": "/integrations/embedded-elt/dlt" + }, + { + "title": "Sling & Dagster", + "path": "/integrations/embedded-elt/sling" + } + ] }, { "title": "Fivetran", "path": "/integrations/fivetran" }, - { - "title": "Great Expectations", - "path": "/integrations/great-expectations" - }, { "title": "Google BigQuery", "path": "/integrations/bigquery", "children": [ { - "title": "Google BigQuery + Dagster tutorial", + "title": "Google BigQuery & Dagster tutorial", "path": "/integrations/bigquery/using-bigquery-with-dagster" }, { @@ -813,7 +1023,7 @@ "path": "/integrations/dagstermill", "children": [ { - "title": "Jupyter, Papermill + Dagster tutorial", + "title": "Jupyter, Papermill & Dagster tutorial", "path": "/integrations/dagstermill/using-notebooks-with-dagster" }, { @@ -822,6 +1032,10 @@ } ] }, + { + "title": "OpenAI", + "path": "/integrations/openai" + }, { "title": "Pandas", "path": "/integrations/pandas" @@ -839,8 +1053,17 @@ "path": "/integrations/snowflake", "children": [ { - "title": "Snowflake + Dagster tutorial", - "path": "/integrations/snowflake/using-snowflake-with-dagster" + "title": "Snowflake & Dagster tutorials", + "children": [ + { + "title": "With resources", + "path": "/integrations/snowflake/using-snowflake-with-dagster" + }, + { + "title": "With I/O managers", + "path": "/integrations/snowflake/using-snowflake-with-dagster-io-managers" + } + ] }, { "title": "Reference", @@ -884,6 +1107,19 @@ } ] }, + { + "title": "Integrations", + "children": [ + { + "title": "Approaches to writing integrations", + "path": "/guides/integrations/approaches-to-writing-integrations" + }, + { + "title": "Writing a multi-asset decorator integration", + "path": "/guides/integrations/writing-a-multi-asset-decorator-integration" + } + ] + }, { "title": "Working with data assets", "children": [ @@ -941,46 +1177,6 @@ } ] }, - { - "title": "Dagster Pipes (Experimental)", - "path": "/guides/dagster-pipes", - "children": [ - { - "title": "Dagster Pipes tutorial", - "path": "/guides/dagster-pipes/subprocess", - "children": [ - { - "title": "Part 1: Define a Dagster asset", - "path": "/guides/dagster-pipes/subprocess/create-subprocess-asset" - }, - { - "title": "Part 2: Modify external code", - "path": "/guides/dagster-pipes/subprocess/modify-external-code" - }, - { - "title": "Dagster Pipes + subprocess reference", - "path": "/guides/dagster-pipes/subprocess/reference" - } - ] - }, - { - "title": "Dagster Pipes + AWS Lambda", - "path": 
"/guides/dagster-pipes/aws-lambda" - }, - { - "title": "Dagster Pipes + Databricks", - "path": "/guides/dagster-pipes/databricks" - }, - { - "title": "Dagster Pipes + Kubernetes", - "path": "/guides/dagster-pipes/kubernetes" - }, - { - "title": "Details and customization", - "path": "/guides/dagster-pipes/dagster-pipes-details-and-customization" - } - ] - }, { "title": "Version migration", "path": "/migration", @@ -988,10 +1184,19 @@ }, { "title": "Migrating to Dagster", + "path": "/guides/migrations", "children": [ + { + "title": "Learning Dagster from Airflow", + "path": "/integrations/airflow/from-airflow-to-dagster" + }, { "title": "Migrating from Airflow", - "path": "/integrations/airflow/migrating-to-dagster" + "path": "/guides/migrations/migrating-airflow-to-dagster" + }, + { + "title": "Observe your Airflow pipelines with Dagster", + "path": "/guides/migrations/observe-your-airflow-pipelines-with-dagster" } ] }, @@ -1039,13 +1244,13 @@ { "title": "Experimental features", "children": [ - { - "title": "Airbyte ingestion as code", - "path": "/guides/dagster/airbyte-ingestion-as-code" - }, { "title": "Asset versioning and caching", "path": "/guides/dagster/asset-versioning-and-caching" + }, + { + "title": "Linking to code in Dagster with code references", + "path": "/guides/dagster/code-references" } ] } @@ -1062,9 +1267,13 @@ "title": "Core", "children": [ { - "title": "Software-defined Assets", + "title": "Assets", "path": "/_apidocs/assets" }, + { + "title": "Metadata", + "path": "/_apidocs/metadata" + }, { "title": "Jobs", "path": "/_apidocs/jobs" @@ -1130,16 +1339,21 @@ "path": "/_apidocs/dynamic" }, { - "title": "Asset checks (Experimental)", + "title": "Asset checks", "path": "/_apidocs/asset-checks" }, { - "title": "External Assets (Experimental)", - "path": "/_apidocs/external-assets" - }, - { - "title": "Job versioning & memoization (Deprecated)", - "path": "/_apidocs/memoization" + "title": "External assets (Experimental)", + "children": [ + { + "title": "Instance API", + "path": "/_apidocs/external-assets" + }, + { + "title": "REST APIs", + "path": "/apidocs/external-assets-rest" + } + ] }, { "title": "Repositories", @@ -1170,10 +1384,6 @@ "title": "Airbyte (dagster-airbyte)", "path": "/_apidocs/libraries/dagster-airbyte" }, - { - "title": "Airflow (dagster-airflow)", - "path": "/_apidocs/libraries/dagster-airflow" - }, { "title": "Amazon Web Services (AWS) (dagster-aws)", "path": "/_apidocs/libraries/dagster-aws" @@ -1183,11 +1393,11 @@ "path": "/_apidocs/libraries/dagster-celery" }, { - "title": "Celery + Docker (dagster-celery-docker)", + "title": "Celery & Docker (dagster-celery-docker)", "path": "/_apidocs/libraries/dagster-celery-docker" }, { - "title": "Celery + Kubernetes (dagster-celery-k8s)", + "title": "Celery & Kubernetes (dagster-celery-k8s)", "path": "/_apidocs/libraries/dagster-celery-k8s" }, { @@ -1227,17 +1437,21 @@ "path": "/_apidocs/libraries/dagster-duckdb" }, { - "title": "DuckDB + Pandas (dagster-duckdb-pandas)", + "title": "DuckDB & Pandas (dagster-duckdb-pandas)", "path": "/_apidocs/libraries/dagster-duckdb-pandas" }, { - "title": "DuckDB + PySpark (dagster-duckdb-pyspark)", + "title": "DuckDB & PySpark (dagster-duckdb-pyspark)", "path": "/_apidocs/libraries/dagster-duckdb-pyspark" }, { - "title": "DuckDB + Polars (dagster-duckdb-polars)", + "title": "DuckDB & Polars (dagster-duckdb-polars)", "path": "/_apidocs/libraries/dagster-duckdb-polars" }, + { + "title": "Embedded ELT (dagster-embedded-elt)", + "path": 
"/_apidocs/libraries/dagster-embedded-elt" + }, { "title": "Fivetran (dagster-fivetran)", "path": "/_apidocs/libraries/dagster-fivetran" @@ -1247,11 +1461,11 @@ "path": "/_apidocs/libraries/dagster-gcp" }, { - "title": "GCP + Pandas (dagster-gcp-pandas)", + "title": "GCP & Pandas (dagster-gcp-pandas)", "path": "/_apidocs/libraries/dagster-gcp-pandas" }, { - "title": "GCP + PySpark (dagster-gcp-pyspark)", + "title": "GCP & PySpark (dagster-gcp-pyspark)", "path": "/_apidocs/libraries/dagster-gcp-pyspark" }, { @@ -1270,6 +1484,10 @@ "title": "Kubernetes (dagster-k8s)", "path": "/_apidocs/libraries/dagster-k8s" }, + { + "title": "Looker (dagster-looker)", + "path": "/_apidocs/libraries/dagster-looker" + }, { "title": "MLflow (dagster-mlflow)", "path": "/_apidocs/libraries/dagster-mlflow" @@ -1302,6 +1520,10 @@ "title": "Papertrail (dagster-papertrail)", "path": "/_apidocs/libraries/dagster-papertrail" }, + { + "title": "Polars (dagster-polars)", + "path": "/_apidocs/libraries/dagster-polars" + }, { "title": "PostgreSQL (dagster-postgres)", "path": "/_apidocs/libraries/dagster-postgres" @@ -1327,11 +1549,11 @@ "path": "/_apidocs/libraries/dagster-snowflake" }, { - "title": "Snowflake + Pandas (dagster-snowflake-pandas)", + "title": "Snowflake & Pandas (dagster-snowflake-pandas)", "path": "/_apidocs/libraries/dagster-snowflake-pandas" }, { - "title": "Snowflake + PySpark (dagster-snowflake-pyspark)", + "title": "Snowflake & PySpark (dagster-snowflake-pyspark)", "path": "/_apidocs/libraries/dagster-snowflake-pyspark" }, { diff --git a/docs/content/about/releases.mdx b/docs/content/about/releases.mdx index 7847de01ce32e..ed11d3856432a 100644 --- a/docs/content/about/releases.mdx +++ b/docs/content/about/releases.mdx @@ -1,8 +1,8 @@ --- -title: Releases and Compatibility | Dagster +title: "Releases and compatibility | Dagster Docs" --- -# Releases and Compatibility +# Releases and compatibility We follow [Semantic Versioning](https://semver.org/) for compatibility between Dagster releases. @@ -44,13 +44,28 @@ While technically the 0.y.z phase of Semantic Versioning is "anything goes", we --- -## Python Version Support +## Version compatibility within a Dagster deployment -Each Dagster release strives to support the currently active versions of python. +Dagster deployments can have multiple versions of Dagster running simultaneously. -When a new version of python is released, Dagster will work to add support once Dagster's own core dependencies have been updated to support it. +A Dagster deployment consists of a set of [code locations](https://docs.dagster.io/concepts/code-locations), as well as _host processes_: the web server and [daemon](https://docs.dagster.io/deployment/dagster-daemon). -When a version of python reaches end of life, Dagster will drop support for it at the next convenient non-patch release. +- **Code locations** within a deployment can each depend on a different Dagster version. +- **Host processes** within a deployment are expected to have the same Dagster version. That Dagster version is expected to be greater than or equal to the greatest Dagster version used by any code location within the deployment. + + - **In Dagster+ deployments**, Dagster+ automatically keeps host processes up-to-date so no user action is required. + + - **In OSS deployments**, users are expected to upgrade their host processes before upgrading the version of Dagster used in their code locations. 
+
+---
+
+## Python version support
+
+Each Dagster release strives to support the currently active versions of Python.
+
+When a new version of Python is released, Dagster will work to add support once Dagster's own core dependencies have been updated to support it. **Note**: Some external libraries may not always be compatible with the latest version of Python.
+
+When a version of Python reaches end of life, Dagster will drop support for it at the next convenient non-patch release.
 
 ---
diff --git a/docs/content/about/telemetry.mdx b/docs/content/about/telemetry.mdx
index f819c3e6f1fd5..f0d7d80cbe431 100644
--- a/docs/content/about/telemetry.mdx
+++ b/docs/content/about/telemetry.mdx
@@ -6,27 +6,15 @@ title: "Dagster telemetry | Dagster Docs"
 
 As an open source project, we collect usage statistics to better understand how users engage with Dagster and to inform development priorities. Telemetry data will motivate projects such as adding functionality in frequently-used parts of the product and will help us understand adoption of new features.
 
-The following is an example telemetry blob:
-
-```json
-{
-  "location_name_hash": "94ca34d0fb35a5612a30090cac5caef430f7ce377368177da02fd8e0535752f6",
-  "num_assets_in_repo": "1",
-  "num_dynamic_partitioned_assets_in_repo": "0",
-  "num_pipelines_in_repo": "1",
-  "num_schedules_in_repo": "0",
-  "num_sensors_in_repo": "0",
-  "pipeline_name_hash": "",
-  "repo_hash": "f17e9128abe12b4ff329425c469a7c5abc06bace32a2237848bc3a71cf9ef808",
-  "source": "dagster-webserver"
-}
-```
+We collect telemetry from both the frontend and backend. We do not collect any data processed by Dagster pipelines, and we do not collect any identifiable information about your Dagster definitions, including the names of your assets, ops, or jobs.
+
+Frontend telemetry is collected from a JavaScript bundle hosted unminified at `https://dagster.io/oss-telemetry.js`. This bundle may change over time.
 
-We will not see or store any data that is processed within ops and jobs. We will not see or store op definitions (including generated context) or job definitions (including resources).
+Backend telemetry is logged to `$DAGSTER_HOME/logs/` if `$DAGSTER_HOME` is set, or to `~/.dagster/logs/` otherwise.
 
-To see the logs we send, open `$DAGSTER_HOME/logs/` if `$DAGSTER_HOME` is set or `~/.dagster/logs/` if not set.
+Use of telemetry data is governed by the [Dagster Privacy Policy](https://dagster.io/privacy).
-If you'd like to opt-out, you can add the following to `$DAGSTER_HOME/dagster.yaml` (creating that file if necessary):
+If you’d like to opt-out, you can add the following to `$DAGSTER_HOME/dagster.yaml` (creating that file if necessary):
 
 ```yaml
 telemetry:
diff --git a/docs/content/api/modules.json.gz b/docs/content/api/modules.json.gz
index 0cec5aa26de1d..da317684c6605 100644
Binary files a/docs/content/api/modules.json.gz and b/docs/content/api/modules.json.gz differ
diff --git a/docs/content/api/searchindex.json.gz b/docs/content/api/searchindex.json.gz
index 35a1832f7d94d..d4b8681b501cc 100644
Binary files a/docs/content/api/searchindex.json.gz and b/docs/content/api/searchindex.json.gz differ
diff --git a/docs/content/api/sections.json.gz b/docs/content/api/sections.json.gz
index c4e4ebbd56cfe..baebcdc36276c 100644
Binary files a/docs/content/api/sections.json.gz and b/docs/content/api/sections.json.gz differ
diff --git a/docs/content/apidocs/external-assets-rest.mdx b/docs/content/apidocs/external-assets-rest.mdx
new file mode 100644
index 0000000000000..70395754801b9
--- /dev/null
+++ b/docs/content/apidocs/external-assets-rest.mdx
@@ -0,0 +1,763 @@
+---
+title: "External assets REST API reference | Dagster Docs"
+---
+
+# External assets REST API reference
+
+As Dagster doesn't control scheduling or materializing [external assets](/concepts/assets/external-assets), it's up to you to keep their metadata updated. Use the endpoints described in this reference to report updates for external assets back to Dagster.
+
+---
+
+## API functionality
+
+Using the External Asset APIs, you can:
+
+- Report an `AssetMaterialization` event for an external asset to Dagster
+- Report an `AssetCheckEvaluation` for an external asset to Dagster
+- Report an `AssetObservation` event for an external asset to Dagster
+
+---
+
+## Authentication
+
+Authentication is required only if requests are being made against a [Dagster+ instance](/dagster-plus). To authenticate, provide a valid [Dagster+ user token](/dagster-plus/account/managing-user-agent-tokens#managing-user-tokens) using the `Dagster-Cloud-Api-Token` header:
+
+```bash
+curl --request POST \
+    --url https://{ORGANIZATION}.dagster.cloud/{DEPLOYMENT_NAME}/report_asset_materialization/ \
+    --header 'Content-Type: application/json' \
+    --header 'Dagster-Cloud-Api-Token: {TOKEN}' \
+    --data '{
+        "asset_key": "{ASSET_KEY}",
+        "metadata": {
+            "rows": 10
+        }
+    }'
+```
+
+---
+
+## Constructing request URLs
+
+The full URL you send requests to will vary depending on how you access your Dagster instance:
+
+| Location               | URL                                              | Example request URL                                    |
+| ---------------------- | ------------------------------------------------ | ------------------------------------------------------ |
+| Local webserver        | Defaults to `localhost:3000`                     | `localhost:3000/report_asset_check`                    |
+| Dagster+               | `{ORGANIZATION}.dagster.cloud/{DEPLOYMENT_NAME}` | `https://my-org.dagster.cloud/prod/report_asset_check` |
+| Open source deployment | URL of the Dagster webserver                     | `https://dagster.my-org.com/report_asset_check`        |
+
+---
+
+## Available APIs
+
+| Endpoint                                                                  | Description                                                     |
+| ------------------------------------------------------------------------- | ---------------------------------------------------------------- |
+| [`POST /report_asset_materialization/`](#report-an-asset-materialization) | Records an `AssetMaterialization` event for an external asset.   |
+| [`POST /report_asset_check/`](#report-an-asset-check-evaluation)          | Records an `AssetCheckEvaluation` for an external asset.
+| [`POST /report_asset_observation/`](#report-an-asset-observation)         | Records an `AssetObservation` event for an external asset.       |
+
+### Report an asset materialization
+
+Records an `AssetMaterialization` event for an external asset. This event type notifies the Dagster framework that a materialized value has been produced for an asset.
+
+Using this endpoint, you could report to Dagster that an [external asset](/concepts/assets/external-assets) has been updated and include [metadata about the materialization](/concepts/metadata-tags/asset-metadata). For example, the number of updated rows, the time the update occurred, and so on.
+
+#### Resources
+
+| Property       | Description                                                                                                                                                                                                        |
+| -------------- | ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------ |
+| Method         | `POST`                                                                                                                                                                                                             |
+| Resource URL   | `/report_asset_materialization/`                                                                                                                                                                                   |
+| Authentication | Required only for Dagster+. The request header must contain the `Dagster-Cloud-Api-Token` header and a valid user token.                                                                                           |
+| Request header | `Dagster-Cloud-Api-Token` - Required if using Dagster+, e.g. `Dagster-Cloud-Api-Token: [USER_TOKEN]`. `Content-Type` - Required if the request contains a JSON body, specified as `Content-Type: application/json`. |
+| Request body   | If included, the request body must be valid JSON.                                                                                                                                                                  |
+
+#### Parameters
+
+Parameters can be passed in multiple ways and will be considered in the following order:
+
+1. URL (`asset_key` only)
+2. Request body, which must be valid JSON
+3. Query parameter
+
+| Name           | Required/Optional | Description |
+| -------------- | ----------------- | ----------- |
+| `asset_key`    | Required          | The key of the materialized asset. May be passed as a **URL path**, specified as path components after `/report_asset_materialization/`, where each `/` delimits parts of a multipart `AssetKey`; as a **JSON body** value, passed to the `AssetKey` constructor; or as a **query parameter**, which accepts a string or a JSON-encoded array for multipart keys. |
+| `metadata`     | Optional          | Arbitrary metadata about the asset, specified as key-value pairs. May be passed as a **JSON body** value, passed to the `AssetMaterialization` constructor, or as a **query parameter**, which accepts a JSON-encoded object. |
+| `data_version` | Optional          | The data version of the asset associated with the materialization. May be passed in the JSON body or as a query parameter; the value is passed to `AssetMaterialization` via tags. |
+| `description`  | Optional          | A human-readable description of the materialized value. May be passed in the JSON body or as a query parameter; the value is passed to the `AssetMaterialization` constructor. |
+| `partition`    | Optional          | The name of the partition that was materialized. May be passed in the JSON body or as a query parameter; the value is passed to the `AssetMaterialization` constructor. |
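+For instance, the optional fields above can also be passed as query parameters in a single request. A minimal Python sketch against a locally running webserver, using the `requests` library; the `my_asset` key, partition, and data version values are hypothetical placeholders:
+
+```python
+import requests
+
+# Pass optional fields as query parameters rather than a JSON body
+response = requests.post(
+    "http://localhost:3000/report_asset_materialization/my_asset",
+    params={"partition": "2024-01-01", "data_version": "v42"},
+)
+response.raise_for_status()
+```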
+
+#### Returns
+
+The API will return JSON, whether the request succeeds or fails.
+
+- `200 OK` - Response body contains an empty object: `{}`
+- `400 Bad request` - Response body contains an `error` object: `{"error": ...}`
+
+#### Examples
+
+##### Local webserver
+
+Report an asset materialization against a locally running webserver:
+
+```bash
+curl -X POST localhost:3000/report_asset_materialization/{ASSET_KEY}
+```
+
+##### Dagster+
+
+Report an asset materialization against Dagster+ with a JSON body via cURL:
+
+```bash
+curl --request POST \
+    --url https://{ORGANIZATION}.dagster.cloud/{DEPLOYMENT_NAME}/report_asset_materialization/ \
+    --header 'Content-Type: application/json' \
+    --header 'Dagster-Cloud-Api-Token: {TOKEN}' \
+    --data '{
+        "asset_key": "{ASSET_KEY}",
+        "metadata": {
+            "rows": 10
+        }
+    }'
+```
+
+Report an asset materialization against Dagster+ in Python using `requests`:
+
+```python
+import requests
+
+url = "https://{ORGANIZATION}.dagster.cloud/{DEPLOYMENT_NAME}/report_asset_materialization/"
+
+payload = {
+    "asset_key": "ASSET_KEY",
+    "metadata": {"rows": 10},
+}
+headers = {
+    "Content-Type": "application/json",
+    "Dagster-Cloud-Api-Token": "TOKEN"
+}
+
+response = requests.request("POST", url, json=payload, headers=headers)
+response.raise_for_status()
+```
+
+##### Open source deployment
+
+Report an asset materialization against an open source deployment (hosted at `DAGSTER_WEBSERVER_HOST`) in Python using `requests`:
+
+```python
+import requests
+
+url = f"{DAGSTER_WEBSERVER_HOST}/report_asset_materialization/{ASSET_KEY}"
+response = requests.request("POST", url)
+response.raise_for_status()
+```
+
+---
+
+### Report an asset check evaluation
+
+Records an `AssetCheckEvaluation` event for an external asset. This event type notifies the Dagster framework of the results of an executed check.
+
+Using this endpoint, you could report to Dagster that an [asset check](/concepts/assets/asset-checks) has been executed and include metadata about the check. For example, if the check looks for `null` values in an `id` column, you could include the number of records with `null` IDs.
+
+#### Resources
+
+| Property       | Description                                                                                                                                                                                                        |
+| -------------- | ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------ |
+| Method         | `POST`                                                                                                                                                                                                             |
+| Resource URL   | `/report_asset_check/`                                                                                                                                                                                             |
+| Authentication | Required only for Dagster+. The request header must contain the `Dagster-Cloud-Api-Token` header and a valid user token.                                                                                           |
+| Request header | `Dagster-Cloud-Api-Token` - Required if using Dagster+, e.g. `Dagster-Cloud-Api-Token: [USER_TOKEN]`. `Content-Type` - Required if the request contains a JSON body, specified as `Content-Type: application/json`. |
+| Request body   | If included, the request body must be valid JSON.                                                                                                                                                                  |
+
+#### Parameters
+
+Parameters can be passed in multiple ways and will be considered in the following order:
+
+1. URL (`asset_key` only)
+2. Request body, which must be valid JSON
+3. Query parameter
+
+| Name         | Required/Optional | Description |
+| ------------ | ----------------- | ----------- |
+| `asset_key`  | Required          | The key of the checked asset. May be passed as a **URL path**, specified as path components after `/report_asset_check/`, where each `/` delimits parts of a multipart `AssetKey`; as a **JSON body** value, passed to the `AssetKey` constructor; or as a **query parameter**, which accepts a string or a JSON-encoded array for multipart keys. |
+| `passed`     | Required          | The pass/fail result of the check. May be passed as a **JSON body** value, passed to the `AssetCheckEvaluation` constructor, or as a **query parameter**, which accepts a JSON-encoded boolean: `true` or `false`. |
+| `check_name` | Required          | The name of the check. May be passed in the JSON body or as a query parameter; the value is passed to the `AssetCheckEvaluation` constructor. |
+| `metadata`   | Optional          | Arbitrary metadata about the check, specified as key-value pairs. May be passed as a **JSON body** value, passed to the `AssetCheckEvaluation` constructor, or as a **query parameter**, which accepts a JSON-encoded object. |
+| `severity`   | Optional          | The severity of the check. Accepted values are `WARN` and `ERROR` (default). May be passed in the JSON body or as a query parameter; the value is passed to the `AssetCheckEvaluation` constructor. |
+
+#### Returns
+
+The API will return JSON, whether the request succeeds or fails.
+
+- `200 OK` - Response body contains an empty object: `{}`
+- `400 Bad request` - Response body contains an `error` object: `{"error": ...}`
+
+#### Examples
+
+##### Local webserver
+
+Report a successful asset check (`check_null_ids`) against a locally running webserver:
+
+```bash
+curl -X POST localhost:3000/report_asset_check/{ASSET_KEY}?check_name=check_null_ids&passed=true
+```
+
+##### Dagster+
+
+Report a failed asset check (`check_null_ids`) against Dagster+ with a JSON body via cURL:
+
+```bash
+curl --request POST \
+    --url https://{ORGANIZATION}.dagster.cloud/{DEPLOYMENT_NAME}/report_asset_check/ \
+    --header 'Content-Type: application/json' \
+    --header 'Dagster-Cloud-Api-Token: {TOKEN}' \
+    --data '{
+        "asset_key": "{ASSET_KEY}",
+        "check_name": "check_null_ids",
+        "passed": false,
+        "metadata": {
+            "null_rows": 3
+        }
+    }'
+```
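+The same call can also be scripted. A minimal Python sketch using the `requests` library against a locally running webserver; the `orders` asset key and `check_null_ids` check name are hypothetical placeholders:
+
+```python
+import requests
+
+# Report a passing check result for the "orders" asset
+url = "http://localhost:3000/report_asset_check/orders"
+response = requests.post(
+    url,
+    json={"check_name": "check_null_ids", "passed": True},
+)
+response.raise_for_status()
+```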
+
+---
+
+### Report an asset observation
+
+Records an `AssetObservation` event for an external asset. This event type captures metadata about an asset at a point in time and provides it to the Dagster framework. Refer to the [Asset observation](/concepts/assets/asset-observations) documentation for more information.
+
+#### Resources
+
+| Property       | Description                                                                                                                                                                                                        |
+| -------------- | ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------ |
+| Method         | `POST`                                                                                                                                                                                                             |
+| Resource URL   | `/report_asset_observation/`                                                                                                                                                                                       |
+| Authentication | Required only for Dagster+. The request header must contain the `Dagster-Cloud-Api-Token` header and a valid user token.                                                                                           |
+| Request header | `Dagster-Cloud-Api-Token` - Required if using Dagster+, e.g. `Dagster-Cloud-Api-Token: [USER_TOKEN]`. `Content-Type` - Required if the request contains a JSON body, specified as `Content-Type: application/json`. |
+| Request body   | If included, the request body must be valid JSON.                                                                                                                                                                  |
+
+#### Parameters
+
+Parameters can be passed in multiple ways and will be considered in the following order:
+
+1. URL (`asset_key` only)
+2. Request body, which must be valid JSON
+3. Query parameter
+
+| Name           | Required/Optional | Description |
+| -------------- | ----------------- | ----------- |
+| `asset_key`    | Required          | The key of the observed asset. May be passed as a **URL path**, specified as path components after `/report_asset_observation/`, where each `/` delimits parts of a multipart `AssetKey`; as a **JSON body** value, passed to the `AssetKey` constructor; or as a **query parameter**, which accepts a string or a JSON-encoded array for multipart keys. |
+| `metadata`     | Optional          | Arbitrary metadata about the asset, specified as key-value pairs. May be passed as a **JSON body** value, passed to the `AssetObservation` constructor, or as a **query parameter**, which accepts a JSON-encoded object. |
+| `data_version` | Optional          | The data version of the observed asset. May be passed in the JSON body or as a query parameter; the value is passed to `AssetObservation` via tags. |
+| `description`  | Optional          | A human-readable description of the asset or observation. May be passed in the JSON body or as a query parameter; the value is passed to the `AssetObservation` constructor. |
+| `partition`    | Optional          | The name of the partition that was observed. May be passed in the JSON body or as a query parameter; the value is passed to the `AssetObservation` constructor. |
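+Responses follow the same shape for all three endpoints, so a caller can branch on the status code. A minimal Python sketch against a locally running webserver; the `my_asset` key and `v42` data version are hypothetical placeholders:
+
+```python
+import requests
+
+url = "http://localhost:3000/report_asset_observation/my_asset"
+response = requests.post(url, json={"data_version": "v42"})
+
+if response.status_code != 200:
+    # Failed requests return a JSON body with an "error" key,
+    # as described in the Returns section below
+    print(response.json()["error"])
+```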
+
+#### Returns
+
+The API will return JSON, whether the request succeeds or fails.
+
+- `200 OK` - Response body contains an empty object: `{}`
+- `400 Bad request` - Response body contains an `error` object: `{"error": ...}`
+
+#### Examples
+
+##### Local webserver
+
+Report an asset observation with a data version against a locally running webserver:
+
+```bash
+curl -X POST localhost:3000/report_asset_observation/{ASSET_KEY}?data_version={VERSION}
+```
+
+##### Dagster+
+
+Report an asset observation against Dagster+ with a JSON body via cURL:
+
+```bash
+curl --request POST \
+    --url https://{ORGANIZATION}.dagster.cloud/{DEPLOYMENT_NAME}/report_asset_observation/ \
+    --header 'Content-Type: application/json' \
+    --header 'Dagster-Cloud-Api-Token: {TOKEN}' \
+    --data '{
+        "asset_key": "{ASSET_KEY}",
+        "metadata": {
+            "rows": 10
+        },
+        "data_version": "{VERSION}"
+    }'
+```
diff --git a/docs/content/community/contributing.mdx b/docs/content/community/contributing.mdx
index ff80be9c0e053..c87c09c91c0cd 100644
--- a/docs/content/community/contributing.mdx
+++ b/docs/content/community/contributing.mdx
@@ -27,9 +27,9 @@ We love to see our community members get involved! If you are planning to contri
    and finally create and activate the virtualenv:
 
    ```bash
-   pyenv install 3.9.16
-   pyenv virtualenv 3.9.16 dagster39
-   pyenv activate dagster39
+   pyenv install 3.10.14
+   pyenv virtualenv 3.10.14 dagster310
+   pyenv activate dagster310
    ```
 
 3. Ensure that you have node installed by running `node -v`, and that you have [yarn](https://yarnpkg.com/lang/en/) installed. If you are on macOS, you can install yarn with Homebrew:
@@ -50,7 +50,7 @@ We love to see our community members get involved! If you are planning to contri
    make dev_install
    ```
 
-   **Note for Macs with an M1 or M2 chip**: Some users have reported installation problems due to missing wheels for arm64 Macs when installing the `grpcio` package. To install the `dagster` development environment using our pre-built wheel of the `grpcio` package for M1 and M2 machines, run `make dev_install_m1_grpcio_wheel` instead of `make dev_install`.
+   **Note for Macs with an Apple silicon chip**: Some users have reported installation problems due to missing wheels for arm64 Macs when installing the `grpcio` package. To install the `dagster` development environment using our pre-built wheel of the `grpcio` package for M1, M2, and M3 machines, run `make dev_install_m1_grpcio_wheel` instead of `make dev_install`.
 
 6. Run some tests manually to make sure things are working:
 
@@ -75,7 +75,7 @@ For development, run an instance of the webserver providing GraphQL service on a
 
 ```bash
 cd dagster/examples/docs_snippets/docs_snippets/intro_tutorial/basics/connecting_ops/
-dagster-webserver -p 3333 -f complex_pipeline.py
+dagster-webserver -p 3333 -f complex_job.py
 ```
 
 Keep this running. Then, in another terminal, run the local development (autoreloading, etc.) version of the webapp:
diff --git a/docs/content/concepts.mdx b/docs/content/concepts.mdx
index eb86af66698fc..ec9302948e48a 100644
--- a/docs/content/concepts.mdx
+++ b/docs/content/concepts.mdx
@@ -8,17 +8,17 @@ Learn about Dagster's core concepts and how to use them in your data platform.
 
 ---
 
-## Software-defined Assets
+## Asset definition
 
-An asset is an object in persistent storage, such as a table, file, or persisted machine learning model. A Software-defined Asset is a Dagster object that couples an asset to the function and upstream assets used to produce its contents.
+An asset is an object in persistent storage, such as a table, file, or persisted machine learning model. An asset definition is a Dagster object that couples an asset to the function and upstream assets used to produce its contents. @@ -51,7 +51,7 @@ An asset is an object in persistent storage, such as a table, file, or persisted ## Automation -Dagster offers several ways to run data pipelines without manual intervation, including traditional scheduling and event-based triggers. +Dagster offers several ways to run data pipelines without manual intervention, including traditional scheduling and event-based triggers. + -### Job execution, metadata, & tags +### Metadata & tags -Learn to execute op and asset jobs, and apply metadata and tags. +Apply tags and metadata to organize your project and provide useful context to other members of your team. + + + +### Dagster Pipes + +Dagster Pipes is a toolkit for building integrations between Dagster and external execution environments. + + + + + + + + + diff --git a/docs/content/concepts/assets/asset-auto-execution.mdx b/docs/content/concepts/assets/asset-auto-execution.mdx deleted file mode 100644 index fe6e8e673ab3a..0000000000000 --- a/docs/content/concepts/assets/asset-auto-execution.mdx +++ /dev/null @@ -1,213 +0,0 @@ ---- -title: Auto-Materializing Assets | Dagster -description: You can set up Dagster to automatically materialize assets when criteria are met. ---- - -# Auto-Materializing Assets - -You can set up Dagster to automatically materialize assets when criteria are met. This enables a declarative approach to asset scheduling – instead of defining imperative workflows to materialize your assets, you just describe the conditions under which they should be materialized. - -At a high-level, the most common way for assets to be auto-materialized is "eagerly" -- immediately after upstream changes occur, a run will be kicked off to incorporate those changes into a given asset. However, the precise rules that govern when runs are kicked off can be customized on an asset-by-asset basis. - -## Turning on auto-materializing - -To enable assets to be automatically materialized, you need to first flip a toggle in the Dagster UI. - -- If you're using an open source Dagster deployment, you can get to this toggle by clicking "Deployment" in the top navigation pane and then clicking on the "Daemons" tab. -- If you're using Dagster Cloud, you can get to this toggle by clicking "Deployment" in the top navigation pane, then clicking on the "Agents" tab, then looking under "Cloud service statuses". - -## Auto-materialize policies - -You can set up an asset to be auto-materialized by assigning it an . Each policy consists of a set of `AutoMaterializeRules`, each representing individual reasons that an asset should be materialized or not at a given point in time. If there's at least one rule determining that the asset should be materialized, and no rules determining that it should be skipped, a run will be launched to materialize that asset. - -It is recommended to start with the built-in and further customize from there if necessary. This policy consists of all of the supported rules, other than `materialize_on_cron` and `skip_on_not_all_parents_updated`. 
The supported rules are currently: - -| Name | Description | -| ---------------------------------------------------------------------------------------- | ------------------------------------------------------------------------------------------------------------------------------------------------------------------------ | -| | Materialize an asset partition if one of its parents has been updated more recently than it has. | -| | Materialize an asset partition if it has never been materialized before. | -| | Materialize an asset partition if it has not been materialized since the latest tick of a given cron schedule. | -| | Materialize an asset if it is required to satisfy a of this asset or one of its downstream assets. | -| | Skip materializing an asset partition if one of its parent asset partitions has never been materialized (for regular assets) or observed (for observable source assets). | -| | Skip materializing an asset partition if any of its parents has not incorporated the latest data from its ancestors. | -| | Skip materializing an asset partition if any of its parents have not been updated since the asset's last materialization. | - -In this example, we use to indicate that, any time that `asset1` is materialized, `asset2` should be automatically materialized right after: - -```python file=/concepts/assets/auto_materialize_eager.py -from dagster import AutoMaterializePolicy, asset - - -@asset -def asset1(): - ... - - -@asset(auto_materialize_policy=AutoMaterializePolicy.eager(), deps=[asset1]) -def asset2(): - ... -``` - -This example assumes that `asset1` will be materialized in some other way - e.g. manually, via a [sensor](/concepts/partitions-schedules-sensors/sensors), or via a [schedule](/concepts/partitions-schedules-sensors/schedules). - -### Adding an auto-materialize policy to multiple assets at once - -If you want to apply the same to a set of assets, you can use the `auto_materialize_policy` argument when loading them with functions like and . - -```python file=/concepts/assets/auto_materialize_multiple.py -from dagster import ( - AutoMaterializePolicy, - Definitions, - asset, - load_assets_from_current_module, -) - - -@asset -def asset1(): - ... - - -@asset(deps=[asset1]) -def asset2(): - ... - - -defs = Definitions( - assets=load_assets_from_current_module( - auto_materialize_policy=AutoMaterializePolicy.eager(), - ) -) -``` - -### Run tags - -Runs triggered by auto-materialize policies are tagged with `dagster/auto_materialize: true`. Additional tags can be configured in [`dagster.yaml` (OSS)](/deployment/dagster-instance#auto-materialize) or [deployment settings (Cloud)](/dagster-cloud/managing-deployments/deployment-settings-reference#auto-materialize). - -### Customizing auto-materialize policies - -Auto-materialize policies can be customized by adding or removing rules. These changes will be reflected in the UI for individual assets. - -#### Auto-materialize only once all parents have been updated - -By default, the eager policy will materialize an asset whenever _any_ of its parents have been updated. In cases where an asset has many parents, this may cause more materializations than desired, as each parent update will result in an additional downstream materialization. To avoid this, the `skip_on_not_all_parents_updated` rule can be applied to a given policy to force it to wait until _all_ of an asset's parents have been updated before materializing it. 
- -```python file=/concepts/assets/auto_materialize_after_all_parents.py -from dagster import AutoMaterializePolicy, AutoMaterializeRule, asset - -wait_for_all_parents_policy = AutoMaterializePolicy.eager().with_rules( - AutoMaterializeRule.skip_on_not_all_parents_updated() -) - - -@asset(auto_materialize_policy=wait_for_all_parents_policy) -def asset1(upstream1, upstream2): - ... -``` - -#### Auto-materialize even if some parents are missing - -By default, the eager policy won't materialize an asset if any of its parents are missing. In some cases, it's desirable to allow the downstream asset to be materialized, even if some of its parent assets/partitions are missing. To enable this, the `skip_on_parent_missing` rule can be removed from a given policy to prevent this from blocking the materialization of an asset. - -```python file=/concepts/assets/auto_materialize_with_missing_parents.py -from dagster import AutoMaterializePolicy, AutoMaterializeRule, asset - -allow_missing_parents_policy = AutoMaterializePolicy.eager().without_rules( - AutoMaterializeRule.skip_on_parent_missing(), -) - - -@asset(auto_materialize_policy=allow_missing_parents_policy) -def asset1(upstream1, upstream2): - ... -``` - -#### Auto-materialize root assets on a regular cadence - -By default, the eager policy will only materialize an asset if it's missing or one of its parents update. This means that an unpartitioned root asset will only get auto-materialized a single time, as it has no parents which can update. In some cases, it's desirable to recompute these assets on a regular basis. To enable this, the `materialize_on_cron` rule can be added to a given policy. - -```python file=/concepts/assets/auto_materialize_on_cron.py -from dagster import AutoMaterializePolicy, AutoMaterializeRule, asset - -materialize_on_cron_policy = AutoMaterializePolicy.eager().with_rules( - # try to materialize this asset if it hasn't been materialized since the last cron tick - AutoMaterializeRule.materialize_on_cron("0 9 * * *", timezone="US/Central"), -) - - -@asset(auto_materialize_policy=materialize_on_cron_policy) -def root_asset(): - ... -``` - -### Auto-materialization and partitions - -Each generally applies individually to each partition of a partitioned asset. Here's a pipeline with two daily-partitioned assets that have eager auto-materialize policies. At the end of each day, a partition for that day will be added to the set of partitions for each of the assets. Dagster will notice that the new partitions exist, but have no materializations, and then auto-materialize them. - -```python file=/concepts/assets/auto_materialize_time_partitions.py -from dagster import AutoMaterializePolicy, DailyPartitionsDefinition, asset - - -@asset( - partitions_def=DailyPartitionsDefinition(start_date="2020-10-10"), - auto_materialize_policy=AutoMaterializePolicy.eager(), -) -def asset1(): - ... - - -@asset( - partitions_def=DailyPartitionsDefinition(start_date="2020-10-10"), - auto_materialize_policy=AutoMaterializePolicy.eager(), - deps=[asset1], -) -def asset2(): - ... -``` - -If the last partition of `asset1` is re-materialized, e.g. manually from the UI, then the corresponding partition of `asset2` will be auto-materialized after. - -By default, a given will not allow more than one partition of an asset to be materialized per minute. Any partitions exceeding this threshold will be discarded. Manual intervention will be required to materialize the discarded partitions. 
-
-This threshold may be increased as follows:
-
-```python file=/concepts/assets/auto_materialize_max_materializations_per_minute.py
-from dagster import AutoMaterializePolicy, DailyPartitionsDefinition, asset
-
-
-@asset(
-    partitions_def=DailyPartitionsDefinition(start_date="2020-10-10"),
-    auto_materialize_policy=AutoMaterializePolicy.eager(
-        max_materializations_per_minute=7
-    ),
-)
-def asset1():
-    ...
-```
-
-For time-partitioned assets, the `N` most recent partitions will be selected from the set of candidates to be materialized. For other types of partitioned assets, the selection will be random.
-
-### Auto-materialize policies and data versions
-
-[Observable source assets](/concepts/assets/asset-observations#observable-source-assets) are assets that your code doesn't materialize, but that you provide a function for that can tell when they've changed. The rule incorporates the observed data versions of these assets when determining if it should fire for a downstream asset.
-
-In this example, we check every minute to see whether `source_file` was modified. If it was, then the on `asset1` will cause it to be materialized.
-
-```python file=/concepts/assets/auto_materialize_observable_source_asset.py
-import os
-
-from dagster import AutoMaterializePolicy, DataVersion, asset, observable_source_asset
-
-
-@observable_source_asset(auto_observe_interval_minutes=1)
-def source_file():
-    return DataVersion(str(os.path.getmtime("source_file.csv")))
-
-
-@asset(
-    deps=[source_file],
-    auto_materialize_policy=AutoMaterializePolicy.eager(),
-)
-def asset1():
-    ...
-```
diff --git a/docs/content/concepts/assets/asset-checks.mdx b/docs/content/concepts/assets/asset-checks.mdx
index 33a0112becc18..82037a5232753 100644
--- a/docs/content/concepts/assets/asset-checks.mdx
+++ b/docs/content/concepts/assets/asset-checks.mdx
@@ -1,308 +1,122 @@
 ---
-title: Asset checks | Dagster
+title: "Asset checks | Dagster Docs"
 description: Asset checks are a way to define expectations about the quality of your data.
 ---
 
-# Asset checks (Experimental)
+# Asset checks
 
-Dagster allows you to define and execute data quality checks on your [Software-defined Assets](/concepts/assets/software-defined-assets). Each asset check verifies some property of a data asset, e.g. that there are no null values in a particular column.
+Using asset checks, you can define and execute different types of checks on your [data assets](/concepts/assets/software-defined-assets) directly in Dagster. Each asset check tests some property of a data asset, such as:
 
-When viewing an asset in Dagster’s UI, you can see all of its checks, and whether they’ve passed, failed, or haven’t run.
+- Ensuring a particular column, like an `ID`, doesn't contain null values
+- Verifying that a tabular asset adheres to a specified schema
+- Checking whether the asset's data is in need of a refresh
 
----
+Assets, their checks, and the results of those checks can be viewed in the Dagster UI, providing you with a unified view of your pipeline's health. For example:
 
-## Relevant APIs
+
 
-| Name | Description |
-| ---- | ----------- |
-| | A decorator used to define asset checks that execute in their own [op](/concepts/ops-jobs-graphs/ops). |
-| | The class returned by asset checks.
| -| | A class that's passed to asset decorators to define checks that execute in the same [op](/concepts/ops-jobs-graphs/ops) as the asset. | + --- -## Defining asset checks - -### Asset check that executes in its own op - -The following code defines an asset named `orders` and an asset check named `orders_id_is_unique`. When executed, the check verifies a property of the `orders` asset: that all the values in its primary key column are unique. - -```python file=/concepts/assets/asset_checks/orders_check.py -import pandas as pd - -from dagster import AssetCheckResult, Definitions, asset, asset_check - - -@asset -def orders(): - orders_df = pd.DataFrame({"order_id": [1, 2], "item_id": [432, 878]}) - orders_df.to_csv("orders.csv") - - -@asset_check(asset=orders) -def orders_id_has_no_nulls(): - orders_df = pd.read_csv("orders.csv") - num_null_order_ids = orders_df["order_id"].isna().sum() - return AssetCheckResult( - passed=bool(num_null_order_ids == 0), - ) - - -defs = Definitions( - assets=[orders], - asset_checks=[orders_id_has_no_nulls], -) -``` - -
- - decorates the -orders_id_is_unique{" "} function which returns an object. - -The `orders_id_is_unique` check runs in its own [op](/concepts/ops-jobs-graphs/ops). That means that if you launch a run that materializes the `orders` asset and also executes the `orders_id_is_unique` check and you’re using the , the check will execute in a separate process from the process that materializes the asset. - -### Checks that execute in the same op that materializes the asset - -Sometimes, it makes more sense for a single function to both materialize an asset and execute a check on it. - -When defining an asset using the or decorators, you can set the `check_specs` argument. Each provided declares a check that the decorated function should yield an for. - -```python file=/concepts/assets/asset_checks/asset_with_check.py -import pandas as pd - -from dagster import ( - AssetCheckResult, - AssetCheckSpec, - AssetExecutionContext, - Definitions, - Output, - asset, -) - - -@asset(check_specs=[AssetCheckSpec(name="orders_id_has_no_nulls", asset="orders")]) -def orders(context: AssetExecutionContext): - orders_df = pd.DataFrame({"order_id": [1, 2], "item_id": [432, 878]}) - - # save the output and indicate that it's been saved - orders_df.to_csv("orders") - yield Output(value=None) - - # check it - num_null_order_ids = orders_df["order_id"].isna().sum() - yield AssetCheckResult( - passed=bool(num_null_order_ids == 0), - ) - - -defs = Definitions(assets=[orders]) -``` - -### Severity - -You can optionally set on check results. The default severity is `ERROR`. Severity determines how the check result appears in the UI. If a check fails with `ERROR` severity, the asset will appear red in the lineage graph. - -```python file=/concepts/assets/asset_checks/severity.py -from dagster import ( - AssetCheckResult, - AssetCheckSeverity, - Definitions, - asset, - asset_check, -) - - -@asset -def my_asset(): - ... - - -@asset_check(asset=my_asset) -def my_check(): - is_serious = ... - return AssetCheckResult( - passed=False, - severity=AssetCheckSeverity.ERROR if is_serious else AssetCheckSeverity.WARN, - ) - +## Benefits -defs = Definitions(assets=[my_asset], asset_checks=[my_check]) -``` +Using asset checks helps you: -### Adding metadata to check results - -You can add information why a check passed or failed using the `metadata` argument on . We'll add `num_null_order_ids` as metadata to the `orders_id_has_no_nulls` example: - -```python file=/concepts/assets/asset_checks/metadata.py -import pandas as pd - -from dagster import AssetCheckResult, Definitions, asset, asset_check - - -@asset -def orders(): - orders_df = pd.DataFrame({"order_id": [1, 2], "item_id": [432, 878]}) - orders_df.to_csv("orders.csv") - - -@asset_check(asset=orders, description="Check for null order_ids") -def orders_id_has_no_nulls(): - orders_df = pd.read_csv("orders.csv") - num_null_order_ids = orders_df["order_id"].isna().sum() - return AssetCheckResult( - passed=bool(num_null_order_ids == 0), - metadata={ - "num_null_order_ids": int(num_null_order_ids), - }, - ) - - -defs = Definitions( - assets=[orders], - asset_checks=[orders_id_has_no_nulls], -) -``` - -There are a variety of types supported via the class. You can view the metadata on the **Checks** tab of the **Asset details** page. - -### Asset check factories - -If you want to define many checks that are similar, you can use the factory pattern. Here's an example factory that accepts a list of sql statements and turns them in to asset checks. 
-```python file=/concepts/assets/asset_checks/factory.py
-from typing import Any, Mapping, Sequence
-
-from mock import MagicMock
-
-from dagster import (
-    AssetCheckResult,
-    AssetChecksDefinition,
-    Definitions,
-    asset,
-    asset_check,
-)
-
-
-@asset
-def orders():
-    ...
-
-
-@asset
-def items():
-    ...
-
-
-def make_check(check_blob: Mapping[str, str]) -> AssetChecksDefinition:
-    @asset_check(
-        name=check_blob["name"],
-        asset=check_blob["asset"],
-        required_resource_keys={"db_connection"},
-    )
-    def _check(context):
-        rows = context.resources.db_connection.execute(check_blob["sql"])
-        return AssetCheckResult(passed=len(rows) == 0, metadata={"num_rows": len(rows)})
-
-    return _check
-
-
-check_blobs = [
-    {
-        "name": "orders_id_has_no_nulls",
-        "asset": "orders",
-        "sql": "select * from orders where order_id is null",
-    },
-    {
-        "name": "items_id_has_no_nulls",
-        "asset": "items",
-        "sql": "select * from items where item_id is null",
-    },
-]
-
-defs = Definitions(
-    assets=[orders, items],
-    asset_checks=[make_check(check_blob) for check_blob in check_blobs],
-    resources={"db_connection": MagicMock()},
-)
-```
+- Create a single place in the Dagster UI to learn about and investigate issues in your data pipeline
+- Ensure that data produced by your pipeline is timely and high quality by applying basic suites of tests to all assets
+- Identify problems in the source data that your pipeline depends on, such as checking data quality on source assets where dbt tests aren't possible
+- Communicate what the data produced by your pipeline is expected to look like (aka act as a data contract)
+- Raise warnings or block downstream assets from materializing when issues arise
+- Track asset check results in the UI, allowing you to monitor data quality metrics over time
 
----
+---
 
-## Executing checks
-
-### Via the UI
+## Prerequisites
 
-Materializing an asset from the UI will also execute any checks that are defined for that asset. You can also execute checks without materializing the asset from the Checks tab of the asset’s detail page.
+Before continuing, you should be familiar with:
 
-### Via sensors and schedules
+- [Assets](/concepts/assets/software-defined-assets)
 
-You can use to define jobs that execute sets of both assets and checks, and then trigger those jobs via [sensors](/concepts/partitions-schedules-sensors/sensors) or [schedules](/concepts/partitions-schedules-sensors/schedules). By default, checks are included with the assets they check. You can also define jobs that include only checks, or only assets.
+---
 
-```python file=/concepts/assets/asset_checks/jobs.py
-from dagster import (
-    AssetSelection,
-    Definitions,
-    ScheduleDefinition,
-    asset,
-    asset_check,
-    define_asset_job,
-)
+## How it works
 
-
-@asset
-def my_asset():
-    ...
+Defined in code, asset checks are used to test some property of one or more Dagster assets. Asset checks can be defined by:
 
+- Using custom Python code
+- Importing them from external systems like [dbt](/integrations/dbt/reference#loading-dbt-tests-as-asset-checks)
+- Using Dagster's [built-in utility methods](#built-in-asset-checks)
 
-@asset_check(asset=my_asset)
-def check_1():
-    ...
+Asset checks and their results are [visible in the UI](/concepts/assets/asset-checks/define-execute-asset-checks#using-asset-check-results), allowing you to communicate useful information about data quality, data freshness, and other issues to stakeholders.
Asset check results can also be used to create conditional steps in your pipelines - for example, if a quality check fails, execution can be halted to prevent issues spreading downstream. +Using schedules and sensors, you can automate the execution of jobs that include asset checks and the assets that they target. Checks can also be executed on a one-off basis using the Dagster UI. Refer to the [Executing checks](/concepts/assets/asset-checks/define-execute-asset-checks#executing-checks) section of the [Defining and executing asset checks](/concepts/assets/asset-checks/define-execute-asset-checks) guide for more info. -@asset_check(asset=my_asset) -def check_2(): - ... +--- +## Getting started -# includes my_asset and both checks -my_job = define_asset_job("my_job", selection=AssetSelection.assets(my_asset)) +Check out these guides to get started with asset checks: +
+- [Defining and executing asset checks](/concepts/assets/asset-checks/define-execute-asset-checks)
+- [Using asset checks to check data freshness](/concepts/assets/asset-checks/checking-for-data-freshness)
+
-# includes only my_asset
-my_asset_only_job = define_asset_job(
-    "my_asset_only_job",
-    selection=AssetSelection.assets(my_asset).without_checks(),
-)
+From here, you can:
 
-# includes check_1 and check_2, but not my_asset
-checks_only_job = define_asset_job(
-    "checks_only_job", selection=AssetSelection.checks_for_assets(my_asset)
-)
+- Block downstream execution [based on check results](/concepts/assets/asset-checks/define-execute-asset-checks#blocking-downstream-assets)
+- Use severity levels and metadata [to communicate actionable details about check results in the UI](/concepts/assets/asset-checks/define-execute-asset-checks#using-asset-check-results)
+- Use freshness checks [to identify stale assets that are due for a refresh](/concepts/assets/asset-checks/checking-for-data-freshness)
+- **Dagster+ only**:
+  - [Create alerts](/dagster-plus/managing-deployments/alerts) based on asset check results
+  - Use [Insights' asset materialization metadata](/dagster-plus/insights/asset-metadata) to track asset check results over time
 
-# includes only check_1
-check_1_job = define_asset_job("check_1_job", selection=AssetSelection.checks(check_1))
+### Built-in asset checks
 
-# schedule my_job to run every day at midnight
-basic_schedule = ScheduleDefinition(job=my_job, cron_schedule="0 0 * * *")
+The following table lists Dagster's built-in utility methods for creating asset checks.
 
-defs = Definitions(
-    assets=[my_asset],
-    asset_checks=[check_1, check_2],
-    jobs=[my_job, my_asset_only_job, checks_only_job, check_1_job],
-    schedules=[basic_schedule],
-)
-```
+| API                                     | Description                                                                                                |
+| --------------------------------------- | ---------------------------------------------------------------------------------------------------------- |
+| `build_metadata_bounds_checks`          | Builds asset checks that pass if a numeric metadata value falls within a particular range                   |
+| `build_column_schema_change_checks`     | Builds asset checks that pass if an asset's columns are the same, compared with its prior materialization   |
+| `build_last_update_freshness_checks`    | Builds asset checks that pass if not too much time has elapsed since the latest time an asset was updated   |
+| `build_time_partition_freshness_checks` | Builds asset checks that pass if an asset's most recent partition has been materialized before a deadline   |
 
----
+### Limitations
 
-## Alerting (Dagster Cloud)
+- **Dagster's UI is tested with a maximum of 1,000 checks per asset.** It's designed with the expectation that most assets will have fewer than 50 checks. If you have a use case that doesn't fit these limits, reach out to Dagster support to discuss.
 
-You can configure alerts on asset checks using [asset based alert policies](/dagster-cloud/managing-deployments/setting-up-alerts).
+- **Checks are currently only supported per-asset, not per-partition.** See [this issue](https://github.com/dagster-io/dagster/issues/17005) for updates.
 
----
-
-## Limitations
-
-- Dagster's UI is tested with a maximum of 1000 checks per asset. It's designed with the expectation that most assets will have fewer than 50 checks. If you have a use case that doesn't fit these limits, you can reach out to discuss.
-
-- Checks are currently only supported per-asset, not per-partition. See [this issue](https://github.com/dagster-io/dagster/issues/17005) for updates.
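+To make the "custom Python code" option above concrete, here is a minimal sketch of a check defined with `@asset_check` and `AssetCheckResult`; the `orders` asset key and the null count are hypothetical placeholders:
+
+```python
+from dagster import AssetCheckResult, asset_check
+
+
+@asset_check(asset="orders")  # "orders" is a hypothetical upstream asset
+def orders_id_has_no_nulls() -> AssetCheckResult:
+    # In a real check, load the asset's data and count null IDs
+    num_null_ids = 0
+    return AssetCheckResult(passed=num_null_ids == 0)
+```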
+## Related
+
diff --git a/docs/content/concepts/assets/asset-checks/checking-for-data-freshness.mdx b/docs/content/concepts/assets/asset-checks/checking-for-data-freshness.mdx
new file mode 100644
index 0000000000000..331a71a3aada5
--- /dev/null
+++ b/docs/content/concepts/assets/asset-checks/checking-for-data-freshness.mdx
@@ -0,0 +1,405 @@
+---
+title: "Using asset checks to check data freshness | Dagster Docs"
+description: "Use freshness checks to identify data assets that are overdue for an update."
+---
+
+# Using asset checks to check data freshness
+
+A freshness check is a type of [asset check](/concepts/assets/asset-checks) that allows you to identify Dagster [assets](/concepts/assets/software-defined-assets) that are overdue for a data refresh.
+
+Freshness checks don't depend on a specific root cause, which makes them helpful in accounting for unknowns. For example, you can use freshness checks to identify stale assets caused by:
+
+- The pipeline hitting an error and failing
+- Runs never being scheduled
+- A backed-up run queue
+- Runs taking longer than expected to complete
+
+For example, let's say your team uses a dashboard as part of their daily standup. You determine that the assets powering the dashboard should be materialized no later than 7AM to ensure the data is up-to-date. If a freshness check runs at that time and the assets haven't been updated, the check will fail. The freshness check ensures you know when data doesn't arrive as expected, without needing to know the specific cause.
+
+By the end of this guide, you'll understand what freshness checks are, how to implement them in your data pipelines, and how to be alerted when your assets are overdue.
+
+---
+
+## How it works
+
+An asset is considered fresh if it's been materialized or observed within a defined time window. Otherwise, assets are considered overdue and in need of an update. To identify these assets, you can use **freshness checks**.
+
+Freshness checks can also communicate SLAs for data freshness. For example, downstream asset consumers can look at the checks that are defined on those assets to determine when and how often they’re expected to be updated.
+
+To determine if an asset is overdue, a freshness check will use the asset's latest update time. How Dagster determines the last update time varies based on the type of asset being targeted:
+
+- **For source assets**, which are assets whose data feeds a Dagster pipeline, freshness checks depend on asset observations to report the time the asset was last updated. In this guide, we'll demonstrate how to use [observable source assets and a schedule](#defining-freshness-checks-for-source-assets) to achieve this.
+
+- **For materializable assets**, which are assets materialized by Dagster, Dagster infers the asset's last update time using its latest materialization timestamp. In this guide, we'll demonstrate how to use [`@asset`-decorated assets and a sensor](#defining-freshness-checks-for-materializable-assets) to achieve this; a minimal sketch of that approach follows this list.
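+As a preview of the materializable-asset approach, here is a minimal sketch. It assumes the `build_sensor_for_freshness_checks` utility is available in your Dagster version; check the API docs before relying on it:
+
+```python
+from datetime import timedelta
+
+from dagster import (
+    Definitions,
+    asset,
+    build_last_update_freshness_checks,
+    build_sensor_for_freshness_checks,
+)
+
+
+@asset
+def my_asset(): ...
+
+
+# Fail if my_asset hasn't been materialized in the last two hours
+freshness_checks = build_last_update_freshness_checks(
+    assets=[my_asset], lower_bound_delta=timedelta(hours=2)
+)
+
+# A sensor that periodically evaluates the freshness checks, even when
+# no other runs are being launched
+freshness_sensor = build_sensor_for_freshness_checks(
+    freshness_checks=freshness_checks
+)
+
+defs = Definitions(
+    assets=[my_asset],
+    asset_checks=freshness_checks,
+    sensors=[freshness_sensor],
+)
+```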
+
+---
+
+## Prerequisites
+
+Before continuing, you should be familiar with:
+
+- [Asset definitions](/concepts/assets/software-defined-assets)
+- [Asset checks](/concepts/assets/asset-checks)
+- [Schedules](/concepts/automation/schedules) and/or [sensors](/concepts/partitions-schedules-sensors/sensors)
+- [Alerts](/dagster-plus/managing-deployments/alerts) (Dagster+ only)
+
+---
+
+## Defining freshness checks for source assets
+
+In this section, we'll demonstrate how to implement freshness checks for source assets. Source assets are assets whose data feeds a Dagster pipeline, but aren't materialized by Dagster.
+
+### Step 1: Track the asset's last update time
+
+To run freshness checks on source assets, the checks need to know when the source assets were last updated. [Observable source assets](/concepts/assets/asset-observations#observable-source-assets) can be used to track the update times of these assets.
+
+The following example implements this using the `@multi_observable_source_asset` decorator. In it, multiple Snowflake tables are backed by an observation function that queries Snowflake to find the most recent time the tables were updated. The function yields the update time as metadata to be stored in the Dagster event log:
+
+```python file=/concepts/assets/asset_checks/source_data_freshness_in_pieces.py startafter=start_multi_observable_marker endbefore=end_multi_observable_marker
+from dagster_snowflake import SnowflakeResource, fetch_last_updated_timestamps
+
+from dagster import (
+    AssetSpec,
+    MetadataValue,
+    ObserveResult,
+    multi_observable_source_asset,
+)
+
+TABLE_SCHEMA = "PUBLIC"
+table_names = ["charges", "customers"]
+asset_specs = [AssetSpec(table_name) for table_name in table_names]
+
+
+@multi_observable_source_asset(specs=asset_specs)
+def source_tables(snowflake: SnowflakeResource):
+    with snowflake.get_connection() as conn:
+        freshness_results = fetch_last_updated_timestamps(
+            snowflake_connection=conn.cursor(),
+            tables=table_names,
+            schema=TABLE_SCHEMA,
+        )
+        for table_name, last_updated in freshness_results.items():
+            yield ObserveResult(
+                asset_key=table_name,
+                metadata={
+                    "dagster/last_updated_timestamp": MetadataValue.timestamp(
+                        last_updated
+                    )
+                },
+            )
+```
+
+### Step 2: Schedule the observations
+
+Next, we'll define a [schedule](/concepts/automation/schedules) that regularly executes the function in the `source_tables` observable source asset:
+
+```python file=/concepts/assets/asset_checks/source_data_freshness_in_pieces.py startafter=start_schedule_marker endbefore=end_schedule_marker
+from dagster import AssetSelection, ScheduleDefinition, define_asset_job
+
+source_tables_observation_schedule = ScheduleDefinition(
+    job=define_asset_job(
+        "source_tables_observation_job",
+        selection=AssetSelection.assets(source_tables),
+    ),
+    # Runs every minute. Usually, a much less frequent cadence is necessary,
+    # but a short cadence makes it easier to play around with this example.
+    cron_schedule="* * * * *",
+)
+```
+
+When the code location is loaded and the schedule is turned on, it will automatically kick off runs to observe the asset.
+
+### Step 3: Define the freshness check
+
+#### Option 1: Use hardcoded parameters
+
+In our example, we expect the source tables to be updated at least once every two hours.
We'll use the `build_last_update_freshness_checks` function to produce a set of asset checks that fail if an asset’s `last_updated_timestamp` is more than two hours before the current time:
+
+```python file=/concepts/assets/asset_checks/source_data_freshness_in_pieces.py startafter=start_freshness_checks_marker endbefore=end_freshness_checks_marker
+from datetime import timedelta
+
+from dagster import build_last_update_freshness_checks
+
+source_table_freshness_checks = build_last_update_freshness_checks(
+    assets=[source_tables],
+    lower_bound_delta=timedelta(hours=2),
+)
+```
+
+These checks will automatically execute after the observations of the source assets they target, so an additional schedule isn't needed.
+
+
+
+
+
+#### Option 2: Use anomaly detection (Dagster+ Pro)
+
+A Dagster+ Pro plan is required to use this feature.
+
+Setting custom freshness policies on a large number of assets can be time-consuming. Dagster+ Pro users can take advantage of a time series anomaly detection model instead of applying policies on an asset-by-asset basis. Freshness checks that use this approach work the same way as checks with hardcoded parameters.
+
+This model uses data from past materializations/observations to determine if data is arriving later than expected. **Note**: If the asset hasn't been updated enough times, the check will pass with a message indicating that more data is needed to detect anomalies.
+
+In the following example, we'll use `build_anomaly_detection_freshness_checks` to accomplish this:
+
+```python
+from dagster_cloud.anomaly_detection import build_anomaly_detection_freshness_checks
+
+freshness_checks = build_anomaly_detection_freshness_checks(
+    assets=[source_tables], params=None
+)
+```
+
+
+
+
+
+### Step 4: Create a Definitions object
+
+The last step is to include the asset, checks, schedule, and resource in a `Definitions` object.
This enables Dagster tools to load everything we've defined: + +```python file=/concepts/assets/asset_checks/source_data_freshness_in_pieces.py startafter=start_defs_marker endbefore=end_defs_marker +from dagster import Definitions, EnvVar + +defs = Definitions( + assets=[source_tables], + asset_checks=source_table_freshness_checks, + schedules=[source_tables_observation_schedule], + resources={ + "snowflake": SnowflakeResource( + user=EnvVar("SNOWFLAKE_USER"), + account=EnvVar("SNOWFLAKE_ACCOUNT"), + password=EnvVar("SNOWFLAKE_PASSWORD"), + ) + }, +) +``` + +At this point, the finished code should look like this: + +```python file=/concepts/assets/asset_checks/source_data_freshness_complete.py +from datetime import timedelta + +from dagster_snowflake import SnowflakeResource, fetch_last_updated_timestamps + +from dagster import ( + AssetSelection, + AssetSpec, + Definitions, + EnvVar, + MetadataValue, + ObserveResult, + ScheduleDefinition, + build_last_update_freshness_checks, + define_asset_job, + multi_observable_source_asset, +) + +TABLE_SCHEMA = "PUBLIC" +table_names = ["charges", "customers"] +asset_specs = [AssetSpec(table_name) for table_name in table_names] + + +@multi_observable_source_asset(specs=asset_specs) +def source_tables(snowflake: SnowflakeResource): + with snowflake.get_connection() as conn: + freshness_results = fetch_last_updated_timestamps( + snowflake_connection=conn.cursor(), + tables=table_names, + schema=TABLE_SCHEMA, + ) + for table_name, last_updated in freshness_results.items(): + yield ObserveResult( + asset_key=table_name, + metadata={ + "dagster/last_updated_timestamp": MetadataValue.timestamp( + last_updated + ) + }, + ) + + +source_tables_observation_schedule = ScheduleDefinition( + job=define_asset_job( + "source_tables_observation_job", + selection=AssetSelection.assets(source_tables), + ), + # Runs every minute. Usually, a much less frequent cadence is necessary, + # but a short cadence makes it easier to play around with this example. + cron_schedule="* * * * *", +) + + +source_table_freshness_checks = build_last_update_freshness_checks( + assets=[source_tables], + lower_bound_delta=timedelta(hours=2), +) + + +defs = Definitions( + assets=[source_tables], + asset_checks=source_table_freshness_checks, + schedules=[source_tables_observation_schedule], + resources={ + "snowflake": SnowflakeResource( + user=EnvVar("SNOWFLAKE_USER"), + account=EnvVar("SNOWFLAKE_ACCOUNT"), + password=EnvVar("SNOWFLAKE_PASSWORD"), + ) + }, +) +``` + +From here, you can [view and manually execute the checks in the Dagster UI](#freshness-checks-in-the-dagster-ui). + +--- + +## Defining freshness checks for materializable assets + +In this section, we'll demonstrate how to implement freshness checks for materializable assets. These are assets that are materialized by a Dagster pipeline. + +### Step 1: Define the freshness check + +In this example, we'll use the function to produce an asset check. This check will fail if an asset’s latest materialization is more than two hours before the current time: + +```python file=/concepts/assets/asset_checks/materializable_freshness_in_pieces.py startafter=start_checks_marker endbefore=end_checks_marker +from datetime import timedelta + +from dagster import asset, build_last_update_freshness_checks + + +@asset +def my_asset(): ... 
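+# The check produced below will fail if my_asset's latest materialization
+# is more than two hours (lower_bound_delta) old at evaluation time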
+
+
+asset1_freshness_checks = build_last_update_freshness_checks(
+    assets=[my_asset], lower_bound_delta=timedelta(hours=2)
+)
+```
+
+### Step 2: Create a sensor to execute the check
+
+A [schedule](/concepts/automation/schedules) or [sensor](/concepts/partitions-schedules-sensors/sensors) is required to ensure the freshness check executes. If the check only ran after the asset was materialized, it wouldn't be able to catch the cases where a materialization fails or never runs.
+
+In this example, we'll use `build_sensor_for_freshness_checks` to create a sensor that automatically runs the check. Based on the check's parameters and last run time, the sensor will run the check when enough time has elapsed that the asset might fail the check:
+
+```python file=/concepts/assets/asset_checks/materializable_freshness_in_pieces.py startafter=start_sensor_marker endbefore=end_sensor_marker
+from dagster import build_sensor_for_freshness_checks
+
+freshness_checks_sensor = build_sensor_for_freshness_checks(
+    freshness_checks=asset1_freshness_checks
+)
+```
+
+### Step 3: Create a Definitions object
+
+The last step is to include the asset, check, and sensor in a `Definitions` object. This enables Dagster tools to load everything we've defined:
+
+```python file=/concepts/assets/asset_checks/materializable_freshness_in_pieces.py startafter=start_defs_marker endbefore=end_defs_marker
+from dagster import Definitions
+
+defs = Definitions(
+    assets=[my_asset],
+    asset_checks=asset1_freshness_checks,
+    sensors=[freshness_checks_sensor],
+)
+```
+
+At this point, the finished code should look like this:
+
+```python file=/concepts/assets/asset_checks/materializable_freshness_complete.py
+from datetime import timedelta
+
+from dagster import (
+    Definitions,
+    asset,
+    build_last_update_freshness_checks,
+    build_sensor_for_freshness_checks,
+)
+
+
+@asset
+def my_asset(): ...
+
+
+asset1_freshness_checks = build_last_update_freshness_checks(
+    assets=[my_asset], lower_bound_delta=timedelta(hours=2)
+)
+freshness_checks_sensor = build_sensor_for_freshness_checks(
+    freshness_checks=asset1_freshness_checks
+)
+defs = Definitions(
+    assets=[my_asset],
+    asset_checks=asset1_freshness_checks,
+    sensors=[freshness_checks_sensor],
+)
+```
+
+From here, you can [view and manually execute the checks in the Dagster UI](#freshness-checks-in-the-dagster-ui).
+
+---
+
+## Freshness checks in the Dagster UI
+
+To view a freshness check, navigate to the **Asset details** page for the asset and click the **Checks** tab.
+
+This page allows you to see metadata about the check and its latest execution. You can also manually execute the check by clicking the **Execute** button.
+
+
+
+
+
+---
+
+## Alerting on overdue assets (Dagster+ only)
+
+In Dagster+, you can set up alerts to notify you when assets are overdue for an update. By default, freshness checks will fail with a severity of `WARN`, but you can override this to fail with `ERROR`.
+
+To alert on overdue assets, create an alert policy with the following settings:
+
+
+
+---
+
+## APIs in this guide
+
+| Name | Description |
+| ----------------------------------------------------------- | ----------------------------------------------------------------------------------------------------------------- |
+| | A decorator used to define asset checks that execute in their own [op](/concepts/ops-jobs-graphs/ops).
| +| | A function that constructs a freshness check definition for each provided asset | +| | A function that constructs a freshness check definition for each provided time-window partitioned asset | +| | Builds a sensor that triggers evaluation of freshness checks | +| | An object representing a successful observation of an asset. Used with observable source assets to pass metadata. | + +--- + +## Related + + + + + + + + diff --git a/docs/content/concepts/assets/asset-checks/define-execute-asset-checks.mdx b/docs/content/concepts/assets/asset-checks/define-execute-asset-checks.mdx new file mode 100644 index 0000000000000..917a686a2878e --- /dev/null +++ b/docs/content/concepts/assets/asset-checks/define-execute-asset-checks.mdx @@ -0,0 +1,443 @@ +--- +title: "Defining and executing asset checks | Dagster Docs" +description: Asset checks are a way to define expectations about the quality of your data. +--- + +# Defining and executing asset checks + +After creating some [asset definitions](/concepts/assets/software-defined-assets), you may want to automate checks on the assets that test for data quality. + +In this guide, we'll show you a few approaches to defining asset checks, how to use check results to include helpful information, and how to execute checks. + +--- + +## Defining asset checks + +There are a few ways you can define an asset check: + +- **Separately from the assets the checks target** - In this approach, asset materialization and asset checks are executed in their own separate operations. If using the , this allows you to launch runs that will use separate processes to materialize the asset and execute its check. +- **Together with assets** - In this approach, checks execute in the same operation that materializes the asset. +- **Using an asset check factory** - This approach allows you to define multiple, similar asset checks at once +- **Loading dbt tests into Dagster** - This approach allows you to model your dbt tests as asset checks + +### Defining checks separately from assets + +In this example, we'll demonstrate how to define separate functions for an asset and the corresponding check. + +The following code defines an asset named `orders` and an asset check named `orders_id_has_no_nulls`. When executed, the check verifies that all values in the `orders` asset's primary key column are unique. + +```python file=/concepts/assets/asset_checks/orders_check.py +import pandas as pd + +from dagster import AssetCheckResult, Definitions, asset, asset_check + + +@asset +def orders(): + orders_df = pd.DataFrame({"order_id": [1, 2], "item_id": [432, 878]}) + orders_df.to_csv("orders.csv") + + +@asset_check(asset=orders) +def orders_id_has_no_nulls(): + orders_df = pd.read_csv("orders.csv") + num_null_order_ids = orders_df["order_id"].isna().sum() + return AssetCheckResult( + passed=bool(num_null_order_ids == 0), + ) + + +defs = Definitions( + assets=[orders], + asset_checks=[orders_id_has_no_nulls], +) +``` + +The decorator decorates the `orders_id_has_no_nulls` function which returns an object. + +The `orders_id_has_no_nulls` check runs in its own [op](/concepts/ops-jobs-graphs/ops). That means that if you launch a run that does all of the following, the check will execute in a separate process from the process that materializes the asset: + +1. Materializes the `orders` asset, +2. Executes the `orders_id_has_no_nulls` check, and +3. You're using the + +#### Defining multiple checks + +Using , you can define multiple checks that execute in a single operation without an asset. 
This approach avoids the overhead of running a separate operation for every check and may enable reusing computations across checks. + +```python file=/concepts/assets/asset_checks/multi_asset_check.py +from typing import Iterable + +from dagster import ( + AssetCheckExecutionContext, + AssetCheckResult, + AssetCheckSeverity, + AssetCheckSpec, + multi_asset_check, +) + + +@multi_asset_check( + specs=[ + AssetCheckSpec(name="asset_check_one", asset="my_asset_one"), + AssetCheckSpec(name="asset_check_two", asset="my_asset_two"), + ] +) +def the_check(context: AssetCheckExecutionContext) -> Iterable[AssetCheckResult]: + yield AssetCheckResult( + passed=False, + severity=AssetCheckSeverity.WARN, + description="The asset is over 0.5", + asset_key="asset_check_one", + ) + + yield AssetCheckResult( + passed=True, + description="The asset is fresh.", + asset_key="asset_check_two", + ) +``` + +You can enable subsetting of checks defined in a to exclude certain checks. Refer to the [Subsetting asset checks](/concepts/assets/asset-checks/subsetting-asset-checks) guide for more information. + +### Defining checks and assets together + +Sometimes, it makes sense for a single function to materialize an asset **and** execute a check on it. + +In this example, we'll demonstrate how to do this by using the `check_specs` argument. This argument is available when using the or decorators. Each provided declares a check that the decorated function should yield an for. + +```python file=/concepts/assets/asset_checks/asset_with_check.py +import pandas as pd + +from dagster import ( + AssetCheckResult, + AssetCheckSpec, + AssetExecutionContext, + Definitions, + Output, + asset, +) + + +@asset(check_specs=[AssetCheckSpec(name="orders_id_has_no_nulls", asset="orders")]) +def orders(context: AssetExecutionContext): + orders_df = pd.DataFrame({"order_id": [1, 2], "item_id": [432, 878]}) + + # save the output and indicate that it's been saved + orders_df.to_csv("orders") + yield Output(value=None) + + # check it + num_null_order_ids = orders_df["order_id"].isna().sum() + yield AssetCheckResult( + passed=bool(num_null_order_ids == 0), + ) + + +defs = Definitions(assets=[orders]) +``` + +### Using a factory + +To define multiple, similar asset checks, use a factory pattern. In the following example, the factory accepts a list of SQL statements and turns them into asset checks: + +```python file=/concepts/assets/asset_checks/factory.py +from typing import Any, Mapping, Sequence + +from mock import MagicMock + +from dagster import ( + AssetCheckResult, + AssetChecksDefinition, + Definitions, + asset, + asset_check, +) + + +@asset +def orders(): ... + + +@asset +def items(): ... 
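+# make_check is a factory: given one check "blob", it returns an asset check
+# that runs the blob's SQL and passes only if the query returns zero rows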
+ + +def make_check(check_blob: Mapping[str, str]) -> AssetChecksDefinition: + @asset_check( + name=check_blob["name"], + asset=check_blob["asset"], + required_resource_keys={"db_connection"}, + ) + def _check(context): + rows = context.resources.db_connection.execute(check_blob["sql"]) + return AssetCheckResult(passed=len(rows) == 0, metadata={"num_rows": len(rows)}) + + return _check + + +check_blobs = [ + { + "name": "orders_id_has_no_nulls", + "asset": "orders", + "sql": "select * from orders where order_id is null", + }, + { + "name": "items_id_has_no_nulls", + "asset": "items", + "sql": "select * from items where item_id is null", + }, +] + +defs = Definitions( + assets=[orders, items], + asset_checks=[make_check(check_blob) for check_blob in check_blobs], + resources={"db_connection": MagicMock()}, +) +``` + +### Loading dbt tests as asset checks + +Using the , you can model your existing dbt tests as asset checks. Refer to the [dbt integration reference](/integrations/dbt/reference#loading-dbt-tests-as-asset-checks) for more information. + +--- + +## Using asset check results + +In this section, we'll show you how to use asset check results to: + +- [Customize the display of results in the Dagster UI](#customizing-the-dagster-ui) +- [Block downstream assets](#blocking-downstream-assets) + +### Customizing the Dagster UI + +Using asset check results, you can display how check-related information displays in the Dagster UI. + +#### Setting severity + +Using , you can define a severity on check results. The default severity is `ERROR`. + +The severity determines how the check result will display in the UI. For example, if a check fails with `ERROR` severity, the asset will appear red in the lineage graph in the UI. + +```python file=/concepts/assets/asset_checks/severity.py +from dagster import ( + AssetCheckResult, + AssetCheckSeverity, + Definitions, + asset, + asset_check, +) + + +@asset +def my_asset(): ... + + +@asset_check(asset=my_asset) +def my_check(): + is_serious = ... + return AssetCheckResult( + passed=False, + severity=AssetCheckSeverity.ERROR if is_serious else AssetCheckSeverity.WARN, + ) + + +defs = Definitions(assets=[my_asset], asset_checks=[my_check]) +``` + +#### Adding metadata + +Including details about a check result can provide helpful context to others who view it in the UI. Using the `metadata` argument on , you can include information about why a check passed or failed. + +In the following example, we added `num_null_order_ids` as metadata to the `orders_id_has_no_nulls` check: + +```python file=/concepts/assets/asset_checks/metadata.py +import pandas as pd + +from dagster import AssetCheckResult, Definitions, asset, asset_check + + +@asset +def orders(): + orders_df = pd.DataFrame({"order_id": [1, 2], "item_id": [432, 878]}) + orders_df.to_csv("orders.csv") + + +@asset_check(asset=orders, description="Check for null order_ids") +def orders_id_has_no_nulls(): + orders_df = pd.read_csv("orders.csv") + num_null_order_ids = orders_df["order_id"].isna().sum() + return AssetCheckResult( + passed=bool(num_null_order_ids == 0), + metadata={ + "num_null_order_ids": int(num_null_order_ids), + }, + ) + + +defs = Definitions( + assets=[orders], + asset_checks=[orders_id_has_no_nulls], +) +``` + +There are a variety of types supported via the class. You can view the metadata on the **Checks** tab of the **Asset details** page. 
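+
+For instance, a hypothetical variation of the check above (reusing the `orders` asset from the previous example) could attach several metadata types at once. This is a sketch, not an exhaustive list of what `MetadataValue` supports:
+
+```python
+from dagster import AssetCheckResult, MetadataValue, asset_check
+
+
+@asset_check(asset=orders, description="Check for null order_ids")
+def orders_id_has_no_nulls_with_rich_metadata():
+    # Assume num_null_order_ids was computed as in the example above
+    num_null_order_ids = 0
+    return AssetCheckResult(
+        passed=bool(num_null_order_ids == 0),
+        metadata={
+            # A plain integer value
+            "num_null_order_ids": MetadataValue.int(num_null_order_ids),
+            # Rendered as Markdown in the UI
+            "summary": MetadataValue.md("**All** order IDs were non-null."),
+            # Rendered as a clickable link (hypothetical URL)
+            "dashboard": MetadataValue.url("https://example.com/orders-dashboard"),
+        },
+    )
+```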
+ +### Blocking downstream assets + +To block downstream assets from executing when checks fail, set the `blocking` argument to `True` in the decorator. In the following example, `check_upstream_asset` will block `downstream_asset` from executing. + +```python file=/concepts/assets/asset_checks/blocking.py +from dagster import AssetCheckResult, Definitions, asset, asset_check + + +@asset +def upstream_asset(): + pass + + +@asset_check(asset=upstream_asset, blocking=True) +def check_upstream_asset(): + return AssetCheckResult(passed=False) + + +@asset(deps=[upstream_asset]) +def downstream_asset(): + pass + + +defs = Definitions( + assets=[upstream_asset, downstream_asset], asset_checks=[check_upstream_asset] +) +``` + +When `blocking` is enabled, downstream assets will wait to execute until the check completes: + +- **Downstream assets will not execute** if the check returns a failing or raises an exception +- **Downstream assets will execute** if the is set to `WARN` instead of `ERROR` + +This feature has the following limitations: + +- **`blocking` is currently only supported with .** [For checks defined in the same operation as assets](#defining-checks-and-assets-together), you can explicitly raise an exception to block downstream execution. +- **Assets with an currently do not respect blocking asset checks** and will execute even if a blocking check on an upstream asset failed. + +--- + +## Executing checks + +### Via the UI + +Materializing an asset from the UI will also execute any checks defined for that asset. To execute a check without materializing the asset, use the **Checks** tab of the **Asset's details** page. + +### Via sensors and schedules + +To define jobs that execute sets of assets and checks, you can use and then trigger the jobs via [sensors](/concepts/partitions-schedules-sensors/sensors) or [schedules](/concepts/automation/schedules). By default, checks are included with the assets they check. You can also define jobs that include only checks, or only assets. + +```python file=/concepts/assets/asset_checks/jobs.py +from dagster import ( + AssetSelection, + Definitions, + ScheduleDefinition, + asset, + asset_check, + define_asset_job, +) + + +@asset +def my_asset(): ... + + +@asset_check(asset=my_asset) +def check_1(): ... + + +@asset_check(asset=my_asset) +def check_2(): ... + + +# includes my_asset and both checks +my_job = define_asset_job("my_job", selection=AssetSelection.assets(my_asset)) + + +# includes only my_asset +my_asset_only_job = define_asset_job( + "my_asset_only_job", + selection=AssetSelection.assets(my_asset).without_checks(), +) + +# includes check_1 and check_2, but not my_asset +checks_only_job = define_asset_job( + "checks_only_job", selection=AssetSelection.checks_for_assets(my_asset) +) + +# includes only check_1 +check_1_job = define_asset_job("check_1_job", selection=AssetSelection.checks(check_1)) + +# schedule my_job to run every day at midnight +basic_schedule = ScheduleDefinition(job=my_job, cron_schedule="0 0 * * *") + +defs = Definitions( + assets=[my_asset], + asset_checks=[check_1, check_2], + jobs=[my_job, my_asset_only_job, checks_only_job, check_1_job], + schedules=[basic_schedule], +) +``` + +--- + +## Testing checks + +Refer to the [Asset checks section](/concepts/testing#testing-asset-checks) of the [Testing](/concepts/testing) documentation for more information. + +--- + +## Alerting on failed checks (Dagster+ only) + +In Dagster+, you can set up alerts to notify you when assets checks fail. 
To alert on failed checks, create an alert policy with the following settings:
+
+
+---
+
+## APIs in this guide
+
+| Name | Description |
+| ------------------------------------------------- | ------------------------------------------------------------------------------------------------------------------------------------- |
+| | A decorator used to define asset checks that execute in their own [op](/concepts/ops-jobs-graphs/ops). |
+| | A decorator used to define a set of asset checks that execute together in the same [op](/concepts/ops-jobs-graphs/ops). |
+| | The class returned by asset checks. |
+| | Defines the severity of a given asset check result. |
+| | A class that's passed to asset decorators to define checks that execute in the same [op](/concepts/ops-jobs-graphs/ops) as the asset. |
+
+---
+
+## Related
+
+
+
+
+
+
+
+
+
diff --git a/docs/content/concepts/assets/asset-checks/subsetting-asset-checks.mdx b/docs/content/concepts/assets/asset-checks/subsetting-asset-checks.mdx
new file mode 100644
index 0000000000000..f5bee5b7c5519
--- /dev/null
+++ b/docs/content/concepts/assets/asset-checks/subsetting-asset-checks.mdx
@@ -0,0 +1,153 @@
+---
+title: "Subsetting asset checks | Dagster Docs"
+description: "Learn how to execute subsets of asset checks."
+---
+
+# Subsetting asset checks
+
+In some cases, you may only want to execute some of the [asset checks](/concepts/assets/asset-checks) defined in a `@multi_asset_check` or `@multi_asset`. For example, you may want to materialize a multi-asset without executing the checks, or only execute a certain set of checks.
+
+In this guide, we'll show you a few approaches to subsetting asset checks in `@multi_asset_check` and `@multi_asset`.
+
+---
+
+## Prerequisites
+
+Before continuing, you should be familiar with:
+
+- [Asset checks](/concepts/assets/asset-checks)
+- [Multi-assets](/concepts/assets/multi-assets)
+
+---
+
+## Subsetting checks in @multi_asset_checks
+
+Using the `@multi_asset_check` decorator's `specs` and `can_subset` arguments, you can execute a subset of checks in a single op.
+
+Inside the body of the function, we can use `context.selected_asset_check_keys` to identify which computations to run. We can also set the decorator's `can_subset` parameter to `True` to execute a subset of the asset checks that the computation contains.
+
+As we don't know in advance which checks will be executed, we explicitly `yield` each asset check result that we're expected to create:
+
+```python file=/concepts/assets/asset_checks/subset_multi_asset_check.py
+from typing import Iterable
+
+from dagster import (
+    AssetCheckExecutionContext,
+    AssetCheckKey,
+    AssetCheckResult,
+    AssetCheckSpec,
+    AssetKey,
+    multi_asset_check,
+)
+
+
+@multi_asset_check(
+    specs=[
+        AssetCheckSpec(name="asset_check_one", asset="asset_one"),
+        AssetCheckSpec(name="asset_check_two", asset="asset_two"),
+    ],
+    can_subset=True,
+)
+def the_check(context: AssetCheckExecutionContext) -> Iterable[AssetCheckResult]:
+    if (
+        AssetCheckKey(AssetKey("asset_one"), "asset_check_one")
+        in context.selected_asset_check_keys
+    ):
+        yield AssetCheckResult(
+            passed=True, metadata={"foo": "bar"}, check_name="asset_check_one"
+        )
+    if (
+        AssetCheckKey(AssetKey("asset_two"), "asset_check_two")
+        in context.selected_asset_check_keys
+    ):
+        yield AssetCheckResult(
+            passed=True, metadata={"foo": "bar"}, check_name="asset_check_two"
+        )
+```
+
+---
+
+## Subsetting checks in @multi_assets
+
+When using [multi-assets](/concepts/assets/multi-assets), Dagster assumes that all checks specified on the asset should be executed after it is materialized.
This means that attempting to execute some, but not all, of the checks defined by a multi-asset will result in an error. + +In the following example, we only want to execute a check when the `multi_asset_piece_1` asset produced by the `multi_asset_1_and_2` multi-asset is materialized: + +```python file=/concepts/assets/asset_checks/subset_check_multi_asset.py +from dagster import ( + AssetCheckKey, + AssetCheckResult, + AssetCheckSpec, + AssetExecutionContext, + AssetKey, + AssetSpec, + MaterializeResult, + multi_asset, +) + + +@multi_asset( + specs=[ + AssetSpec("multi_asset_piece_1", group_name="asset_checks", skippable=True), + AssetSpec("multi_asset_piece_2", group_name="asset_checks", skippable=True), + ], + check_specs=[AssetCheckSpec("my_check", asset="multi_asset_piece_1")], + can_subset=True, +) +def multi_asset_1_and_2(context: AssetExecutionContext): + if AssetKey("multi_asset_piece_1") in context.selected_asset_keys: + yield MaterializeResult(asset_key="multi_asset_piece_1") + # The check will only execute when multi_asset_piece_1 is materialized + if ( + AssetCheckKey(AssetKey("multi_asset_piece_1"), "my_check") + in context.selected_asset_check_keys + ): + yield AssetCheckResult(passed=True, metadata={"foo": "bar"}) + if AssetKey("multi_asset_piece_2") in context.selected_asset_keys: + # No check on multi_asset_piece_2 + yield MaterializeResult(asset_key="multi_asset_piece_2") +``` + +Let's review what we did to accomplish this: + +- In the decorator: + - For each in the decorator's `specs` argument, set the `skippable` parameter on to `True`. This allows the asset to be skipped when the multi-asset is materialized. + - Set the decorator's `can_subset` parameter to `True`, allowing a subset of the computation's assets to be executed +- Use to identify which computations to run +- For each asset the multi-asset could create, explicitly `yield` a as we don't know in advance which assets will be executed +- Use to determine which asset check to run. In this example, the `my_check` check will only execute when `multi_asset_piece_1` is materialized. + +--- + +## APIs in this guide + +| Name | Description | +| ------------------------------------------------- | ------------------------------------------------------------------------------------------------------------------------------------- | +| | A decorator used to define a set of asset checks that execute together in the same [op](/concepts/ops-jobs-graphs/ops). | +| | The class returned by asset checks. | +| | Defines the severity of a given asset check result. | +| | A class that's passed to asset decorators to define checks that execute in the same [op](/concepts/ops-jobs-graphs/ops) as the asset. | +| | A decorator used to define [multi-assets](/concepts/assets/multi-assets). | + +--- + +## Related + + + + + + + diff --git a/docs/content/concepts/assets/asset-jobs.mdx b/docs/content/concepts/assets/asset-jobs.mdx index 4c277a9017602..72a7fe840a163 100644 --- a/docs/content/concepts/assets/asset-jobs.mdx +++ b/docs/content/concepts/assets/asset-jobs.mdx @@ -1,75 +1,84 @@ --- title: Asset jobs | Dagster -description: Asset jobs are the main unit for materializing and monitoring Software-defined assets in Dagster. +description: Asset jobs are the main unit for materializing and monitoring asset definitions in Dagster. --- # Asset jobs Looking to execute a graph of{" "} - ops, which aren't tied to - Software-defined Assets? Check out the{" "} + ops, which aren't tied to asset + definitions? 
Check out the{" "} Op jobs documentation. -[Jobs](/concepts/ops-jobs-graphs/jobs) are the main unit for executing and monitoring Software-defined assets in Dagster. An asset job materializes a selection of [Software-defined Assets](/concepts/assets/software-defined-assets). It can be launched in a few different ways: +[Jobs](/concepts/ops-jobs-graphs/jobs) are the main unit for executing and monitoring [asset definitions](/concepts/assets/software-defined-assets) in Dagster. An asset job is a type of [job](/concepts/ops-jobs-graphs/jobs) that targets a selection of assets and can be launched: - Manually from the Dagster UI -- At fixed intervals, by [schedules](/concepts/partitions-schedules-sensors/schedules) +- At fixed intervals, by [schedules](/concepts/automation/schedules) - When external changes occur, using [sensors](/concepts/partitions-schedules-sensors/sensors) --- -## Relevant APIs +## Creating asset jobs -| Name | Description | -| -------------------------------------- | --------------------------------------------------------- | -| | A function for defining a job from a selection of assets. | +In this section, we'll demonstrate how to create a few asset jobs that target the following assets: ---- +```python file=/concepts/assets/non_argument_deps.py startafter=start_marker endbefore=end_marker +from dagster import asset -## Creating asset jobs -Asset jobs materialize a fixed set of assets each time they run. Additionally, multiple jobs can target overlapping sets of assets: +@asset +def sugary_cereals() -> None: + execute_query( + "CREATE TABLE sugary_cereals AS SELECT * FROM cereals WHERE sugar_grams > 10" + ) -```python file=/concepts/assets/build_job.py startafter=start_job_only_marker endbefore=end_job_only_marker -from dagster import Definitions, asset, define_asset_job +@asset(deps=[sugary_cereals]) +def shopping_list() -> None: + execute_query("CREATE TABLE shopping_list AS SELECT * FROM sugary_cereals") +``` -@asset -def asset1(): - return [1, 2, 3] +To create an asset job, use . An asset-based job is based on the [assets](/concepts/assets/software-defined-assets) the job targets and their dependencies. +You can target one or multiple assets, or create multiple jobs that target overlapping sets of assets. In the following example, we have two jobs: -@asset -def asset2(asset1): - return asset1 + [4] +- `all_assets_job` targets all assets +- `sugary_cereals_job` targets only the `sugary_cereals` asset + +```python file=/concepts/assets/build_job.py startafter=start_marker endbefore=end_marker +from dagster import Definitions, define_asset_job all_assets_job = define_asset_job(name="all_assets_job") -asset1_job = define_asset_job(name="asset1_job", selection="asset1") +sugary_cereals_job = define_asset_job( + name="sugary_cereals_job", selection="sugary_cereals" +) defs = Definitions( - assets=[asset1, asset2], - jobs=[all_assets_job, asset1_job], + assets=[sugary_cereals, shopping_list], + jobs=[all_assets_job, sugary_cereals_job], ) ``` -The topology of an asset-based job is based on the [assets](/concepts/assets/software-defined-assets) and their dependencies. - --- ## Making asset jobs available to Dagster tools -You make asset jobs available to the UI, GraphQL, and the command line by including them in a object at the top level of a Python module or file. The tool loads that module as a code location. If you include schedules or sensors, the [code location](/concepts/code-locations) will automatically include jobs that those schedules or sensors target. 
+Including the jobs in a object located at the top level of a Python module or file makes asset jobs available to the UI, GraphQL, and the command line. The Dagster tool loads that module as a code location. If you include schedules or sensors, the [code location](/concepts/code-locations) will automatically include jobs that those schedules or sensors target. ```python file=/concepts/assets/jobs_to_definitions.py -from dagster import Definitions, asset, define_asset_job +from dagster import Definitions, MaterializeResult, asset, define_asset_job @asset def number_asset(): - return [1, 2, 3] + yield MaterializeResult( + metadata={ + "number": 1, + } + ) number_asset_job = define_asset_job(name="number_asset_job", selection="number_asset") @@ -99,8 +108,14 @@ You can run an asset job in a variety of ways: --- -## See it in action +## Reference + +### Relevant APIs + +| Name | Description | +| -------------------------------------- | --------------------------------------------------------- | +| | A function for defining a job from a selection of assets. | -For more examples of asset jobs, check out the following in our [Hacker News example](https://github.com/dagster-io/dagster/tree/master/examples/project_fully_featured): +## Examples -- [Building an asset job that targets an asset group](https://github.com/dagster-io/dagster/blob/master/examples/project_fully_featured/project_fully_featured/jobs.py) +Check this code in the [Hacker News example](https://github.com/dagster-io/dagster/tree/master/examples/project_fully_featured) that [builds an asset job that targets an asset group](https://github.com/dagster-io/dagster/blob/master/examples/project_fully_featured/project_fully_featured/jobs.py). diff --git a/docs/content/concepts/assets/asset-observations.mdx b/docs/content/concepts/assets/asset-observations.mdx index 43b0154114fe1..37f8fda98565e 100644 --- a/docs/content/concepts/assets/asset-observations.mdx +++ b/docs/content/concepts/assets/asset-observations.mdx @@ -55,7 +55,7 @@ height={917} There are a variety of types of metadata that can be associated with an observation event, all through the class. Each observation event optionally takes a dictionary of metadata that is then displayed in the event log and the [Asset Details](/concepts/webserver/ui#asset-details) page. Check our API docs for for more details on the types of event metadata available. ```python file=concepts/assets/observations.py startafter=start_observation_asset_marker_2 endbefore=end_observation_asset_marker_2 -from dagster import AssetMaterialization, AssetObservation, MetadataValue, op +from dagster import AssetObservation, MetadataValue, op @op @@ -75,7 +75,6 @@ def observes_dataset_op(context: OpExecutionContext): }, ) ) - context.log_event(AssetMaterialization(asset_key="my_dataset")) return remote_storage_path ``` @@ -93,7 +92,7 @@ height={1146} If you are observing a single slice of an asset (e.g. a single day's worth of data on a larger table), rather than mutating or creating it entirely, you can indicate this to Dagster by including the `partition` argument on the object. 
```python file=/concepts/assets/observations.py startafter=start_partitioned_asset_observation endbefore=end_partitioned_asset_observation -from dagster import AssetMaterialization, Config, op, OpExecutionContext +from dagster import Config, op, OpExecutionContext class MyOpConfig(Config): @@ -141,7 +140,7 @@ def foo_source_asset(): When the file content changes, the hash and therefore the data version will change - this will notify Dagster that downstream assets derived from an older value (i.e. a different data version) of this source asset might need to be updated. -Source asset observations can be triggered via the "Observe sources" button in the the UI graph explorer view. Note that this button will only be visible if at least one source asset in the current graph defines an observation function. +Source asset observations can be triggered via the "Observe sources" button in the UI graph explorer view. Note that this button will only be visible if at least one source asset in the current graph defines an observation function. observable-source-assetASSET_KEY* - An asterisk (*) preceding an asset key selects an asset and + An asterisk (*) following an asset key selects an asset and all of its downstream dependencies @@ -109,7 +109,7 @@ A query includes a list of clauses. Clauses are separated by commas, except in t A plus sign (+) following an asset key selects an asset and - one layer upstream of the asset. + one layer downstream of the asset.



Including multiple +s will select that number of downstream @@ -547,7 +547,7 @@ height={1508} objects that represent external assets | -| | An object that represents the metadata of a particular asset | +| Name | Description | +| ------------------------------- | ---------------------------------------------------------------------------------------------------- | +| | An object that represents the metadata of a particular asset without representing how it's computed. | --- ## Defining external assets -The following code declares a single external asset that represents a file in S3 and passes it to a object: +External assets are defined using the class. An represents metadata about an asset without describing how it's computed. The following code declares a single external asset that represents a file in S3 and passes it to a object: @@ -54,9 +47,9 @@ The following code declares a single external asset that represents a file in S3 Click the **Asset in the Dagster UI** tab to see how this asset would be rendered in the Dagster UI. ```python file=/concepts/assets/external_assets/single_declaration.py -from dagster import AssetSpec, Definitions, external_asset_from_spec +from dagster import AssetSpec, Definitions -defs = Definitions(assets=[external_asset_from_spec(AssetSpec("file_in_s3"))]) +defs = Definitions(assets=[AssetSpec("file_in_s3")]) ``` --- @@ -94,12 +87,12 @@ In the following example, we have two assets: `raw_logs` and `processed_logs`. T Click the **Assets in the Dagster UI** tab to see how these assets would be rendered in the Dagster UI. ```python file=/concepts/assets/external_assets/external_asset_deps.py -from dagster import AssetSpec, Definitions, external_assets_from_specs +from dagster import AssetSpec, Definitions raw_logs = AssetSpec("raw_logs") processed_logs = AssetSpec("processed_logs", deps=[raw_logs]) -defs = Definitions(assets=external_assets_from_specs([raw_logs, processed_logs])) +defs = Definitions(assets=[raw_logs, processed_logs]) ``` --- @@ -133,7 +126,7 @@ Fully-managed assets can depend on external assets. In this example, the `aggreg Click the **Assets in the Dagster UI** tab to see how these assets would be rendered in the Dagster UI. ```python file=/concepts/assets/external_assets/normal_asset_depending_on_external.py -from dagster import AssetSpec, Definitions, asset, external_assets_from_specs +from dagster import AssetSpec, Definitions, asset raw_logs = AssetSpec("raw_logs") processed_logs = AssetSpec("processed_logs", deps=[raw_logs]) @@ -145,9 +138,7 @@ def aggregated_logs() -> None: ... -defs = Definitions( - assets=[aggregated_logs, *external_assets_from_specs([raw_logs, processed_logs])] -) +defs = Definitions(assets=[aggregated_logs, raw_logs, processed_logs]) ``` @@ -180,90 +171,7 @@ To keep your external assets updated, you can use any of the following approache ### Using the REST API -Whether you're using Dagster OSS or Dagster Cloud, you can use a REST endpoint for reporting asset materializations. The API also has endpoints for reporting [asset observations](/concepts/assets/asset-observations) and [asset check evaluations](/concepts/assets/asset-checks). - -Refer to the following tabs for examples using `curl` and Python to communicate with the API. 
- -#### Using curl - - - - -##### Dagster Cloud - -```bash -curl --request POST \ - --url https://{organization}.dagster.cloud/{deployment}/report_asset_materialization/{asset_key} \ - --header 'Content-Type: application/json' \ - --header 'Dagster-Cloud-Api-Token: {token}' \ - --data '{ - "metadata" : { - "source": "From curl command" - } -}' -``` - ---- - - - - -##### Dagster OSS - -```bash -curl --request POST \ - --url https://{dagster_webserver_host}/report_asset_materialization/{asset_key} \ - --header 'Content-Type: application/json' \ - --data '{ - "metadata" : { - "source": "From curl command" - } -}' -``` - ---- - - - - -#### Using Python - - - - -##### Dagster Cloud - -```python -import requests - -url = f"https://{organization}.dagster.cloud/{deployment}/report_asset_materialization/{asset_key}" -payload = { "metadata": { "source": "From python script" } } -headers = { "Content-Type": "application/json", "Dagster-Cloud-Api-Token": "{token}" } - -response = requests.request("POST", url, json=payload, headers=headers) -``` - ---- - - - - -##### Dagster OSS - -```python -import requests - -url = f"https://{dagster_webserver_host}/report_asset_materialization/{asset_key}" -payload = { "metadata": { "source": "From python script" } } -headers = { "Content-Type": "application/json" } - -response = requests.request("POST", url, json=payload, headers=headers) -``` - ---- - - - +Whether you're using Dagster OSS or Dagster+, you can use a REST endpoint for reporting asset materializations, asset check evaluations, and asset observations. Refer to the [External assets REST API reference](/apidocs/external-assets-rest) for more information and examples. ### Using sensors @@ -278,7 +186,6 @@ from dagster import ( Definitions, SensorEvaluationContext, SensorResult, - external_asset_from_spec, sensor, ) @@ -303,7 +210,7 @@ def keep_external_asset_a_up_to_date(context: SensorEvaluationContext) -> Sensor defs = Definitions( - assets=[external_asset_from_spec(AssetSpec("external_asset_a"))], + assets=[AssetSpec("external_asset_a")], sensors=[keep_external_asset_a_up_to_date], ) ``` @@ -335,7 +242,6 @@ from dagster import ( AssetSpec, Definitions, OpExecutionContext, - external_asset_from_spec, job, op, ) @@ -351,9 +257,7 @@ def a_job() -> None: an_op() -defs = Definitions( - assets=[external_asset_from_spec(AssetSpec("external_asset"))], jobs=[a_job] -) +defs = Definitions(assets=[AssetSpec("external_asset")], jobs=[a_job]) ``` --- @@ -362,7 +266,7 @@ defs = Definitions( decorator. This decorator functions similarly to the decorator, but requires an `outs` parameter specifying each output asset of the function. +The easiest way to create a multi-asset is with the decorator. This decorator functions similarly to the decorator, but requires a `specs` parameter specifying each asset that the function materializes. ```python file=/concepts/assets/multi_assets.py startafter=start_basic_multi_asset endbefore=end_basic_multi_asset -from dagster import AssetOut, multi_asset, AssetExecutionContext +from dagster import AssetSpec, multi_asset -@multi_asset( - outs={ - "my_string_asset": AssetOut(), - "my_int_asset": AssetOut(), - } -) +@multi_asset(specs=[AssetSpec("users"), AssetSpec("orders")]) def my_function(): - return "abc", 123 + # some code that writes out data to the users table and the orders table + ... ``` -By default, the names of the outputs will be used to form the asset keys of the multi-asset. 
The decorated function will be used to create the op for these assets and must emit an output for each of them. In this case, we can emit multiple outputs by returning a tuple of values, one for each asset. - ### Conditional materialization -In some cases, an asset may not need to be updated in storage each time the decorated function is executed. You can use the `is_required` parameter along with `yield` syntax to implement this behavior. +In some cases, an asset may not need to be updated in storage each time the decorated function is executed. You can use the `skippable` parameter along with `yield` syntax and to implement this behavior. + +If the `skippable` parameter is set to `True` on an , and your function does not `yield` a object for that asset, then: -If the `is_required` parameter is set to `False` on an output, and your function does not `yield` an object for that output, then no asset materialization event will be created, the I/O manager will not be invoked, downstream assets will not be materialized, and asset sensors monitoring the asset will not trigger. +- No asset materialization event will be created +- Downstream assets in the same run will not be materialized +- Asset sensors monitoring the asset will not trigger ```python file=/concepts/assets/multi_asset_conditional_materialization.py import random -from dagster import AssetOut, Output, asset, multi_asset +from dagster import AssetSpec, MaterializeResult, asset, multi_asset @multi_asset( - outs={"asset1": AssetOut(is_required=False), "asset2": AssetOut(is_required=False)} + specs=[AssetSpec("asset1", skippable=True), AssetSpec("asset2", skippable=True)] ) def assets_1_and_2(): if random.randint(1, 10) < 5: - yield Output([1, 2, 3, 4], output_name="asset1") + yield MaterializeResult(asset_key="asset1") if random.randint(1, 10) < 5: - yield Output([6, 7, 8, 9], output_name="asset2") + yield MaterializeResult(asset_key="asset2") -@asset -def downstream1(asset1): - # will not run when assets_1_and_2 doesn't materialize the asset1 - return asset1 + [5] +@asset(deps=["asset1"]) +def downstream1(): + """Will not run when assets_1_and_2 doesn't materialize asset1.""" -@asset -def downstream2(asset2): - # will not run when assets_1_and_2 doesn't materialize the asset2 - return asset2 + [10] +@asset(deps=["asset2"]) +def downstream2(): + """Will not run when assets_1_and_2 doesn't materialize asset2.""" ``` ### Subsetting multi-assets By default, it is assumed that the computation inside of a multi-asset will always produce the contents all of the associated assets. This means that attempting to execute a set of assets that produces some, but not all, of the assets defined by a given multi-asset will result in an error. -Sometimes, the underlying computation is sufficiently flexible to allow for computing an arbitrary subset of the assets associated with it. In these cases, set the `is_required` attribute of the outputs to `False`, and set the `can_subset` parameter of the decorator to `True`. +Sometimes, the underlying computation is sufficiently flexible to allow for computing an arbitrary subset of the assets associated with it. In these cases, set the `skippable` attribute of the asset specs to `True`, and set the `can_subset` parameter of the decorator to `True`. -Inside the body of the function, we can use `context.selected_output_names` or `context.selected_asset_keys` to find out which computations should be run. 
+Inside the body of the function, we can use `context.selected_asset_keys` to find out which assets should be materialized. ```python file=/concepts/assets/multi_assets.py startafter=start_subsettable_multi_asset endbefore=end_subsettable_multi_asset -from dagster import AssetOut, Output, multi_asset +from dagster import AssetExecutionContext, AssetSpec, MaterializeResult, multi_asset @multi_asset( - outs={ - "a": AssetOut(is_required=False), - "b": AssetOut(is_required=False), - }, + specs=[AssetSpec("asset1", skippable=True), AssetSpec("asset2", skippable=True)], can_subset=True, ) def split_actions(context: AssetExecutionContext): - if "a" in context.op_execution_context.selected_output_names: - yield Output(value=123, output_name="a") - if "b" in context.op_execution_context.selected_output_names: - yield Output(value=456, output_name="b") + if "asset1" in context.op_execution_context.selected_asset_keys: + yield MaterializeResult(asset_key="asset1") + if "asset2" in context.op_execution_context.selected_asset_keys: + yield MaterializeResult(asset_key="asset2") ``` -Because our outputs are now optional, we can no longer rely on returning a tuple of values, as we don't know in advance which values will be computed. Instead, we explicitly `yield` each output that we're expected to create. - ### Dependencies inside multi-assets -When a multi-asset is created, it is assumed that each output asset depends on all of the input assets. This may not always be the case, however. - -In these situations, you may optionally provide a mapping from each output asset to the set of s that it depends on. This information is used to display lineage information in the Dagster UI and for parsing selections over your asset graph. +Assets defined within multi-assets can have dependencies on upstream assets. These dependencies can be expressed using the `deps` attribute on . ```python file=/concepts/assets/multi_assets.py startafter=start_asset_deps_multi_asset endbefore=end_asset_deps_multi_asset -from dagster import AssetKey, AssetOut, Output, multi_asset +from dagster import AssetKey, AssetSpec, asset, multi_asset -@multi_asset( - outs={"c": AssetOut(), "d": AssetOut()}, - internal_asset_deps={ - "c": {AssetKey("a")}, - "d": {AssetKey("b")}, - }, -) -def my_complex_assets(a, b): - # c only depends on a - yield Output(value=a + 1, output_name="c") - # d only depends on b - yield Output(value=b + 1, output_name="d") +@asset +def a(): ... + + +@asset +def b(): ... + + +@multi_asset(specs=[AssetSpec("c", deps=["b"]), AssetSpec("d", deps=["a"])]) +def my_complex_assets(): ... ``` ### Multi-asset code versions @@ -139,14 +126,15 @@ Multi-assets may assign different code versions for each of their outputs: ```python file=/concepts/assets/code_versions.py startafter=start_multi_asset endbefore=end_multi_asset @multi_asset( - outs={ - "a": AssetOut(code_version="1"), - "b": AssetOut(code_version="2"), - } + specs=[AssetSpec(key="a", code_version="1"), AssetSpec(key="b", code_version="2")] ) def multi_asset_with_versions(): - yield Output(100, "a") - yield Output(200, "b") + with open("data/a.json", "w") as f: + json.dump(100, f) + yield MaterializeResult("a") + with open("data/b.json", "w") as f: + json.dump(200, f) + yield MaterializeResult("b") ``` Just as with regular assets, these versions are attached to the `AssetMaterialization` objects for each of the constituent assets and represented in the UI. 
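+
+As a quick usage sketch for the subsetting example above (assuming `split_actions` is importable from your module), a test can launch a run that targets only one of the assets with `materialize` and its `selection` argument:
+
+```python
+from dagster import materialize
+
+# Materialize only asset1. Inside split_actions, selected_asset_keys will
+# contain just asset1's key, so asset2 is skipped.
+result = materialize([split_actions], selection=["asset1"])
+assert result.success
+```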
diff --git a/docs/content/concepts/assets/software-defined-assets.mdx b/docs/content/concepts/assets/software-defined-assets.mdx index 05649c42e9f13..3a89fc45e7698 100644 --- a/docs/content/concepts/assets/software-defined-assets.mdx +++ b/docs/content/concepts/assets/software-defined-assets.mdx @@ -1,9 +1,9 @@ --- -title: Software-defined assets | Dagster -description: A software-defined asset is a description of how to compute the contents of a particular data asset. +title: Asset definitions | Dagster +description: An asset definition is a description of how to compute the contents of a particular data asset. --- -# Software-defined assets +# Asset definitions Prefer videos? Check out our{" "} @@ -14,65 +14,68 @@ description: A software-defined asset is a description of how to compute the con demo {" "} - videos to get a quick look at Software-defined assets. + videos to get a quick look at asset definitions. -An **asset** is an object in persistent storage, such as a table, file, or persisted machine learning model. A **software-defined asset** is a description, in code, of an asset that should exist and how to produce and update that asset. +An **asset** is an object in persistent storage, such as a table, file, or persisted machine learning model. An **asset definition** is a description, in code, of an asset that should exist and how to produce and update that asset. -Software-defined assets enable a declarative approach to data management, in which code is the source of truth on what data assets should exist and how those assets are computed. +Asset definitions enable a declarative approach to data management, in which code is the source of truth on what data assets should exist and how those assets are computed. -A software-defined asset includes the following: +An asset definition includes the following: - An , which is a handle for referring to the asset. -- A set of upstream asset keys, which refer to assets that the contents of the software-defined asset are derived from. -- A Python function, which is responsible for computing the contents of the asset from its upstream dependencies. +- A set of upstream asset keys, which refer to assets that the contents of the asset definition are derived from. +- A Python function, which is responsible for computing the contents of the asset from its upstream dependencies and storing the results. - **Note**: Behind-the-scenes, the Python function is an [op](/concepts/ops-jobs-graphs/ops). Ops are an advanced topic that isn't required to get started with Dagster. A crucial distinction between Software-defined Assets and ops is that Software-defined Assets know about their dependencies, while ops do not. Ops aren't connected to dependencies until they're placed inside a [graph](/concepts/ops-jobs-graphs/graphs). + **Note**: Behind-the-scenes, the Python function is an [op](/concepts/ops-jobs-graphs/ops). Ops are an advanced topic that isn't required to get started with Dagster. A crucial distinction between asset definitions and ops is that asset definitions know about their dependencies, while ops do not. Ops aren't connected to dependencies until they're placed inside a [graph](/concepts/ops-jobs-graphs/graphs). -**Materializing** an asset is the act of running its function and saving the results to persistent storage. You can initiate materializations from [the Dagster UI](/concepts/webserver/ui) or by invoking Python APIs. 
By default, assets are materialized to pickle files on your local filesystem, but materialization behavior is fully customizable using [I/O managers](/concepts/io-management/io-managers#applying-io-managers-to-assets). It's possible to materialize an asset in multiple storage environments, such as production and staging. +**Materializing** an asset is the act of running its function and saving the results to persistent storage. You can initiate materializations from [the Dagster UI](/concepts/webserver/ui) or by invoking Python APIs. --- ## Relevant APIs -| Name | Description | -| ------------------------------------- | ---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | -| | A decorator used to define assets. | -| | A class that describes an asset, but doesn't define how to compute it. s are used to represent assets that other assets depend on, in settings where they can't be materialized themselves. | +| Name | Description | +| -------------------------------------- | --------------------------------------------------------------------------------------------------------------------------------------------- | +| | A decorator used to define assets. | +| | A class that represents a one or more asset definitions, usually backed by a single function that materializes or observes them all together. | --- ## Defining assets -- [Basic software-defined assets](#a-basic-software-defined-asset) +- [Basic asset definitions](#basic-asset-definitions) - [Assets with dependencies](#assets-with-dependencies) - [Graph-backed assets and multi-assets](#graph-backed-assets-and-multi-assets) - [Accessing asset context](#asset-context) - [Configuring assets](#asset-configuration) -### A basic software-defined asset +### Basic asset definitions -The easiest way to create a software-defined asset is with the decorator. +The easiest way to create an asset definition is with the decorator. ```python file=/concepts/assets/basic_asset_definition.py +import json +import os + from dagster import asset @asset def my_asset(): - return [1, 2, 3] + os.makedirs("data", exist_ok=True) + with open("data/my_asset.json", "w") as f: + json.dump([1, 2, 3], f) ``` By default, the name of the decorated function, `my_asset`, is used as the asset key. The decorated function is responsible for producing the asset's contents. The asset in this example doesn't depend on any other assets. ### Assets with dependencies -Software-defined assets can depend on other software-defined assets. In this section, we'll show you how to define: +Asset definitions can depend on other asset definitions. 
In this section, we'll show you how to define: - [Basic asset dependencies](#defining-basic-dependencies) -- [Basic managed-loading dependencies](#defining-basic-managed-loading-dependencies) -- [Explicit managed-loading asset dependencies](#defining-explicit-managed-loading-dependencies) -- [External asset dependencies](#defining-external-asset-dependencies) +- [Asset dependencies across code locations](#defining-asset-dependencies-across-code-locations) #### Defining basic dependencies @@ -86,7 +89,9 @@ from dagster import asset @asset def sugary_cereals() -> None: - execute_query("CREATE TABLE sugary_cereals AS SELECT * FROM cereals") + execute_query( + "CREATE TABLE sugary_cereals AS SELECT * FROM cereals WHERE sugar_grams > 10" + ) @asset(deps=[sugary_cereals]) @@ -94,136 +99,52 @@ def shopping_list() -> None: execute_query("CREATE TABLE shopping_list AS SELECT * FROM sugary_cereals") ``` -#### Defining basic managed-loading dependencies - -When using basic dependencies, as above, it's expected that if you need direct access to the contents of the asset, the code you include inside your `@asset`-decorated function will load the data from the upstream asset. Dagster alternatively allows you to delegate loading data to an [I/O manager](/concepts/io-management/io-managers). To do this, you express the dependency by using the upstream asset name as the name of one of the arguments on the decorated function. - -In the following example, `downstream_asset` depends on `upstream_asset`. That means that the contents of `upstream_asset` are provided to the function that computes the contents of `downstream_asset`. - -```python file=/concepts/assets/asset_dependency.py startafter=start_marker endbefore=end_marker -@asset -def upstream_asset(): - return [1, 2, 3] - - -@asset -def downstream_asset(upstream_asset): - return upstream_asset + [4] -``` - -In this example, Dagster will load the data returned from `upstream_asset` and pass it as the `upstream_asset` parameter to `downstream_asset`. - -#### Defining explicit managed-loading dependencies - -If defining dependencies by matching argument names to upstream asset names feels too magical for your tastes, you can also define dependencies in a more explicit way: - -```python file=/concepts/assets/explicit_string_asset_dependency.py -from dagster import AssetIn, asset - - -@asset -def upstream_asset(): - return [1, 2, 3] - - -@asset(ins={"upstream": AssetIn("upstream_asset")}) -def downstream_asset(upstream): - return upstream + [4] -``` - -In this case, `ins={"upstream": AssetIn("upstream_asset")}` declares that the contents of the asset with the key `upstream_asset` will be provided to the function argument named `upstream`. +#### Defining asset dependencies across code locations -Asset keys can also be provided to to explicitly identify the asset. For example: +Assets can depend on assets in different [code locations](/concepts/code-locations). 
Consider this example for `code_location_1`: -```python file=/concepts/assets/explicit_asset_dependency_asset_keys.py -from dagster import AssetIn, asset - - -# If the upstream key has a single segment, you can specify it with a string: -@asset(ins={"upstream": AssetIn(key="upstream_asset")}) -def downstream_asset(upstream): - return upstream + [4] - - -# If it has multiple segments, you can provide a list: -@asset(ins={"upstream": AssetIn(key=["some_db_schema", "upstream_asset"])}) -def another_downstream_asset(upstream): - return upstream + [10] -``` - -#### Defining external asset dependencies - -Software-defined assets frequently depend on assets that are generated elsewhere. Using , you can include these external assets and allow your other assets to depend on them. - -For example: - -```python file=/concepts/assets/source_asset.py startafter=start_marker endbefore=end_marker -from dagster import AssetKey, SourceAsset, asset - -my_source_asset = SourceAsset(key=AssetKey("a_source_asset")) - - -@asset(deps=[my_source_asset]) -def my_derived_asset(): - return execute_query("SELECT * from a_source_asset").as_list() + [4] -``` - -You can also define a dependency on a `SourceAsset` that will load the data of the asset: - -```python file=/concepts/assets/source_asset.py startafter=start_argument_dependency endbefore=end_argument_dependency -@asset -def my_other_derived_asset(a_source_asset): - return a_source_asset + [4] -``` - -**Note**: The source asset's asset key must be provided as the argument to downstream assets. In the previous example, the asset key is `a_source_asset` and not `my_source_asset`. - -You can also re-use assets across [code locations](/concepts/code-locations) by including them as source assets. Consider this example for `code_location_1`: - -```python file=/concepts/assets/cross_cl_code_location_1.py +```python file=/concepts/assets/cross_cl_code_location_1.py startafter=start endbefore=end # code_location_1.py +import json + from dagster import Definitions, asset @asset def code_location_1_asset(): - return 5 + with open("/data/code_location_1_asset.json", "w+") as f: + json.dump(5, f) defs = Definitions(assets=[code_location_1_asset]) ``` -And then in `code_location_2`, we've included `code_location_1_asset` as a source asset: +In `code_location_2`, we can then reference it via its asset key: -```python file=/concepts/assets/cross_cl_code_location_2.py +```python file=/concepts/assets/cross_cl_code_location_2.py startafter=start endbefore=end # code_location_2.py +import json -from dagster import AssetKey, Definitions, SourceAsset, asset +from dagster import AssetKey, Definitions, asset -code_location_1_source_asset = SourceAsset(key=AssetKey("code_location_1_asset")) +@asset(deps=["code_location_1_asset"]) +def code_location_2_asset(): + with open("/data/code_location_1_asset.json", "r") as f: + x = json.load(f) -@asset -def code_location_2_asset(code_location_1_asset): - return code_location_1_asset + 6 + with open("/data/code_location_2_asset.json", "w+") as f: + json.dump(x + 6, f) -defs = Definitions( - assets=[code_location_2_asset, code_location_1_source_asset], -) +defs = Definitions(assets=[code_location_2_asset]) ``` -Using source assets has a few advantages over having the code inside of an asset's function load the data without specifying any dependencies: - -- **The UI can show asset lineage that includes the source assets**. 
If different asset definitions in different code locations have the same asset key as a and both code locations are loaded into the underlying webserver, the UI can represent the asset lineage across the code locations. This can be accomplished using [workspace files](/concepts/code-locations/workspace-files). -- **Dagster can use data-loading code factored into an to load the contents of the source asset**. -- **Asset dependencies can be written in a consistent way,** independent of whether they're downstream from a source asset or a software-defined asset. This makes it easy to swap out a source asset for a software-defined asset and vice versa. - ### Graph-backed assets and multi-assets -If you'd like to define more complex assets, Dagster offers augmented software-defined asset abstractions: +If you'd like to define more complex assets, Dagster offers augmented asset definition abstractions: -- [Multi-assets](/concepts/assets/multi-assets): A set of software-defined assets that are all updated by the same [op](/concepts/ops-jobs-graphs/ops) or [graph](/concepts/ops-jobs-graphs/graphs). +- [Multi-assets](/concepts/assets/multi-assets): A set of asset definitions that are all updated by the same [op](/concepts/ops-jobs-graphs/ops) or [graph](/concepts/ops-jobs-graphs/graphs). - [Graph-backed assets](/concepts/assets/graph-backed-assets): An asset whose computations are separated into multiple [ops](/concepts/ops-jobs-graphs/ops) that are combined to build a [graph](/concepts/ops-jobs-graphs/graphs). If the graph outputs multiple assets, the graph-backed asset is a [multi-asset](/concepts/assets/multi-assets). ### Asset configuration @@ -238,26 +159,21 @@ For example, the following downstream asset queries an API endpoint defined thro from dagster import Config, asset -@asset -def my_upstream_asset() -> int: - return 5 - - class MyDownstreamAssetConfig(Config): api_endpoint: str @asset -def my_downstream_asset(config: MyDownstreamAssetConfig, my_upstream_asset: int) -> int: +def my_downstream_asset(config: MyDownstreamAssetConfig): data = requests.get(f"{config.api_endpoint}/data").json() - return data["value"] + my_upstream_asset + ... ``` Refer to the [Config schema documentation](/concepts/configuration/config-schema) for more configuration info and examples. ### Asset context -When writing an asset, users can optionally provide a first parameter, `context`. When this parameter is supplied, Dagster will supply an object to the body of the asset which provides access to system information like loggers and the current run id. +When writing an asset, users can optionally provide a first parameter, `context`. When this parameter is supplied, Dagster will supply an object to the body of the asset which provides access to system information like loggers and the current run ID. For example, to access the logger and log an info message: @@ -271,31 +187,6 @@ def context_asset(context: AssetExecutionContext): ... ``` -### Conditional materialization - -In some cases, an asset may not need to be updated in storage each time the decorated function is executed. You can use the `output_required` parameter along with `yield` syntax to implement this behavior. - -If the `output_required` parameter is set to `False`, and your function does not `yield` an object, then no asset materialization event will be created, the I/O manager will not be invoked, downstream assets will not be materialized, and asset sensors monitoring the asset will not trigger. 
- -```python file=/concepts/assets/conditional_materialization.py -import random - -from dagster import Output, asset - - -@asset(output_required=False) -def may_not_materialize(): - # to simulate an asset that may not always materialize. - if random.randint(1, 10) < 5: - yield Output([1, 2, 3, 4]) - - -@asset -def downstream(may_not_materialize): - # will not run when may_not_materialize doesn't materialize the asset - return may_not_materialize + [5] -``` - ### Asset code versions Assets may be assigned a `code_version`. Versions let you help Dagster track what assets haven't been re-materialized since their code has changed, and avoid performing redundant computation. @@ -303,11 +194,18 @@ Assets may be assigned a `code_version`. Versions let you help Dagster track wha ```python file=/concepts/assets/code_versions.py startafter=start_single_asset endbefore=end_single_asset @asset(code_version="1") def asset_with_version(): - return 100 + with open("data/asset_with_version.json", "w") as f: + json.dump(100, f) ``` When an asset with a code version is materialized, the generated `AssetMaterialization` is tagged with the version. The UI will indicate when an asset has a different code version than the code version used for its most recent materialization. +### Defining metadata and tags + +Dagster offers several ways to provide useful information and documentation alongside your data pipelines, including metadata and tagging. For example, you can attach metadata to an asset that calculates how many records are processed during each run and then view the data as a plot in the Dagster UI! + +Check out the [Metadata & tags](/concepts/metadata-tags) documentation to get started. + ### Retrying failed assets If an exception occurs during asset execution, you can use a to automatically retry the asset within the same run. @@ -315,7 +213,14 @@ If an exception occurs during asset execution, you can use a : - -```python file=/concepts/assets/load_asset_values.py startafter=single_asset_start_marker endbefore=single_asset_end_marker dedent=4 -asset1_value = defs.load_asset_value(AssetKey("asset1")) -``` - -To load the values of multiple assets, use , which avoids spinning up resources separately for each asset: - -```python file=/concepts/assets/load_asset_values.py startafter=multiple_asset_start_marker endbefore=multiple_asset_end_marker dedent=4 -with defs.get_asset_value_loader() as loader: - asset1_value = loader.load_asset_value(AssetKey("asset1")) - asset2_value = loader.load_asset_value(AssetKey("asset2")) -``` - ---- - ## Examples - [Multi-component asset keys](#multi-component-asset-keys) -- [Recording materialization metadata](#recording-materialization-metadata) -- [Attaching definition metadata](#attaching-definition-metadata) ### Multi-component asset keys -Assets are often objects in systems with hierarchical namespaces, like filesystems. Because of this, it often makes sense for an asset key to be a list of strings, instead of just a single string. To define an asset with a multi-part asset key, use the `key_prefix` argument-- this can be either a list of strings or a single string with segments delimited by "/". The full asset key is formed by prepending the `key_prefix` to the asset name (which defaults to the name of the decorated function). +Assets are often objects in systems with hierarchical namespaces, like filesystems. Because of this, it often makes sense for an asset key to be a list of strings, instead of just a single string. 
To define an asset with a multi-part asset key, use the `key_prefix` argument with a list of strings. The full asset key is formed by prepending the `key_prefix` to the asset name (which defaults to the name of the decorated function). ```python file=/concepts/assets/multi_component_asset_key.py startafter=start_marker endbefore=end_marker from dagster import AssetIn, asset @@ -616,66 +501,16 @@ def upstream_asset(): return [1, 2, 3] -@asset(ins={"upstream_asset": AssetIn(key_prefix="one/two/three")}) +@asset(ins={"upstream_asset": AssetIn(key_prefix=["one", "two", "three"])}) def downstream_asset(upstream_asset): return upstream_asset + [4] ``` -### Recording materialization metadata - -Dagster supports attaching arbitrary [metadata](/\_apidocs/ops#dagster.MetadataValue) to asset materializations. This metadata will be displayed on the "Activity" tab of the "Asset Details" page in the UI. If it's numeric, it will be plotted. To attach metadata, your asset can return an object that contains the output value and a dictionary of metadata: - -```python file=/concepts/assets/asset_materialization_metadata.py -from pandas import DataFrame - -from dagster import Output, asset - - -@asset -def table1() -> Output[DataFrame]: - df = DataFrame({"col1": [1, 2], "col2": [3, 4]}) - return Output(df, metadata={"num_rows": df.shape[0]}) -``` - -This works even if you're not returning an object from your decorated function: - -```python file=/concepts/assets/asset_materialization_metadata_none.py -from dagster import Output, asset - - -@asset -def table1() -> Output[None]: - ... # write out some data to table1 - return Output(None, metadata={"num_rows": 25}) -``` - -#### Recording materialization metadata using I/O managers - -Sometimes it's useful to record the same metadata for all assets that are stored in the same way. E.g. if you have a set of assets that are all stored on a filesystem, you might want to record the number of bytes they occupy on disk every time one is materialized. You can achieve this by [recording metadata from an I/O manager](/concepts/io-management/io-managers#recording-metadata-from-an-io-manager) that's shared by the assets. - -### Attaching definition metadata - -Dagster supports attaching arbitrary [metadata](/\_apidocs/ops#dagster.MetadataValue) to asset definitions. This metadata will be displayed on the "Definition" tab of the "Asset Details" page in the UI. This is useful for metadata that describes how the asset should be handled, rather than metadata that describes the contents that were produced by a particular run. - -To attach metadata, supply a `metadata` dictionary to the asset: - -```python file=/concepts/assets/asset_definition_metadata.py startafter=start_example endbefore=end_example -@asset(metadata={"owner": "alice@mycompany.com", "priority": "high"}) -def my_asset(): - return 5 -``` - ---- - -## Further reading - -Interested in learning more about software-defined assets and working through a more complex example? Check out our [guide on software-defined assets](/guides/dagster/software-defined-assets) and our [example project](https://github.com/dagster-io/dagster/tree/master/examples/assets_modern_data_stack) that integrates software-defined assets with other Modern Data Stack tools. 
- --- ## See it in action -For more examples of software-defined assets, check out these examples: +For more examples of asset definitions, check out these examples: - In the [Fully Featured Project example](https://github.com/dagster-io/dagster/tree/master/examples/project_fully_featured): diff --git a/docs/content/concepts/automation.mdx b/docs/content/concepts/automation.mdx index a7e5fef998373..09c0d5813a936 100644 --- a/docs/content/concepts/automation.mdx +++ b/docs/content/concepts/automation.mdx @@ -5,11 +5,11 @@ description: "Learn to automatically run your Dagster pipelines." # Automation -Dagster offers several ways to run data pipelines without manual intervation, including traditional scheduling and event-based triggers. Automating your Dagster pipelines can boost efficiency and ensure that data is produced consistently and reliably. +Dagster offers several ways to run data pipelines without manual intervention, including traditional scheduling and event-based triggers. Automating your Dagster pipelines can boost efficiency and ensure that data is produced consistently and reliably. When one of Dagster's automation methods is triggered, a **tick** is created, which indicates that a **run** should occur. The tick will kick off a run, which is a single instance of a pipeline being executed. -In this guide, we'll cover the available automation methods Dagser provides and when to use each one. +In this guide, we'll cover the available automation methods Dagster provides and when to use each one. --- @@ -17,7 +17,7 @@ In this guide, we'll cover the available automation methods Dagser provides and Before continuing, you should be familiar with: -- [Software-defined Assets][assets] +- [Asset definitions][assets] - [Jobs][jobs] (_optional_) - [Ops][ops] (_optional; advanced_) @@ -41,18 +41,16 @@ You can use sensors to run a job or materialize an asset in response to specific You can also use sensors to act on the status of a job run. Refer to the [Sensors documentation][sensors] to learn more. -### Auto-materialize policies +### Declarative Automation -If you want a declarative approach to automating your pipelines, Auto-materialize policies (AMP) may be a good fit. AMPs allow you to assign policies to assets and let Dagster determine the best approach to keeping assets up-to-date while adhering to those policies. +Declarative Automation allows you to automatically materialize assets when specified criteria are met. Using Declarative Automation, you could update assets: -For example, with AMPs, you can update assets based on: +- When the asset hasn't yet been materialized +- When an asset's upstream dependency has been updated +- After an asset's parents have been updated since a cron tick +- ... based on your own custom conditions -- Whether an upstream dependency has been updated -- Whether an upstream dependency has the latest data from its dependencies -- Whether a materialization has occured since the last tick of a cron schedule -- ... and more - -AMPs are declared on an asset-by-asset basis, but can be applied to multiple assets at once. Refer to the [Auto-materializing Assets documentation][auto-materialize-policies] to learn more. +Materialization conditions are declared on an asset-by-asset basis. Refer to the [Declarative Automation documentation][declarative-automation] to learn more. 
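+
+As a quick sketch of what this looks like in code (assuming the experimental `AutomationCondition` API is available in your Dagster version; `my_asset` is an illustrative name):
+
+```python
+from dagster import AutomationCondition, asset
+
+
+@asset(automation_condition=AutomationCondition.eager())
+def my_asset():
+    # Requested automatically when the asset is missing or its parents update
+    ...
+```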
### Asset Sensors @@ -83,7 +81,7 @@ The following cheatsheet contains high-level details about each of the automatio Method @@ -108,7 +106,7 @@ The following cheatsheet contains high-level details about each of the automatio - Schedules + Schedules Starts a job at a specified time @@ -175,13 +173,13 @@ The following cheatsheet contains high-level details about each of the automatio - - Auto-materialize policies + + Declarative Automation - Automatically materializes an asset or selection of assets when - specified criteria (ex: upstream changes) are met + Automatically materializes an asset when specified criteria (ex: + upstream changes) are met
    + This feature is currently experimental. + + +Dagster can automatically execute assets or checks when criteria are met, enabling a declarative approach to automation. Instead of defining explicit workflows and schedules, you describe the conditions under which they should be executed, and the system executes runs in response. + +Declarative Automation includes pre-built conditions to handle common use cases, such as executing on a periodic schedule or whenever an upstream dependency updates, but conditions can be customized in a fine-grained manner, allowing precise control over when work gets executed. + +--- + +## Benefits + +Using Declarative Automation helps you: + +- Ensure you're working with the most up-to-date data +- Optimize resource usage by only materializing assets or executing checks when needed +- Simplify how your team understands their assets by consolidating all asset logic to a single location +- Avoid thinking about specific workflow boundaries, such as a [schedule accounting for timezones or Daylight Savings Time](/concepts/automation/schedules/customizing-executing-timezones) + +--- + +## Prerequisites + +Before continuing, you should be familiar with: + +- [Asset definitions](/concepts/assets/software-defined-assets) +- [Sensor definitions](/concepts/partitions-schedules-sensors/sensors) +- [Code locations](/concepts/code-locations) + +--- + +## How it works + +Declarative Automation is an automation method that executes runs when conditions are met. This method contains two main components: + +- **An automation condition (**), which represents when an individual asset or check should be executed. +- **A sensor (**), which evaluates each and launches runs in response to their status. + +### Automation conditions + +Automation conditions describe the conditions under which work should be executed. Dagster provides three pre-built conditions: + + + + + + + + + + + + + + + + + + + + + + + + + + +
+| Name                                         | Description                                                                                                                                                                                       | Useful for                                                                                    |
+| -------------------------------------------- | ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | ------------------------------------------------------------------------------------------------ |
+| `AutomationCondition.on_cron(cron_schedule)` | This condition will materialize an asset on a provided cron schedule, after all of its parents have been updated                                                                                  | Regularly updating an asset without worrying about the specifics of how its parents update   |
+| `AutomationCondition.on_missing()`           | This condition will materialize an asset if all its dependencies have been updated, but the asset itself has not                                                                                  | Filling in partitioned assets as soon as upstream data is available                          |
+| `AutomationCondition.eager()`                | This condition will materialize an asset if it has never been materialized before, or when its parents update, as long as none of the parents are currently missing or have an update in progress | Automatically propagating changes through the asset graph; ensuring assets remain up-to-date |
    + +With assets, automation conditions can be set on the decorator or on an : + +```python +from dagster import AssetSpec, AutomationCondition, asset + +@asset(automation_condition=AutomationCondition.eager()) +def my_eager_asset(): ... + +AssetSpec("my_cron_asset", automation_condition=AutomationCondition.on_cron("@daily")) +``` + +The same is true for asset checks: + +```python +from dagster import AssetCheckResult, AssetCheckSpec, AutomationCondition, asset_check + + +@asset_check(asset=..., automation_condition=AutomationCondition.cron_tick_passed("@daily")) +def expensive_check() -> AssetCheckResult: + return AssetCheckResult(passed=True) + + +AssetCheckSpec( + "expensive_check", + asset=..., + automation_condition=AutomationCondition.cron_tick_passed("@daily"), +) +``` + +The core framework is extremely flexible, allowing you to build custom conditions from the ground up. Refer to the [Customizing automation conditions guide](/concepts/automation/declarative-automation/customizing-automation-conditions) for more information. + +### Sensors + +When automation conditions for an asset are met, a sensor will execute a run to materialize the asset. This sensor, named `default_automation_condition_sensor`, will be available for each code location and monitor all assets within that location. To use multiple sensors or change the properties of the default sensor, refer to the documentation. + +For an automation condition sensor to run, it must be turned on and an active [`dagster-daemon` process](/deployment/dagster-daemon) must be running. If you used [`dagster dev` to start the Dagster UI/webserver](/guides/running-dagster-locally), the daemon process will be automatically launched alongside the webserver. + +After these criteria are met, the sensor's evaluation history will be visible in the UI: + + + + + +You'll also be able to view a detailed history of each asset's evaluations on the asset's [**Asset Details** page](/concepts/webserver/ui#asset-details). This allows you to see why an asset was or wasn't materialized at different points in time: + + + + + +--- + +## Getting started + +To use Declarative Automation, you'll need to enable the automation condition sensor in the Dagster UI: + +1. Navigate to **Overview > Sensors** +2. Locate the desired code location. +3. In the code location, toggle the `default_automation_condition_sensor` sensor to on. + +From here, you can: + +- Define custom automation conditions +- View a history of each evaluation for the sensor +- Navigate to individual assets to see a history of their evaluations + +--- + +## Related + + + + + + + diff --git a/docs/content/concepts/automation/declarative-automation/customizing-automation-conditions.mdx b/docs/content/concepts/automation/declarative-automation/customizing-automation-conditions.mdx new file mode 100644 index 0000000000000..c8c71de058f79 --- /dev/null +++ b/docs/content/concepts/automation/declarative-automation/customizing-automation-conditions.mdx @@ -0,0 +1,278 @@ +--- +title: "Creating custom Declarative Automation conditions | Dagster Docs" +description: "Learn to create your own custom Declarative Automation conditions." +--- + +# Creating custom Declarative Automation conditions + + + Declarative Automation is currently experimental. + + +[Declarative Automation](/concepts/automation/declarative-automation) includes pre-built conditions to handle common use cases, such as executing on a periodic schedule or whenever an upstream dependency updates, but you can also customize conditions. 
+ +By the end of this guide, you'll understand how work and how to create your own custom conditions. + +--- + +## Prerequisites + +Before continuing, you should be familiar with: + +- [Asset definitions](/concepts/assets/software-defined-assets) +- [Declarative Automation](/concepts/automation/declarative-automation) + +--- + +## How it works + +Each consists of a set of **operands** and various **operators**. To create conditions that suit your specific needs, you can combine the operators and operands listed below. For example: + +```python +from dagster import AutomationCondition + +in_progress_or_failed_parents = AutomationCondition.any_deps_match( + AutomationCondition.in_progress() | AutomationCondition.failed() +) +``` + +This condition translates to **Any upstream dependencies (parents) part of an in-progress run or failed during the latest run**. + +### Operands + +Operands are base conditions which can be true or false about a given asset partition. + +| Operand | Description | +| ------------------------------------------- | --------------------------------------------------------------------------------------------------------------------------------- | +| `AutomationCondition.missing` | The asset partition has never been materialized or observed | +| `AutomationCondition.in_progress` | The asset partition is part of an in-progress run | +| `AutomationCondition.failed` | The asset partition failed to be materialized in its latest run | +| `AutomationCondition.newly_updated` | The asset partition was materialized since the previous evaluation | +| `AutomationCondition.newly_requested` | The asset partition was requested on the previous evaluation | +| `AutomationCondition.code_version_changed` | The asset has a new code version since the previous evaluation | +| `AutomationCondition.cron_tick_passed` | A new tick of the provided cron schedule occurred since the previous evaluation | +| `AutomationCondition.in_latest_time_window` | The asset partition falls within the latest time window of the asset’s , if applicable. | +| `AutomationCondition.will_be_requested` | The asset partition will be requested in this tick | + +### Operators + +The above conditions can be built into more complex expressions using the following operators: + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
+| Operator                                         | Description                                                                                                                                                                                                                                   |
+| ------------------------------------------------ | --------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- |
+| `~` (tilde)                                      | NOT; condition is not true; ex: `~A`                                                                                                                                                                                                          |
+| `\|` (pipe)                                      | OR; either condition must be true; ex: `A \| B`                                                                                                                                                                                               |
+| `&` (ampersand)                                  | AND; both conditions must be true; ex: `A & B`                                                                                                                                                                                                |
+| `A.newly_true()`                                 | Condition A was false on the previous evaluation and is now true                                                                                                                                                                              |
+| `A.since(B)`                                     | Condition A became true more recently than Condition B                                                                                                                                                                                        |
+| `AutomationCondition.any_deps_match(A)`          | Condition A is true for any upstream partition. Can be used with `.allow()` and `.ignore()` to target specific upstream assets. Refer to the [Targeting dependencies](#targeting-dependencies) section for an example.                        |
+| `AutomationCondition.all_deps_match(A)`          | Condition A is true for at least one partition of each upstream asset. Can be used with `.allow()` and `.ignore()` to target specific upstream assets. Refer to the [Targeting dependencies](#targeting-dependencies) section for an example. |
+| `AutomationCondition.any_downstream_condition()` | Any on a downstream asset evaluates to true                                                                                                                                                                                                   |
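+
+As a sketch of how these operators compose (the `became_outdated` name is illustrative only), the following condition is true when a daily cron tick has occurred more recently than the asset's latest update:
+
+```python
+from dagster import AutomationCondition
+
+# True once a "@daily" cron tick has passed without the asset
+# being updated since that tick
+became_outdated = AutomationCondition.cron_tick_passed("@daily").since(
+    AutomationCondition.newly_updated()
+)
+```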
    + +### Composite conditions + +Finally, there are a set of pre-built conditions which make it easier to construct common combinations of the above conditions. + +| Condition | Description | +| ------------------------------------------------- | -------------------------------------------------------------------------------------- | +| `AutomationCondition.any_deps_updated` | Any dependencies have been updated since the previous evaluation | +| `AutomationCondition.any_deps_missing` | Any dependencies have never been materialized or observed | +| `AutomationCondition.any_deps_in_progress` | Any dependencies are part of an in-progress run | +| `AutomationCondition.all_deps_updated_since_cron` | All dependencies have been updated since the latest tick of the provided cron schedule | + +--- + +## Modifying policies + +It's common to have use cases similar to pre-built policies but with minor differences. While it is always possible to copy the base implementation and modify it as needed, it can often be simpler to use the `.without()` method to remove the unwanted sub-conditions or add additional conditions with the `&` operator. + +### `AutomationCondition.eager()`: Ignoring missing dependencies + +By default, `AutomationCondition.eager()` will not materialize an asset partition if it has any missing dependencies. If it is expected to have missing upstream data, remove `~AutomationCondition.any_deps_missing()` from the eager policy to allow execution: + +```python +from dagster import AutomationCondition + +condition = AutomationCondition.eager().without( + ~AutomationCondition.any_deps_missing(), +) +``` + +### `AutomationCondition.eager()`: Update older time partitions + +By default, `AutomationCondition.eager()` will only update the latest time partition of an asset. If updates to historical partitions should result in downstream updates, then this sub-condition can be removed: + +```python +from dagster import AutomationCondition + +condition = AutomationCondition.eager().without( + AutomationCondition.in_latest_time_window(), +) +``` + +## Targeting dependencies + +Upstream assets commonly influence downstream materialization decisions. To create automation conditions that target dependencies, use the `AutomationCondition.any_deps_match()` operator. This operator takes an arbitrary , applies it to each upstream asset, and then maps the results to the corresponding downstream partitions. + +This operator and `AutomationCondition.all_deps_match()` can be further customized to only target specific sets of upstream assets by using `.allow()` and `.ignore()`. + +For example, to target updates from a specific asset group, you can use `any_deps_match` with the `newly_updated` operand and tell it to target only the `metrics` asset group: + +```python +from dagster import AssetSelection, AutomationCondition + +AutomationCondition.any_deps_match( + AutomationCondition.newly_updated() +).allow(AssetSelection.groups("metrics")) +``` + +Or to ignore missing partitions from an upstream asset, you can use `any_deps_match` with the `missing` operand and tell it to ignore a specific asset: + +```python +AutomationCondition.any_deps_match( + AutomationCondition.missing() +).ignore(AssetSelection.keys("taxi_trips")) +``` + +Note that these `ignore()` and `allow()` methods also work for composite conditions such as `AutomationCondition.any_deps_missing()` or `AutomationCondition.any_deps_updated()`. 
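+
+For instance, the same targeting applies to a composite condition. Here is a sketch reusing the illustrative `taxi_trips` key from above:
+
+```python
+from dagster import AssetSelection, AutomationCondition
+
+# True when any dependency other than taxi_trips is missing
+condition = AutomationCondition.any_deps_missing().ignore(
+    AssetSelection.keys("taxi_trips")
+)
+```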
+
+---
+
+## Describing conditions with labels
+
+When there are a large number of sub-conditions that make up an , it can be difficult to understand and troubleshoot the condition. To make conditions easier to understand, you can attach labels to sub-conditions, which will then display in the Dagster UI.
+
+Arbitrary string labels can be attached to any node in the tree by using the `with_label()` method, allowing you to describe the purpose of a specific sub-condition. For example:
+
+```python
+from dagster import AutomationCondition
+
+in_progress_or_failed_parents = AutomationCondition.any_deps_match(
+    AutomationCondition.in_progress() | AutomationCondition.failed()
+).with_label("Any parents in progress or failed")
+```
+
+Then, when viewing evaluation results in the UI, the label will display next to the condition:
+
+
+
+Hovering over or expanding the label will display its sub-conditions:
+
+
+
+---
+
+## Related
+
+
+
diff --git a/docs/content/concepts/automation/schedules.mdx b/docs/content/concepts/automation/schedules.mdx
new file mode 100644
index 0000000000000..082f70a2da4cb
--- /dev/null
+++ b/docs/content/concepts/automation/schedules.mdx
@@ -0,0 +1,108 @@
+---
+title: "Schedules | Dagster Docs"
+description: "Use schedules to run your pipelines at fixed time intervals."
+---
+
+# Schedules
+
+Schedules are Dagster's way of supporting traditional methods of [automation](/concepts/automation), which allow you to specify when a [job](/concepts/ops-jobs-graphs/jobs) should run. Using schedules, you can define a fixed time interval to run a pipeline, such as daily, hourly, or every Monday at 9:00 AM.
+
+Each interval of a schedule is called a **tick**, which is an indication that a job should execute. Ticks kick off **runs**, each of which is a single instance of a job being executed.
+
+When viewing a schedule in [Dagster's UI](/concepts/webserver/ui), you can see the schedule's definition, executing timezone, targeted jobs and partitions, as well as its tick and run history.
+
+---
+
+## Benefits
+
+Using schedules helps you:
+
+- Predictably process and deliver data to stakeholders and business-critical applications
+- Consistently run data pipelines without the need for manual intervention
+- Optimize resource usage by scheduling pipelines to run during off-peak hours
+
+---
+
+## Prerequisites
+
+Before continuing, you should be familiar with:
+
+- [Cron syntax](https://en.wikipedia.org/wiki/Cron)
+- [Assets](/concepts/assets/software-defined-assets) and [asset jobs](/concepts/assets/asset-jobs)
+- [Ops](/concepts/ops-jobs-graphs/ops) and [op jobs](/concepts/ops-jobs-graphs/op-jobs) _(optional)_
+
+---
+
+## How it works
+
+Schedules run jobs at fixed time intervals and have two main components:
+
+- **A job**, which targets a selection of assets or ops
+- [**A cron expression**](https://en.wikipedia.org/wiki/Cron), which defines when the schedule runs. Simple and complex schedules are supported, allowing you to have fine-grained control over when runs are executed. With cron syntax, you can:
+
+  - **Create custom schedules** like `Every hour from 9:00AM - 5:00PM` with cron expressions (`0 9-17 * * *`)
+  - **Quickly create basic schedules** like `Every day at midnight` with predefined cron definitions (`@daily`, `@midnight`)
+
+  To make creating cron expressions easier, you can use an online tool like [Crontab Guru](https://crontab.guru/).
This tool allows you to create and describe cron expressions in a human-readable format and test the execution dates produced by the expression. **Note**: While this tool is useful for general cron expression testing, always remember to [test your schedules](/concepts/automation/schedules/testing) in Dagster to ensure the results are as expected. + +For a schedule to run, it must be turned on and an active [`dagster-daemon` process](/deployment/dagster-daemon) must be running. If you used [`dagster dev` to start the Dagster UI/webserver](/guides/running-dagster-locally), the daemon process will be automatically launched alongside the webserver. + +After these criteria are met, the schedule will run at the interval specified in the cron expression. **Schedules will execute in UTC by default**, but [you can specify a custom timezone](/concepts/automation/schedules/customizing-executing-timezones). + +--- + +## Getting started + +Check out these guides to get started with schedules: + +
    + + + +
+
+From here, you can:
+
+- Construct schedules to run [partitioned jobs](/concepts/automation/schedules/partitioned-schedules)
+- Execute jobs in [specific timezones](/concepts/automation/schedules/customizing-executing-timezones)
+- Learn to [test your schedules](/concepts/automation/schedules/testing)
+- Identify and resolve common issues with our [troubleshooting guide](/concepts/automation/schedules/troubleshooting)
+
+### Limitations and notes
+
+- Dagster supports all [predefined cron definitions](https://en.wikipedia.org/wiki/Cron#Nonstandard_predefined_scheduling_definitions) with the exception of `@reboot`
+- Schedules will execute in UTC [unless a timezone is specified](/concepts/automation/schedules/customizing-executing-timezones)
+- When defining a schedule's execution time, keep [Daylight Savings Time (DST) in mind](/concepts/automation/schedules/customizing-executing-timezones#execution-times-and-daylight-savings-time)
+
+---
+
+## Related
+
+
+
diff --git a/docs/content/concepts/automation/schedules/automating-assets-schedules-jobs.mdx b/docs/content/concepts/automation/schedules/automating-assets-schedules-jobs.mdx
new file mode 100644
index 0000000000000..216b6a72216d5
--- /dev/null
+++ b/docs/content/concepts/automation/schedules/automating-assets-schedules-jobs.mdx
@@ -0,0 +1,249 @@
+---
+title: "Automating assets using schedules and jobs | Dagster Docs"
+description: "Learn how to automate asset materialization using schedules and jobs."
+---
+
+# Automating assets using schedules and jobs
+
+After creating some [asset definitions](/concepts/assets/software-defined-assets), you may want to automate their materialization.
+
+In this guide, we'll show you one method of accomplishing this by using schedules and jobs. To do this for ops, refer to the [Automating ops using schedules guide](/concepts/automation/schedules/automating-ops-schedules-jobs).
+
+By the end of this guide, you'll be able to:
+
+- Create a job that materializes assets
+- Create a schedule
+- Add the new job and schedule to your project's object
+- Turn the schedule on
+
+---
+
+## Prerequisites
+
+To follow this guide, you'll need:
+
+- **To install Dagster and the Dagster UI.** Refer to the [Installation guide](/getting-started/install) for more info and instructions.
+- **Familiarity with**:
+  - [Asset definitions](/concepts/assets/software-defined-assets)
+  - [Jobs](/concepts/ops-jobs-graphs/jobs)
+  - [Code locations](/concepts/code-locations) ()
+
+---
+
+## Step 1: Create a job
+
+The first step in creating a schedule is to build a job that materializes some assets.
+
+Let's assume we already have a few assets in our project in a group named `ecommerce_assets`:
+
+```python file=concepts/partitions_schedules_sensors/schedules/basic_asset_schedule.py startafter=start_assets endbefore=end_assets
+@asset(group_name="ecommerce_assets")
+def orders_asset():
+    return 1
+
+
+@asset(group_name="ecommerce_assets")
+def users_asset():
+    return 2
+```
+
+To create a job that materializes the assets in this group, we'll use :
+
+```python file=concepts/partitions_schedules_sensors/schedules/basic_asset_schedule.py startafter=start_job endbefore=end_job
+ecommerce_job = define_asset_job(
+    "ecommerce_job", AssetSelection.groups("ecommerce_assets")
+)
+```
+
+To create the job, we:
+
+1. Imported and
+2. Constructed the job using and named it `ecommerce_job`
+3. Selected all assets in the `ecommerce_assets` group using . Only these assets will be materialized when the job runs.
Refer to the [Asset jobs documentation](/concepts/assets/asset-jobs) for more info and examples.
+
+---
+
+## Step 2: Define the schedule
+
+Next, we'll construct the schedule using and attach it to the job we created in [Step 1](#step-1-create-a-job).
+
+```python file=concepts/partitions_schedules_sensors/schedules/basic_asset_schedule.py startafter=start_schedule endbefore=end_schedule
+ecommerce_schedule = ScheduleDefinition(
+    job=ecommerce_job,
+    cron_schedule="15 5 * * 1-5",
+    default_status=DefaultScheduleStatus.RUNNING,
+)
+```
+
+To build the schedule, we:
+
+1. Imported `DefaultScheduleStatus` and from `dagster`
+2. Created a schedule using that:
+
+   - Is attached to the `ecommerce_job` job
+   - Has a cron expression of `15 5 * * 1-5`, which translates to `Every Monday through Friday of every month at 5:15AM`
+   - Is turned on by default (`default_status`). We'll discuss this more in [Step 4](#step-4-turn-the-schedule-on).
+
+---
+
+## Step 3: Update the Definitions object
+
+Next, we'll update our project's object to include the new job and schedule. This ensures the job and schedule are available to Dagster processes, such as the Dagster UI.
+
+```python file=concepts/partitions_schedules_sensors/schedules/basic_asset_schedule.py startafter=start_definitions endbefore=end_definitions
+defs = Definitions(
+    assets=[orders_asset, users_asset],
+    jobs=[ecommerce_job],
+    schedules=[ecommerce_schedule],
+)
+```
+
+At this point, your code should look like the following:
+
+```python file=concepts/partitions_schedules_sensors/schedules/basic_asset_schedule.py
+from dagster import (
+    AssetSelection,
+    DefaultScheduleStatus,
+    Definitions,
+    ScheduleDefinition,
+    asset,
+    define_asset_job,
+)
+
+
+@asset(group_name="ecommerce_assets")
+def orders_asset():
+    return 1
+
+
+@asset(group_name="ecommerce_assets")
+def users_asset():
+    return 2
+
+
+ecommerce_job = define_asset_job(
+    "ecommerce_job", AssetSelection.groups("ecommerce_assets")
+)
+
+
+ecommerce_schedule = ScheduleDefinition(
+    job=ecommerce_job,
+    cron_schedule="15 5 * * 1-5",
+    default_status=DefaultScheduleStatus.RUNNING,
+)
+
+
+defs = Definitions(
+    assets=[orders_asset, users_asset],
+    jobs=[ecommerce_job],
+    schedules=[ecommerce_schedule],
+)
+```
+
+---
+
+## Step 4: Turn the schedule on
+
+Schedules must be turned on before they can be used. In our case, we already turned the schedule on by using the `default_status` parameter in its , but there are a few other ways to do this:
+
+
+
+ Heads up! Starting or stopping a schedule in the UI will override any default status set in code.
+
+To turn on a schedule in the Dagster UI, navigate to **Overview > Schedules**:
+
+
+
+To start and stop schedules via the [dagster CLI](/\_apidocs/cli#dagster-schedule), use the following commands:
+
+```shell
+dagster schedule start
+dagster schedule stop
+```
+
+
+
+ Heads up! Starting or stopping a schedule in the UI will override any default status set in code.
+
+You can set the schedule's default status using `DefaultScheduleStatus.RUNNING` in the schedule's :
+
+```python file=concepts/partitions_schedules_sensors/schedules/basic_asset_schedule.py startafter=start_schedule endbefore=end_schedule
+ecommerce_schedule = ScheduleDefinition(
+    job=ecommerce_job,
+    cron_schedule="15 5 * * 1-5",
+    default_status=DefaultScheduleStatus.RUNNING,
+)
+```
+
+
+
+After the schedule is started, it will begin executing immediately if the [dagster-daemon](/deployment/dagster-daemon) process is running. This process starts automatically when `dagster dev` is run.
+
+---
+
+## APIs in this guide
+
+| Name | Description |
+| ---------------------------------------- | --------------------------------------------------------------------------------------------------------------------------------------------------- |
+| | A function for defining a job from a selection of assets. |
+| | A class that defines a selection of assets. Typically used with . |
+| | A class that defines a schedule and attaches it to a job. |
+| | The object that contains all the definitions defined within a code location. Definitions include assets, jobs, resources, schedules, and sensors. |
+
diff --git a/docs/content/concepts/automation/schedules/automating-ops-schedules-jobs.mdx b/docs/content/concepts/automation/schedules/automating-ops-schedules-jobs.mdx
new file mode 100644
index 0000000000000..6fd04d8651ebc
--- /dev/null
+++ b/docs/content/concepts/automation/schedules/automating-ops-schedules-jobs.mdx
@@ -0,0 +1,242 @@
+---
+title: "Automating ops using schedules and jobs | Dagster Docs"
+description: "Learn how to automate op execution using schedules and jobs."
+---
+
+# Automating ops using schedules and jobs
+
+In this guide, we'll walk you through running ops on a schedule. To do this for asset definitions, refer to the [Automating assets using schedules guide](/concepts/automation/schedules/automating-assets-schedules-jobs).
+
+By the end of this guide, you'll be able to:
+
+- Create a job that executes ops
+- Create a schedule
+- Add the new job and schedule to your project's object
+- Turn the schedule on
+
+---
+
+## Prerequisites
+
+To follow this guide, you'll need:
+
+- **To install Dagster and the Dagster UI.** Refer to the [Installation guide](/getting-started/install) for more info and instructions.
+- **Familiarity with**:
+  - [Ops](/concepts/ops-jobs-graphs/ops)
+  - [Jobs](/concepts/ops-jobs-graphs/jobs)
+  - [Code locations](/concepts/code-locations) ()
+
+---
+
+## Step 1: Create a job
+
+The first step in creating a schedule is to build a job that executes some ops.
+
+Let's assume we already have a few ops in our project. To create a job that executes the ops, we'll use the decorator to define the job:
+
+```python file=concepts/partitions_schedules_sensors/schedules/basic_op_schedule.py startafter=start_op_job endbefore=end_op_job
+@op
+def count_orders():
+    return 5
+
+
+@op
+def count_users(arg):
+    return arg + 1
+
+
+@job
+def ecommerce_job():
+    count_users(count_orders())
+```
+
+To create the job, we:
+
+1. Imported
+2. Constructed the job using the decorator and named it `ecommerce_job`
+3. Within the function's body, we used function calls to define dependencies between the `count_orders` and `count_users` ops
+
+Refer to the [Op jobs documentation](/concepts/ops-jobs-graphs/op-jobs) for more info and examples.
+
+---
+
+## Step 2: Define the schedule
+
+Next, we'll construct the schedule using and attach it to the job we created in [Step 1](#step-1-create-a-job).
+
+```python file=concepts/partitions_schedules_sensors/schedules/basic_op_schedule.py startafter=start_schedule endbefore=end_schedule
+ecommerce_schedule = ScheduleDefinition(
+    job=ecommerce_job,
+    cron_schedule="15 5 * * 1-5",
+    default_status=DefaultScheduleStatus.RUNNING,
+)
+```
+
+To build the schedule, we:
+
+1. Imported `DefaultScheduleStatus` and from `dagster`
+2. Created a schedule using that:
+
+   - Is attached to the `ecommerce_job` job
+   - Has a cron expression of `15 5 * * 1-5`, which translates to `Every Monday through Friday of every month at 5:15AM`
+   - Is turned on by default (`default_status`). We'll discuss this more in [Step 4](#step-4-turn-the-schedule-on).
+
+---
+
+## Step 3: Update the Definitions object
+
+Next, we'll update our project's object to include the new job and schedule. This ensures the job and schedule are available to Dagster processes, such as the Dagster UI.
+
+```python file=concepts/partitions_schedules_sensors/schedules/basic_op_schedule.py startafter=start_definitions endbefore=end_definitions
+defs = Definitions(
+    jobs=[ecommerce_job],
+    schedules=[ecommerce_schedule],
+)
+```
+
+At this point, your code should look like the following:
+
+```python file=concepts/partitions_schedules_sensors/schedules/basic_op_schedule.py lines=3-5,7-21,25-31,35-38
+from dagster import job, op, DefaultScheduleStatus, Definitions, ScheduleDefinition
+
+
+@op
+def count_orders():
+    return 5
+
+
+@op
+def count_users(arg):
+    return arg + 1
+
+
+@job
+def ecommerce_job():
+    count_users(count_orders())
+
+
+ecommerce_schedule = ScheduleDefinition(
+    job=ecommerce_job,
+    cron_schedule="15 5 * * 1-5",
+    default_status=DefaultScheduleStatus.RUNNING,
+)
+
+
+defs = Definitions(
+    jobs=[ecommerce_job],
+    schedules=[ecommerce_schedule],
+)
+```
+
+---
+
+## Step 4: Turn the schedule on
+
+Schedules must be turned on before they can be used. In our case, we already turned the schedule on by using the `default_status` parameter in its , but there are a few other ways to do this:
+
+
+
+ Heads up! Starting or stopping a schedule in the UI will override any default status set in code.
+
+To turn on a schedule in the Dagster UI, navigate to **Overview > Schedules**:
+
+
+
+To start and stop schedules via the [dagster CLI](/\_apidocs/cli#dagster-schedule), use the following commands:
+
+```shell
+dagster schedule start
+dagster schedule stop
+```
+
+
+
+ Heads up! Starting or stopping a schedule in the UI will override any default status set in code.
+
+You can set the schedule's default status using `DefaultScheduleStatus.RUNNING` in the schedule's :
+
+```python file=concepts/partitions_schedules_sensors/schedules/basic_op_schedule.py startafter=start_schedule endbefore=end_schedule
+ecommerce_schedule = ScheduleDefinition(
+    job=ecommerce_job,
+    cron_schedule="15 5 * * 1-5",
+    default_status=DefaultScheduleStatus.RUNNING,
+)
+```
+
+
+
+After the schedule is started, it will begin executing immediately if the [dagster-daemon](/deployment/dagster-daemon) process is running. This process starts automatically when `dagster dev` is run.
+
+
+
+---
+
+
+
+## APIs in this guide
+
+| Name | Description | |
+| ---------------------------------------- | ----------------------------------------------------------------------------------------------------------------------------------------------------- | - |
+| | A decorator used to define ops. Returns an . The decorated function is called the "compute function". | |
+| | The decorator used to define a job. | |
+| | A class that defines a schedule and attaches it to a job. | |
+| | The object that contains all the definitions defined within a code location. Definitions include assets, jobs, resources, schedules, and sensors.
| | + + diff --git a/docs/content/concepts/automation/schedules/customizing-executing-timezones.mdx b/docs/content/concepts/automation/schedules/customizing-executing-timezones.mdx new file mode 100644 index 0000000000000..6686cdd68aae7 --- /dev/null +++ b/docs/content/concepts/automation/schedules/customizing-executing-timezones.mdx @@ -0,0 +1,150 @@ +--- +title: "Customizing a schedule's executing timezone | Dagster Docs" +description: "By default, schedules without a set timezone execute in UTC. Learn to specify custom timezones on your Dagster schedules." +--- + +# Customizing a schedule's executing timezone + +[Schedules](/concepts/automation/schedules) that don't have a set timezone will, by default, execute in [UTC](https://en.wikipedia.org/wiki/Coordinated_Universal_Time). By the end of this guide, you'll know how to: + +- Set custom timezones on schedule definitions +- Set custom timezones on partitioned jobs +- Account for the impact of Daylight Savings Time on schedule execution times + +--- + +## Prerequisites + +To follow this guide, you need to be familiar with: + +- [Schedules](/concepts/automation/schedules) +- Jobs, either [asset](/concepts/assets/asset-jobs) or [op-based](/concepts/ops-jobs-graphs/op-jobs) +- [Partitions](/concepts/partitions-schedules-sensors/partitions) (Optional) + +--- + +## Setting timezones on schedule definitions + +Using the `execution_timezone` parameter allows you to specify a timezone for the schedule on the following objects: + +- +- +- + +This parameter accepts any [`tz` timezone](https://en.wikipedia.org/wiki/List_of_tz_database_time_zones). For example, the following schedule will execute **every day at 9:00 AM in US Pacific time (America/Los_Angeles)**: + +```python file=concepts/partitions_schedules_sensors/schedules/schedules.py startafter=start_timezone endbefore=end_timezone +my_timezone_schedule = ScheduleDefinition( + job=my_job, cron_schedule="0 9 * * *", execution_timezone="America/Los_Angeles" +) +``` + +--- + +## Setting timezones on partitioned jobs + +Schedules constructed from partitioned jobs execute in the timezone defined on the partition's config. Partitions definitions have a `timezone` parameter, which accepts any [`tz` timezone](https://en.wikipedia.org/wiki/List_of_tz_database_time_zones). + +For example, the following partition uses the **US Pacific (America/Los_Angeles)** timezone: + +```python file=concepts/partitions_schedules_sensors/partition_with_timezone.py +from dagster import DailyPartitionsDefinition + +daily_partition = DailyPartitionsDefinition( + start_date="2024-05-20", timezone="America/Los_Angeles" +) +``` + +--- + +## Execution times and Daylight Savings Time + +When Daylight Savings Time (DST) begins and ends, there may be some impact on your schedules' execution times. + +### Impact on daily schedules + +Because of DST transitions, it's possible to specify an execution time that doesn't exist for every scheduled interval. + +Let's say you have a **schedule that executes every day at 2:30 AM.** On the day DST begins, time jumps from 2:00AM to 3:00AM, which means the time of 2:30 AM won't exist. + +Dagster would instead run the schedule at the next time that exists, which would be 3:00 AM: + +```markdown +# DST begins: time jumps forward an hour at 2:00 AM + +- 12:30 AM +- 1:00 AM +- 1:30 AM +- 3:00 AM ## time transition; schedule executes +- 3:30 AM +- 4:00 AM +``` + +It's also possible to specify an execution time that exists twice on one day every year. 
+ +Let's say you have a **schedule that executes every day at 1:30 AM.** On the day DST ends, the hour from 1:00 AM to 2:00 AM repeats, which means the time of 1:30 AM will exist twice. This means there are two possible times the schedule could run. + +In this case, Dagster would execute the schedule at the second iteration of 1:30 AM: + +```markdown +# DST ends: time jumps backward an hour at 2:00 AM + +- 12:30 AM +- 1:00 AM +- 1:30 AM +- 1:00 AM ## time transition +- 1:30 AM ## schedule executes +- 2:00 AM +``` + +### Impact on hourly schedules + +Hourly schedules are unaffected by daylight savings time transitions. Schedules will continue to run exactly once an hour, even as DST ends and the hour from 1:00 AM to 2:00 AM repeats. + +Let's say you have a **schedule that executes hourly at 30 minutes past the hour.** On the day DST ends, the schedule would run at 12:30 AM and both instances of 1:30 AM before proceeding normally at 2:30 AM: + +```markdown +# DST ends: time jumps backward an hour at 2:00 AM + +- 12:30 AM ## schedule executes +- 1:00 AM +- 1:30 AM ## schedule executes +- 1:00 AM ## time transition +- 1:30 AM ## schedule executes +- 2:00 AM +- 2:30 AM ## schedule executes +``` + +--- + +## APIs in this guide + +| Name | Description | +| ---------------------------------------------------------------------------- | --------------------------------------------------------------------------------------------------- | +| | Decorator that defines a schedule that executes according to a given cron schedule. | +| | Class for schedules. | +| | A function that constructs a schedule whose interval matches the partitioning of a partitioned job. | +| | A function that constructs a schedule that materializes a set of specified dbt resources. | + +--- + +## Related + + + + + + + diff --git a/docs/content/concepts/automation/schedules/examples.mdx b/docs/content/concepts/automation/schedules/examples.mdx new file mode 100644 index 0000000000000..1f211778e1aea --- /dev/null +++ b/docs/content/concepts/automation/schedules/examples.mdx @@ -0,0 +1,753 @@ +--- +title: "Schedule examples | Dagster Docs" +description: "Examples focused on Dagster schedules." +--- + +# Schedule examples + +This reference contains a variety of examples using Dagster [schedules](/concepts/automation/schedules). Each example contains: + +- A summary +- Additional notes +- Links to relevant documentation +- A list of the APIs used in the example + +--- + +## Defining basic schedules + +The following examples demonstrate how to define some basic schedules. + + + + +This example demonstrates how to define a schedule using that will run a job every day at midnight. While this example uses [op jobs](/concepts/ops-jobs-graphs/jobs) (), the same approach will work with [asset jobs](/concepts/assets/asset-jobs) (). + +```python file=concepts/partitions_schedules_sensors/schedules/schedules.py startafter=start_basic_schedule endbefore=end_basic_schedule +@job +def my_job(): ... + + +basic_schedule = ScheduleDefinition(job=my_job, cron_schedule="0 0 * * *") +``` + + + + + + + + + + + + + + + + +
+**Notes**: The `cron_schedule` argument accepts standard [cron expressions](https://en.wikipedia.org/wiki/Cron). If your `croniter` dependency's version is `>= 1.0.12`, the argument will also accept the following: `@daily`, `@hourly`, `@monthly`.
+
+**Related docs**:
+
+**APIs in this example**:
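+
+For instance, assuming your `croniter` version meets the requirement noted above, the same daily schedule can be written with a predefined cron definition:
+
+```python
+from dagster import ScheduleDefinition
+
+# Equivalent to cron_schedule="0 0 * * *"
+basic_schedule = ScheduleDefinition(job=my_job, cron_schedule="@daily")
+```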
    + + +This example demonstrates how to define a schedule using , which provides more flexibility than . For example, you can [configure job behavior based on its scheduled run time](#configuring-job-behavior-based-on-scheduled-run-time) or [emit log messages](#emitting-log-messages-from-schedule-evaluations). + +```python +@schedule(job=my_job, cron_schedule="0 0 * * *") +def basic_schedule(): ... + # things the schedule does, like returning a RunRequest or SkipReason +``` + + + + + + + + + + + + + + + + +
+**Notes**: The decorator's `cron_schedule` argument accepts standard [cron expressions](https://en.wikipedia.org/wiki/Cron). If your `croniter` dependency's version is `>= 1.0.12`, the argument will also accept the following: `@daily`, `@hourly`, `@monthly`.
+
+**Related docs**:
+
+**APIs in this example**:
    +
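To make the placeholder comment in the `@schedule` example above concrete, here is a minimal sketch of a schedule body that returns either a `RunRequest` or a `SkipReason`. The `work_to_do` flag is a hypothetical stand-in for a real condition (for example, "did new data land?"):

```python
from dagster import RunRequest, SkipReason, job, schedule


@job
def my_job(): ...


@schedule(job=my_job, cron_schedule="0 0 * * *")
def basic_schedule():
    # Hypothetical condition; replace with a real check against your data
    work_to_do = True
    if work_to_do:
        return RunRequest(run_key=None)
    return SkipReason("Nothing to do")
```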
    + +--- + +## Emitting log messages from schedule evaluations + +This example demonstrates how to emit log messages from a schedule during its evaluation function. These logs will be visible in the UI when you inspect a tick in the schedule's tick history. + +```python file=concepts/partitions_schedules_sensors/schedules/schedules.py startafter=start_schedule_logging endbefore=end_schedule_logging +@schedule(job=my_job, cron_schedule="* * * * *") +def logs_then_skips(context): + context.log.info("Logging from a schedule!") + return SkipReason("Nothing to do") +``` + + + + + + + + + + + + + + + + +
+ Notes + + Schedule logs are stored in your{" "} + + Dagster instance's compute log storage + + . You must have compute log storage configured in order to view + schedule logs in the UI. +
    + Related docs + + +
    + APIs in this example + + ,{" "} + ,{" "} + +
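As the note above mentions, schedule logs are only viewable once the instance's compute log storage is configured. As a sketch - the bucket name and prefix are placeholders - an S3-backed configuration in `dagster.yaml` might look like:

```yaml
# dagster.yaml (a sketch; bucket and prefix are placeholders)
compute_logs:
  module: dagster_aws.s3.compute_log_manager
  class: S3ComputeLogManager
  config:
    bucket: my-compute-log-bucket
    prefix: schedule-logs
```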
    + +--- + +## Using resources in schedules + +This example demonstrates how to use resources in schedules. To specify a resource dependency, annotate the resource as a parameter to the schedule's function. + +```python file=/concepts/resources/pythonic_resources.py startafter=start_new_resource_on_schedule endbefore=end_new_resource_on_schedule dedent=4 +from dagster import ( + schedule, + ScheduleEvaluationContext, + ConfigurableResource, + job, + RunRequest, + RunConfig, + Definitions, +) +from datetime import datetime +from typing import List + +class DateFormatter(ConfigurableResource): + format: str + + def strftime(self, dt: datetime) -> str: + return dt.strftime(self.format) + +@job +def process_data(): ... + +@schedule(job=process_data, cron_schedule="* * * * *") +def process_data_schedule( + context: ScheduleEvaluationContext, + date_formatter: DateFormatter, +): + formatted_date = date_formatter.strftime(context.scheduled_execution_time) + + return RunRequest( + run_key=None, + tags={"date": formatted_date}, + ) + +defs = Definitions( + jobs=[process_data], + schedules=[process_data_schedule], + resources={"date_formatter": DateFormatter(format="%Y-%m-%d")}, +) +``` + + + + + + + + + + + + + + + + +
    + Notes + + All Dagster definitions, including schedules and resources, must be + attached to a call. +
    + Related docs + + +
    + APIs in this example + +
      +
    • + +
    • +
    • + +
    • +
    • + +
    • +
    • + +
    • +
    • + +
    • +
    • + +
    • +
    • + +
    • +
    +
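Because the resource is passed as a plain argument, the schedule can also be invoked directly for a quick sanity check. The following is a minimal sketch reusing the definitions above; the testing guide later in this document covers this pattern in more depth:

```python
import datetime

from dagster import build_schedule_context

context = build_schedule_context(
    scheduled_execution_time=datetime.datetime(2024, 5, 20)
)
run_request = process_data_schedule(
    context, date_formatter=DateFormatter(format="%Y-%m-%d")
)
# The schedule tags each run with the formatted execution date
assert run_request.tags["date"] == "2024-05-20"
```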
    + +--- + +## Configuring job behavior based on scheduled run time + +This example demonstrates how to use run config to vary the behavior of a job based on its scheduled run time. + +```python file=concepts/partitions_schedules_sensors/schedules/schedules.py startafter=start_run_config_schedule endbefore=end_run_config_schedule +@op(config_schema={"scheduled_date": str}) +def configurable_op(context: OpExecutionContext): + context.log.info(context.op_config["scheduled_date"]) + + +@job +def configurable_job(): + configurable_op() + + +@schedule(job=configurable_job, cron_schedule="0 0 * * *") +def configurable_job_schedule(context: ScheduleEvaluationContext): + scheduled_date = context.scheduled_execution_time.strftime("%Y-%m-%d") + return RunRequest( + run_key=None, + run_config={ + "ops": {"configurable_op": {"config": {"scheduled_date": scheduled_date}}} + }, + tags={"date": scheduled_date}, + ) +``` + + + + + + + + + + + + + + + + +
    + Notes +
    + Related docs + + Op jobs +
    + APIs in this example + + , ,{" "} + ,{" "} + ,{" "} + +
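Since the schedule builds run config dynamically, it's worth verifying that the emitted config actually satisfies `configurable_job`'s schema. A minimal sketch using the testing utilities covered in [Testing schedules](/concepts/automation/schedules/testing):

```python
import datetime

from dagster import build_schedule_context, validate_run_config

# Construct a context with a fixed evaluation time, then validate the
# run config the schedule produces against the job's config schema
context = build_schedule_context(
    scheduled_execution_time=datetime.datetime(2024, 1, 1)
)
run_request = configurable_job_schedule(context)
assert validate_run_config(configurable_job, run_request.run_config)
```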
    + +--- + +## Customizing execution timezones + +This example demonstrates how to customize the timezone a schedule executes in. The schedule in this example will execute every day at 9AM in US/Pacific time. + +```python file=concepts/partitions_schedules_sensors/schedules/schedules.py startafter=start_timezone endbefore=end_timezone +my_timezone_schedule = ScheduleDefinition( + job=my_job, cron_schedule="0 9 * * *", execution_timezone="America/Los_Angeles" +) +``` + + + + + + + + + + + + +
    + Notes + +
      +
    • + The decorator also accepts + the execution_timezone argument +
    • +
    • Schedules without a set timezone will run in UTC.
    • +
    • + Schedules from partitioned jobs execute in the timezone defined on + the partitioned config +
    • +
    +
    + APIs in this example + + +
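As noted above, the decorator form accepts the same `execution_timezone` argument. A minimal sketch, reusing `my_job` from the earlier examples:

```python
from dagster import RunRequest, schedule


@schedule(
    job=my_job,
    cron_schedule="0 9 * * *",
    execution_timezone="America/Los_Angeles",
)
def my_timezone_schedule():
    return RunRequest()
```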
+ + + +--- + +## Constructing schedules for partitioned assets and jobs + +This section demonstrates how to use schedules with partitions. We'll cover: + +- Using a helper function to automatically construct schedules based on the partition's config +- Using to manually construct schedules + +### Automatically constructing schedules + +The following examples demonstrate how to automatically construct schedules for partitioned assets and jobs using a helper function. These examples use , which will build a schedule with a cadence that matches the spacing of the partitions in the asset or job. + +This approach can be used with time-based or static partitions. + + + + +#### Partitioned assets + +This example demonstrates how to automatically construct a schedule for a time-partitioned asset using . + +```python file=/concepts/partitions_schedules_sensors/schedule_from_partitions.py startafter=start_partitioned_asset_schedule endbefore=end_partitioned_asset_schedule
from dagster import (
    asset,
    build_schedule_from_partitioned_job,
    define_asset_job,
    DailyPartitionsDefinition,
)

daily_partition = DailyPartitionsDefinition(start_date="2024-05-20")


@asset(partitions_def=daily_partition)
def daily_asset(): ...


partitioned_asset_job = define_asset_job("partitioned_job", selection=[daily_asset])


asset_partitioned_schedule = build_schedule_from_partitioned_job(
    partitioned_asset_job,
)
``` + + + + + + + + + + + + + + + + +
    + Notes + + If the partition has a timezone defined, the schedule will execute in + the timezone specified on the partitioned config. +
    + Related docs + + +
    + APIs in this example + +
      +
    • + +
    • +
    • + +
    • +
    • + +
    • +
    • + +
    • +
    +
    + +
    + + +#### Partitioned op jobs + +This example demonstrates how to construct a schedule for a time-partitioned op job using . + +```python file=/concepts/partitions_schedules_sensors/schedule_from_partitions.py startafter=start_marker endbefore=end_marker +from dagster import build_schedule_from_partitioned_job, job + + +@job(config=partitioned_config) +def partitioned_op_job(): ... + + +partitioned_op_schedule = build_schedule_from_partitioned_job( + partitioned_op_job, +) +``` + + + + + + + + + + + + + + + + +
    + Notes + + If the partition has a timezone defined, the schedule will execute in + the timezone specified on the partitioned config. +
    + Related docs + + +
    + APIs in this example + + ,{" "} + +
    + +
    +
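`build_schedule_from_partitioned_job` also accepts `minute_of_hour`, `hour_of_day`, `day_of_week`, and `day_of_month` parameters to offset when each tick fires, without changing which partition is targeted. A brief sketch, reusing `partitioned_asset_job` from the asset example above:

```python
from dagster import build_schedule_from_partitioned_job

# Evaluate at 1:30 AM instead of midnight; each tick still targets the
# most recent completed partition
asset_partitioned_schedule = build_schedule_from_partitioned_job(
    partitioned_asset_job, hour_of_day=1, minute_of_hour=30
)
```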
    + +### Manually constructing schedules + +This example demonstrates how to manually construct a schedule for a job with a static partition from scratch using the decorator. + +Using allows for more flexibility in determining which partitions should be run by the schedule, rather than using which automatically creates the schedule based on the partitioned config. + +```python file=/concepts/partitions_schedules_sensors/schedule_from_partitions.py startafter=start_static_partition endbefore=end_static_partition +from dagster import schedule, RunRequest + + +@schedule(cron_schedule="0 0 * * *", job=continent_job) +def continent_schedule(): + for c in CONTINENTS: + yield RunRequest(run_key=c, partition_key=c) +``` + + + + + + + + + + + + +
    + Related docs + + +
    + APIs in this example + + ,{" "} + +
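Manual construction also makes it easy to target only a subset of the partition set. As a sketch, a schedule that requests a run for a single partition key:

```python
from dagster import RunRequest, schedule


@schedule(cron_schedule="0 0 * * *", job=continent_job)
def antarctica_schedule():
    # Target only the "Antarctica" partition instead of iterating over all keys
    return RunRequest(partition_key="Antarctica")
```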
+ +--- + +## Testing schedules + +Refer to the [Testing schedules guide](/concepts/automation/schedules/testing) to view examples of tests alongside the schedules they target. + +--- + +## Want more inspiration? + +If you're looking for additional inspiration, we recommend: + +- [**Dagster Open Platform**](https://github.com/dagster-io/dagster-open-platform), which is Dagster Labs' open-source data platform. This full-sized project contains real assets and other Dagster features used by the Dagster Labs team. +- [**GitHub Discussions**](https://github.com/dagster-io/dagster/discussions), where you can ask questions and get inspired by the Dagster community +- The [**Awesome Dagster** repository](https://github.com/dagster-io/awesome-dagster), which is a collection of all awesome things related to Dagster, including other users' projects, talks, articles, and more diff --git a/docs/content/concepts/automation/schedules/partitioned-schedules.mdx b/docs/content/concepts/automation/schedules/partitioned-schedules.mdx new file mode 100644 index 0000000000000..b853bbff97976 --- /dev/null +++ b/docs/content/concepts/automation/schedules/partitioned-schedules.mdx @@ -0,0 +1,264 @@ +--- +title: "Constructing schedules from partitioned assets and jobs | Dagster Docs" +description: "Learn to construct schedules for your partitioned jobs." +--- + +# Constructing schedules from partitioned jobs + +In this guide, we'll walk you through how to construct schedules from partitioned [assets](/concepts/assets/software-defined-assets) and jobs. By the end, you'll be able to: + +- Construct a schedule for a time-partitioned job +- Customize a partitioned job's starting time +- Customize the most recent partition in a set +- Construct a schedule for a statically-partitioned job + +--- + +## Prerequisites + +To follow this guide, you need to be familiar with: + +- [Schedules](/concepts/automation/schedules) +- [Partitions](/concepts/partitions-schedules-sensors/partitions) +- [Asset definitions](/concepts/assets/software-defined-assets) +- [Asset jobs](/concepts/assets/asset-jobs) and [op jobs](/concepts/ops-jobs-graphs/op-jobs) + +--- + +## Working with time-based partitions + +For jobs partitioned by time, you can use the to construct a schedule for the job. The schedule's interval will match the spacing of the partitions in the job. For example, if you have a daily partitioned job that fills in a date partition of a table each time it runs, you likely want to run that job every day. + +Refer to the following tabs for examples of asset and op-based jobs using to construct schedules: + + + + +#### Asset jobs

Asset jobs are defined using . In this example, we created an asset job named `partitioned_job` and then constructed `asset_partitioned_schedule` by using : + +```python file=/concepts/partitions_schedules_sensors/schedule_from_partitions.py startafter=start_partitioned_asset_schedule endbefore=end_partitioned_asset_schedule
from dagster import (
    asset,
    build_schedule_from_partitioned_job,
    define_asset_job,
    DailyPartitionsDefinition,
)

daily_partition = DailyPartitionsDefinition(start_date="2024-05-20")


@asset(partitions_def=daily_partition)
def daily_asset(): ...


partitioned_asset_job = define_asset_job("partitioned_job", selection=[daily_asset])


asset_partitioned_schedule = build_schedule_from_partitioned_job(
    partitioned_asset_job,
)
``` + + + + +#### Op jobs + +Op jobs are defined using the .
In this example, we created a partitioned job named `partitioned_op_job` and then constructed `partitioned_op_schedule` using : + +```python file=/concepts/partitions_schedules_sensors/schedule_from_partitions.py startafter=start_marker endbefore=end_marker
from dagster import build_schedule_from_partitioned_job, job


@job(config=partitioned_config)
def partitioned_op_job(): ...


partitioned_op_schedule = build_schedule_from_partitioned_job(
    partitioned_op_job,
)
``` + + + + +### Customizing schedule timing + +The `minute_of_hour`, `hour_of_day`, `day_of_week`, and `day_of_month` parameters of `build_schedule_from_partitioned_job` can be used to control the timing of the schedule. + +Consider the following schedule: + +```python file=/concepts/partitions_schedules_sensors/schedule_from_partitions.py startafter=start_partitioned_schedule_with_offset endbefore=end_partitioned_schedule_with_offset
from dagster import build_schedule_from_partitioned_job

asset_partitioned_schedule = build_schedule_from_partitioned_job(
    partitioned_asset_job, hour_of_day=1, minute_of_hour=30
)
``` + +On May 20, 2024, the schedule will evaluate at 1:30 AM UTC and then start a run for the partition key of the previous day, `2024-05-19`. + +### Customizing the ending partition in a set + + + Heads up! The examples in this section use daily partitions, + but the same logic also applies to other time-based partitions, such as + hourly, weekly, and monthly partitions. + + +Each schedule tick of a partitioned job targets the latest partition in the partition set that exists as of the tick time. For example, consider a schedule that runs a daily-partitioned job. When the schedule runs on `2024-05-20`, it will target the most recent partition, which will correspond to the previous day: `2024-05-19`. + +| If a job runs on this date... | It will target this partition | +| ----------------------------- | ----------------------------- | +| 2024-05-20 | 2024-05-19 | +| 2024-05-21 | 2024-05-20 | +| 2024-05-22 | 2024-05-21 | + +This occurs because each partition is a **time window**. A time window is a set period of time with a start and an end time. The partition's key is the start of the time window, but the partition isn't included in the partition set until its time window has completed. Kicking off a run after the time window completes allows the run to process data for the entire time window. + +Continuing with the daily partition example, the `2024-05-20` partition would have the following start and end times: + +- **Start time** - `2024-05-20 00:00:00` +- **End time** - `2024-05-20 23:59:59` + +After `2024-05-20 23:59:59` passes, the time window is complete and Dagster will add a new `2024-05-20` partition to the partition set. At this point, the process will repeat with the next time window of `2024-05-21`. + +If you need to customize the ending, or most recent, partition in a set, use the `end_offset` parameter in the partition's config: + +```python file=/concepts/partitions_schedules_sensors/schedule_from_partitions.py startafter=start_offset_partition endbefore=end_offset_partition
from dagster import DailyPartitionsDefinition

daily_partition_with_offset = DailyPartitionsDefinition(
    start_date="2024-05-20", end_offset=-1
)
``` + +Setting this parameter changes the partition that will be filled in at each schedule tick.
Positive and negative integers are accepted, which will have the following effects: + +- **Positive numbers**, like `1`, cause the schedule to fill in the partition of the **current** hour/day/week/month +- **Negative numbers**, like `-1,` cause the schedule to fill in the partition of an **earlier** hour/day/week/month + +Generally, the calculation for `end_offset` can be expressed as: + +```shell +current_date - 1 type_of_partition + end_offset +``` + +Let's look at an example schedule that's partitioned by day and how different `end_offset` values would affect the most recent partition in the set. In this example, we're using a start date of `2024-05-20`: + +| End offset | Calculated as | Ending (most recent) partition | +| ------------ | ----------------------------- | --------------------------------------- | +| Offset of -1 | `2024-05-20 - 1 day + -1 day` | 2024-05-18 (2 days prior to start date) | +| No offset | `2024-05-20 - 1 day + 0 days` | 2024-05-19 (1 day prior to start date) | +| Offset of 1 | `2024-05-20 - 1 day + 1 day` | 2024-05-20 (start date) | + +--- + +## Working with static partitions + +Next, we'll demonstrate how to create a schedule for a job with a static partition. To do this, we'll construct the schedule from scratch using the decorator, rather than using a helper function like . This will allow more flexibility in determining which partitions should be run by the schedule. + +In this example, the job is partitioned by continent: + +```python file=/concepts/partitions_schedules_sensors/static_partitioned_asset_job.py startafter=start_job endbefore=end_job +from dagster import ( + AssetExecutionContext, + Config, + asset, + define_asset_job, + static_partitioned_config, +) + +CONTINENTS = [ + "Africa", + "Antarctica", + "Asia", + "Europe", + "North America", + "Oceania", + "South America", +] + + +@static_partitioned_config(partition_keys=CONTINENTS) +def continent_config(partition_key: str): + return {"ops": {"continents": {"config": {"continent_name": partition_key}}}} + + +class ContinentOpConfig(Config): + continent_name: str + + +@asset +def continents(context: AssetExecutionContext, config: ContinentOpConfig): + context.log.info(config.continent_name) + + +continent_job = define_asset_job( + name="continent_job", selection=[continents], config=continent_config +) +``` + +Using the decorator, we'll write a schedule that targets each partition, or `continent`: + +```python file=/concepts/partitions_schedules_sensors/static_partitioned_asset_job.py startafter=start_schedule_all_partitions endbefore=end_schedule_all_partitions +from dagster import RunRequest, schedule + + +@schedule(cron_schedule="0 0 * * *", job=continent_job) +def continent_schedule(): + for c in CONTINENTS: + yield RunRequest(run_key=c, partition_key=c) +``` + +If we only want to target the `Antarctica` partition, we can create a schedule like the following: + +```python file=/concepts/partitions_schedules_sensors/static_partitioned_asset_job.py startafter=start_single_partition endbefore=end_single_partition +from dagster import RunRequest, schedule + + +@schedule(cron_schedule="0 0 * * *", job=continent_job) +def antarctica_schedule(): + return RunRequest(partition_key="Antarctica") +``` + +--- + +## APIs in this guide + +| Name | Description | +| --------------------------------------------------------- | --------------------------------------------------------------------------------------------------- | +| | Decorator that defines a schedule that executes according to a given cron 
schedule. | +| | A function that constructs a schedule whose interval matches the partitioning of a partitioned job. | +| | A class that represents all the information required to launch a single run. | +| | A function for defining a job from a selection of assets. | +| | The decorator used to define a job. | + +--- + +## Related + + + + + + + + diff --git a/docs/content/concepts/automation/schedules/testing.mdx b/docs/content/concepts/automation/schedules/testing.mdx new file mode 100644 index 0000000000000..f742ba3a06a33 --- /dev/null +++ b/docs/content/concepts/automation/schedules/testing.mdx @@ -0,0 +1,187 @@ +--- +title: "Testing schedules | Dagster Docs" +description: "Learn to test your schedules in Python or the Dagster UI." +--- + +# Testing schedules + +In this guide, we'll show you how to use the Dagster UI and Python to test your [schedules](/concepts/automation/schedules). For more information about testing in Dagster, refer to the [Testing documentation](/concepts/testing). + +--- + +## Testing schedules in the Dagster UI + +Using the UI, you can manually trigger test evaluations of a schedule and view the results. This can be helpful when [creating a schedule](/concepts/automation/schedules/automating-assets-schedules-jobs) or for [troubleshooting unexpected scheduling behavior](/concepts/automation/schedules/troubleshooting). + +1. In the UI, click **Overview > Schedules tab**. + +2. Click the schedule you want to test. + +3. Click the **Test Schedule** button, located near the top right corner of the page. + +4. You'll be prompted to select a mock schedule evaluation time. As schedules are defined on a cadence, the evaluation times in the dropdown are past and future times along that cadence. + + For example, let's say you're testing a schedule with a cadence of `"Every day at X time"`. In the dropdown, you'd see past and future evaluation times along that cadence: + + + + Selecting a mock evaluation time for a schedule in the Dagster UI + +5. After selecting an evaluation time, click the **Evaluate** button. + +A window containing the evaluation result will display after the test completes. If the evaluation was successful, click **Open in Launchpad** to launch a run with the same config as the test evaluation. + +--- + +## Testing schedules in Python + +You can also test your schedules directly in Python. In this section, we'll demonstrate how to test: + +- [`@schedule`-decorated functions](#testing-schedule-decorated-functions) +- [Schedules with resources](#testing-schedules-with-resources) + +### Testing @schedule-decorated functions + +To test a function decorated by the decorator, you can invoke the schedule definition like it's a regular Python function. The invocation will return run config, which can then be validated using the function. 
+ +Let's say we want to test the `configurable_job_schedule` in this example: + +```python file=concepts/partitions_schedules_sensors/schedules/schedules.py startafter=start_run_config_schedule endbefore=end_run_config_schedule +@op(config_schema={"scheduled_date": str}) +def configurable_op(context: OpExecutionContext): + context.log.info(context.op_config["scheduled_date"]) + + +@job +def configurable_job(): + configurable_op() + + +@schedule(job=configurable_job, cron_schedule="0 0 * * *") +def configurable_job_schedule(context: ScheduleEvaluationContext): + scheduled_date = context.scheduled_execution_time.strftime("%Y-%m-%d") + return RunRequest( + run_key=None, + run_config={ + "ops": {"configurable_op": {"config": {"scheduled_date": scheduled_date}}} + }, + tags={"date": scheduled_date}, + ) +``` + +To test this schedule, we used to construct a to provide to the `context` parameter: + +```python file=concepts/partitions_schedules_sensors/schedules/schedule_examples.py startafter=start_test_cron_schedule_context endbefore=end_test_cron_schedule_context +from dagster import build_schedule_context, validate_run_config + + +def test_configurable_job_schedule(): + context = build_schedule_context( + scheduled_execution_time=datetime.datetime(2020, 1, 1) + ) + run_request = configurable_job_schedule(context) + assert validate_run_config(configurable_job, run_request.run_config) +``` + +If your -decorated function doesn't have a context parameter, you don't need to provide one when invoking it. + +### Testing schedules with resources + +For schedules that utilize [resources](/concepts/resources), you can provide the resources when invoking the schedule function. + +Let's say we want to test the `process_data_schedule` in this example: + +```python file=/concepts/resources/pythonic_resources.py startafter=start_new_resource_on_schedule endbefore=end_new_resource_on_schedule dedent=4 +from dagster import ( + schedule, + ScheduleEvaluationContext, + ConfigurableResource, + job, + RunRequest, + RunConfig, + Definitions, +) +from datetime import datetime +from typing import List + +class DateFormatter(ConfigurableResource): + format: str + + def strftime(self, dt: datetime) -> str: + return dt.strftime(self.format) + +@job +def process_data(): ... 
+

@schedule(job=process_data, cron_schedule="* * * * *")
def process_data_schedule(
    context: ScheduleEvaluationContext,
    date_formatter: DateFormatter,
):
    formatted_date = date_formatter.strftime(context.scheduled_execution_time)

    return RunRequest(
        run_key=None,
        tags={"date": formatted_date},
    )

defs = Definitions(
    jobs=[process_data],
    schedules=[process_data_schedule],
    resources={"date_formatter": DateFormatter(format="%Y-%m-%d")},
)
``` + +In the test for this schedule, we provided the `date_formatter` resource to the schedule when we invoked its function: + +```python file=/concepts/resources/pythonic_resources.py startafter=start_test_resource_on_schedule endbefore=end_test_resource_on_schedule dedent=4
from dagster import build_schedule_context

def test_process_data_schedule():
    context = build_schedule_context(
        scheduled_execution_time=datetime.datetime(2020, 1, 1)
    )
    run_request = process_data_schedule(
        context, date_formatter=DateFormatter(format="%Y-%m-%d")
    )
    # The schedule above returns a RunRequest carrying a "date" tag, so the
    # test asserts against the tag it produces
    assert run_request.tags["date"] == "2020-01-01"
``` + +--- + +## APIs in this guide + +| Name | Description | +| ----------------------------------------------- | ------------------------------------------------------------------------------------- | +| | Decorator that defines a schedule that executes according to a given cron schedule. | +| | A function that validates a provided run config blob against a job. | +| | A function that constructs a `ScheduleEvaluationContext`, typically used for testing. | +| | The context passed to the schedule definition execution function. | + +--- + +## Related + + + + + + + diff --git a/docs/content/concepts/automation/schedules/troubleshooting.mdx b/docs/content/concepts/automation/schedules/troubleshooting.mdx new file mode 100644 index 0000000000000..480214c6277a9 --- /dev/null +++ b/docs/content/concepts/automation/schedules/troubleshooting.mdx @@ -0,0 +1,149 @@ +--- +title: "Troubleshooting schedules | Dagster Docs" +--- + +# Troubleshooting schedules + +Run into issues with a schedule? Use this guide to diagnose and resolve the problem. + +--- + +## Step 1: Verify the schedule is included in the Definitions object + +First, verify that the schedule has been included in a object. This ensures that the schedule is detectable and loadable by Dagster tools like the Dagster UI and CLI: + +```python
defs = Definitions(
    assets=[asset_1, asset_2],
    jobs=[job_1],
    schedules=[all_assets_job_schedule],
)
``` + +Refer to the [Code locations documentation](/concepts/code-locations) for more info. + +--- + +## Step 2: Verify that the schedule has been started + +1. In the Dagster UI, click **Overview > Schedules tab**. +2. Locate the schedule. Schedules that have been started will have an enabled toggle in the **Running** column: + + + + Enabled toggle next to a schedule in the Schedules tab of the Overview page + +--- + +## Step 3: Check for execution failures + +Next, check that the schedule executed successfully. You can do this by looking at the **Last tick** column in the **Schedules tab**. + +If the schedule failed to execute, this column will contain a **Failed** badge. Click the badge to display the error and stack trace describing the failure. + +--- + +## Step 4: Verify the schedule's interval configuration + +Next, verify that the schedule is using the time interval you expect.
In the **Schedules** tab, locate the schedule and look at the **Schedule** column: + + + +The **Next tick** value indicates when the schedule is next expected to run. In the above image, the next tick is `May 2, 12:00 AM UTC`. + +Verify that the time is what you expect, including the timezone. + +--- + +## Step 5: Verify that the UI is using your latest Dagster code + +The next step is to verify that the UI is using the latest version of your Dagster code. Use the tabs to view instructions for the version of Dagster you're using. + + + + +1. In the UI, click **Settings** in the top navigation. +2. In the **Code locations** tab, click **Reload definitions** near the top right corner of the page. + + + + +1. In the UI, click **Deployment** in the top navigation. +2. In the **Code locations** tab, locate the code location that contains the schedule definition. +3. Click **Redeploy**. + + + + +**If the code location can't be loaded** - for example, due to a syntax error - it will have a **Status** of **Failed**. Click the **View error** link in this column to view the error message. + +**If the code location loaded successfully** but the schedule isn't present in the **Schedules** tab, the schedule may not be included in the code location's `Definitions` object. Refer to [Step 1](#step-1-verify-the-schedule-is-included-in-the-definitions-object) for more information. + +--- + +## Step 6: Verify your dagster-daemon setup + +This section is applicable to Open Source (OSS) deployments. + +If the schedule interval is correctly configured but runs aren't being created, it's possible that the dagster-daemon process isn't working correctly. If you haven't set up a Dagster daemon yet, refer to the [Open Source Deployment guides](/deployment) for more info. + +### Verify the daemon is running + +1. In the UI, click **Deployment** in the top navigation. +2. Click the **Daemons** tab. +3. Locate the **Scheduler** row. + +The daemon process periodically sends out a heartbeat from the scheduler. If the scheduler daemon has a status of **Not running**, this indicates that there's an issue with your daemon deployment. If the daemon ran into an error that resulted in an exception, this error will often display in this tab. + +If there isn't a clear error on this page or if the daemon should be sending heartbeats but isn't, move on to the next step. + +### Check the daemon process logs + +Next, check the logs from the daemon process. The steps to do this will depend on your deployment - for example, if you're using Kubernetes, you'll need to get the logs from the pod that's running the daemon. You should be able to search those logs for the name of the schedule (or `SchedulerDaemon` to see all logs associated with the scheduler) to gain an understanding of what's going wrong. + +If the daemon output contains an error indicating the schedule couldn't be found, verify that the daemon is using the same `workspace.yaml` file as the webserver. The daemon does not need to be restarted in order to pick up changes to the `workspace.yaml` file. Refer to the [Workspace files documentation](/concepts/code-locations/workspace-files) for more information. + +If the logs don't indicate the cause of the issue, move on to the next step. + +### Check for execution failures + +The last step is to check that the schedule executed successfully. If you didn't do this already, refer to [Step 3](#step-3-check-for-execution-failures) for more information. + +--- + +## Need more help?
+

**Still stuck?** If these steps didn't resolve the issue, reach out in Slack or file an issue on GitHub. + +--- + +## Related + + + + + + + diff --git a/docs/content/concepts/code-locations.mdx b/docs/content/concepts/code-locations.mdx index 12a7a0b148047..130715268f204 100644 --- a/docs/content/concepts/code-locations.mdx +++ b/docs/content/concepts/code-locations.mdx @@ -22,7 +22,7 @@ description: "A code location is a collection of Dagster definitions loadable an /> -A code location is a collection of Dagster definitions loadable and accessible by Dagster's tools, such as the CLI, UI, and Dagster Cloud. A code location comprises: +A code location is a collection of Dagster definitions loadable and accessible by Dagster's tools, such as the CLI, UI, and Dagster+. A code location comprises: - A reference to a Python module that has an instance of in a top-level variable - A Python environment that can successfully load that module @@ -53,7 +53,7 @@ Code locations are loaded in a different process and communicate with Dagster sy To define a code location, create a top-level variable that contains a object in a Python module. For example: ```python -# my_file.py +# definitions.py defs = Definitions( assets=[dbt_customers_asset, dbt_orders_asset], @@ -63,14 +63,14 @@ ) ``` -Definitions can be included in a Python file like `my_file.py` or a Python module. If using the latter, the object should be defined in the module's top-level `__init__.py` file. +It is recommended to include definitions in a Python module named `definitions.py`. --- ## Deploying and loading code locations - [Local development](#local-development) -- [Cloud deployment](#cloud-deployment) +- [Dagster+ deployment](#dagster-deployment) - [Open source deployment](#open-source-deployment) ### Local development @@ -81,9 +81,9 @@ Definitions can be included in a Python file like `my_file.py` or a Python modul Refer to the [Running Dagster locally guide](/guides/running-dagster-locally) for more info about local development, including how to configure your local instance. -### Cloud deployment +### Dagster+ deployment -The [`dagster_cloud.yaml`](/dagster-cloud/managing-deployments/dagster-cloud-yaml) file is used to create and deploy code locations for Cloud deployments. Each code location entry in this file has a `code_source` property, which is used to specify how a code location is sourced. +The [`dagster_cloud.yaml`](/dagster-plus/managing-deployments/dagster-cloud-yaml) file is used to create and deploy code locations for Dagster+ deployments. Each code location entry in this file has a `code_source` property, which is used to specify how a code location is sourced.
Code locations can be sourced from a Python file or module: @@ -110,7 +110,7 @@ To load a code location from a Python module, use the `module_name` property in locations: - location_name: my-code-location code_source: - module_name: my_module_name + module_name: my_module_name.definitions ``` diff --git a/docs/content/concepts/code-locations/workspace-files.mdx b/docs/content/concepts/code-locations/workspace-files.mdx index ef944c85c2d3a..173cd5dbe1cec 100644 --- a/docs/content/concepts/code-locations/workspace-files.mdx +++ b/docs/content/concepts/code-locations/workspace-files.mdx @@ -81,7 +81,7 @@ To load a code location from a local or installed Python module, use the `python # workspace.yaml load_from: - - python_module: hello_world_module + - python_module: hello_world_module.definitions ``` @@ -153,7 +153,7 @@ dagster api grpc --python-file /path/to/file.py --host 0.0.0.0 --socket /path/to Using a Python module: ```shell -dagster api grpc --module-name my_module_name --host 0.0.0.0 --port 4266 +dagster api grpc --module-name my_module_name.definitions --host 0.0.0.0 --port 4266 ``` --- @@ -257,3 +257,5 @@ The example above also illustrates the `location_name` key. Each code location i + +You can see a working example of a Dagster project that has multiple code locations in our [cloud-examples/multi-location-project repo](https://github.com/dagster-io/cloud-examples/tree/main/multi-location-project). diff --git a/docs/content/concepts/configuration/advanced-config-types.mdx b/docs/content/concepts/configuration/advanced-config-types.mdx index fd7002a936172..4e5ac2f941892 100644 --- a/docs/content/concepts/configuration/advanced-config-types.mdx +++ b/docs/content/concepts/configuration/advanced-config-types.mdx @@ -124,8 +124,7 @@ class MyDataStructuresConfig(Config): user_scores: Dict[str, int] @asset -def scoreboard(config: MyDataStructuresConfig): - ... +def scoreboard(config: MyDataStructuresConfig): ... result = materialize( [scoreboard], @@ -161,8 +160,7 @@ class MyNestedConfig(Config): user_data: Dict[str, UserData] @asset -def average_age(config: MyNestedConfig): - ... +def average_age(config: MyNestedConfig): ... result = materialize( [average_age], @@ -300,7 +298,7 @@ class ProcessUsersConfig(Config): def process_users(config: ProcessUsersConfig): for user, permission in config.users_list.items(): if permission == UserPermissions.ADMIN: - print(f"{user} is an admin") # noqa: T201 + print(f"{user} is an admin") @job def process_users_job(): @@ -372,7 +370,7 @@ executed = {} @op def greet_user(config: UserConfig) -> None: - print(f"Hello {config.name}!") # noqa: T201 + print(f"Hello {config.name}!") executed["greet_user"] = True @job diff --git a/docs/content/concepts/configuration/config-schema-legacy.mdx b/docs/content/concepts/configuration/config-schema-legacy.mdx index eb7f841a1660b..6a2e25628a316 100644 --- a/docs/content/concepts/configuration/config-schema-legacy.mdx +++ b/docs/content/concepts/configuration/config-schema-legacy.mdx @@ -18,11 +18,11 @@ description: Job run configuration allows providing parameters to jobs at the ti Run configuration allows providing parameters to jobs at the time they're executed. -It's often useful to provide user-chosen values to Dagster jobs or software-defined assets at runtime. For example, you might want to choose what dataset an op runs against, or provide a connection URL for a database resource. Dagster exposes this functionality through a configuration API. 
+It's often useful to provide user-chosen values to Dagster jobs or asset definitions at runtime. For example, you might want to choose what dataset an op runs against, or provide a connection URL for a database resource. Dagster exposes this functionality through a configuration API. Various Dagster entities (ops, assets, resources) can be individually configured. When launching a job that executes (ops), materializes (assets), or instantiates (resources) a configurable entity, you can provide _run configuration_ for each entity. Within the function that defines the entity, you can access the passed-in configuration off of the `context`. Typically, the provided run configuration values correspond to a _configuration schema_ attached to the op/asset/resource definition. Dagster validates the run configuration against the schema and proceeds only if validation is successful. -A common use of configuration is for a [schedule](/concepts/partitions-schedules-sensors/schedules) or [sensor](/concepts/partitions-schedules-sensors/schedules) to provide configuration to the job run it is launching. For example, a daily schedule might provide the day it's running on to one of the ops as a config value, and that op might use that config value to decide what day's data to read. +A common use of configuration is for a [schedule](/concepts/automation/schedules) or [sensor](/concepts/partitions-schedules-sensors/sensors) to provide configuration to the job run it is launching. For example, a daily schedule might provide the day it's running on to one of the ops as a config value, and that op might use that config value to decide what day's data to read. --- diff --git a/docs/content/concepts/configuration/config-schema.mdx b/docs/content/concepts/configuration/config-schema.mdx index 43dd1a5609043..7146204243bd0 100644 --- a/docs/content/concepts/configuration/config-schema.mdx +++ b/docs/content/concepts/configuration/config-schema.mdx @@ -18,11 +18,11 @@ description: Job run configuration allows providing parameters to jobs at the ti Run configuration allows providing parameters to jobs at the time they're executed. -It's often useful to provide user-chosen values to Dagster jobs or Software-defined Assets at runtime. For example, you might want to provide a connection URL for a database resource. Dagster exposes this functionality through a configuration API. +It's often useful to provide user-chosen values to Dagster jobs or asset definitions at runtime. For example, you might want to provide a connection URL for a database resource. Dagster exposes this functionality through a configuration API. Various Dagster entities (assets, ops, resources) can be individually configured. When launching a job that materializes (assets), executes (ops), or instantiates (resources) a configurable entity, you can provide _run configuration_ for each entity. Within the function that defines the entity, you can access the passed-in configuration through the `config` parameter. Typically, the provided run configuration values correspond to a _configuration schema_ attached to the asset/op/resource definition. Dagster validates the run configuration against the schema and proceeds only if validation is successful. -A common use of configuration is for a [schedule](/concepts/partitions-schedules-sensors/schedules) or [sensor](/concepts/partitions-schedules-sensors/schedules) to provide configuration to the job run it is launching. 
For example, a daily schedule might provide the day it's running on to one of the assets as a config value, and that asset might use that config value to decide what day's data to read. +A common use of configuration is for a [schedule](/concepts/automation/schedules) or [sensor](/concepts/partitions-schedules-sensors/sensors) to provide configuration to the job run it is launching. For example, a daily schedule might provide the day it's running on to one of the assets as a config value, and that asset might use that config value to decide what day's data to read. --- @@ -35,7 +35,7 @@ During execution, the specified config is accessed within the body of the op or -#### Using Software-defined Assets +#### Using asset definitions Here, we define a subclass of holding a single string value representing the name of a user. We can access the config through the `config` parameter in the asset body. @@ -65,7 +65,7 @@ class MyOpConfig(Config): @op def print_greeting(config: MyOpConfig): - print(f"hello {config.person_name}") # noqa: T201 + print(f"hello {config.person_name}") ``` You can also build config into [jobs](/concepts/ops-jobs-graphs/jobs). @@ -191,7 +191,7 @@ asset_result = materialize( ### Using environment variables with config -Assets and ops can be configured using environment variables by passing an when constructing a config object. This is useful when the value is sensitive or may vary based on environment. If using Dagster Cloud, environment variables can be [set up directly in the UI](/guides/dagster/using-environment-variables-and-secrets). +Assets and ops can be configured using environment variables by passing an when constructing a config object. This is useful when the value is sensitive or may vary based on environment. If using Dagster+, environment variables can be [set up directly in the UI](/guides/dagster/using-environment-variables-and-secrets). ```python file=/guides/dagster/pythonic_config/pythonic_config.py startafter=start_execute_with_config_envvar endbefore=end_execute_with_config_envvar dedent=4 from dagster import job, materialize, op, RunConfig, EnvVar diff --git a/docs/content/concepts/configuration/configured.mdx b/docs/content/concepts/configuration/configured.mdx index 8ec3036cd95c2..3740df0ca7d7e 100644 --- a/docs/content/concepts/configuration/configured.mdx +++ b/docs/content/concepts/configuration/configured.mdx @@ -155,7 +155,7 @@ def another_configured_example(config): A common pattern in the development cycle is to use different configuration for each environment. For example, you might connect to a local database during local development and to a production database in your cloud environment. 
You can use the `configured` API to select between different configurations at runtime: ```python file=/guides/dagster/using_environment_variables_and_secrets/repository_v2.py startafter=start_old endbefore=end_old -# __init__.py +# definitions.py resources = { "local": { diff --git a/docs/content/guides/dagster-pipes.mdx b/docs/content/concepts/dagster-pipes.mdx similarity index 89% rename from docs/content/guides/dagster-pipes.mdx rename to docs/content/concepts/dagster-pipes.mdx index a4d7134d34558..3f163c5c56d7c 100644 --- a/docs/content/guides/dagster-pipes.mdx +++ b/docs/content/concepts/dagster-pipes.mdx @@ -3,11 +3,7 @@ title: Dagster Pipes | Dagster Docs description: "Dagster Pipes provides a protocol between the orchestration environment (Dagster) and external execution (ex: Databricks) and a toolkit for building implementations of that protocol." --- -# Dagster Pipes (Experimental) - - - This feature is currently experimental. - +# Dagster Pipes Dagster Pipes is a toolkit for building integrations between Dagster and external execution environments. It standardizes the process of passing parameters, injecting context information, ingesting logs, and collecting metadata all while remaining agnostic to how remote computations are launched in those environments. This enables the separation of orchestration and business logic in the Dagster ecosystem. @@ -74,12 +70,12 @@ While Dagster Pipes is lightweight and flexible, there are a few limitations to Ready to get started with Dagster Pipes? Depending on what your goal is, how you approach using Dagster Pipes can differ. -- **If you’re writing scripts that are being orchestrated**, all you’ll need to do is lightly modify your existing scripts. Check out the [Dagster Pipes tutorial](/guides/dagster-pipes/subprocess) to get up and running. +- **If you’re writing scripts that are being orchestrated**, all you’ll need to do is lightly modify your existing scripts. Check out the [Dagster Pipes tutorial](/concepts/dagster-pipes/subprocess) to get up and running. - **If you need to orchestrate scripts others have written**, you can use our ready-to-go integrations to execute scripts in your chosen technology: - - [Amazon Web Services Lambda](/guides/dagster-pipes/aws-lambda) - - [Databricks](/guides/dagster-pipes/databricks) - - [Kubernetes](/guides/dagster-pipes/kubernetes) + - [Amazon Web Services Lambda](/concepts/dagster-pipes/aws-lambda) + - [Databricks](/concepts/dagster-pipes/databricks) + - [Kubernetes](/concepts/dagster-pipes/kubernetes) -- **If you don’t see your integration or you want to fully customize your Pipes experience**, check out the [Dagster Pipes details and customization guide](/guides/dagster-pipes/dagster-pipes-details-and-customization) to learn how to create a custom experience. +- **If you don’t see your integration or you want to fully customize your Pipes experience**, check out the [Dagster Pipes details and customization guide](/concepts/dagster-pipes/dagster-pipes-details-and-customization) to learn how to create a custom experience. diff --git a/docs/content/concepts/dagster-pipes/aws-ecs.mdx b/docs/content/concepts/dagster-pipes/aws-ecs.mdx new file mode 100644 index 0000000000000..73597089e0c54 --- /dev/null +++ b/docs/content/concepts/dagster-pipes/aws-ecs.mdx @@ -0,0 +1,137 @@ +--- +title: "Integrating AWS ECS with Dagster Pipes | Dagster Docs" +description: "Learn to integrate Dagster Pipes with AWS ECS to launch external code from Dagster assets." 
+--- + +# AWS ECS & Dagster Pipes + +This tutorial gives a short overview on how to use [Dagster Pipes](/concepts/dagster-pipes) with [AWS ECS](https://aws.amazon.com/ecs/). + +The [dagster-aws](/\_apidocs/libraries/dagster-aws) integration library provides the resource which can be used to launch AWS ECS tasks from Dagster assets and ops. Dagster can receive regular events like logs, asset checks, or asset materializations from jobs launched with this client. Using it requires minimal code changes on the task side. + +--- + +## Prerequisites + +- **In the orchestration environment**, you'll need to: + + - Install the following packages: + + ```shell + pip install dagster dagster-webserver dagster-aws + ``` + + Refer to the [Dagster installation guide](/getting-started/install) for more info. + + - **AWS authentication credentials configured.** If you don't have this set up already, refer to the [boto3 quickstart](https://boto3.amazonaws.com/v1/documentation/api/latest/guide/quickstart.html). + +- **In AWS**: + + - An existing AWS account + - An AWS ECS task. To receive logs and events from a task container, it must have `"logDriver"` set to `"awslogs"` in `"logConfiguration"`. + +--- + +## Step 1: Install the dagster-pipes module + +Install the `dagster-pipes` module in the image used for your ECS task. For example, you can install the dependency with `pip` in your image Dockerfile: + +```Dockerfile
FROM python:3.11-slim

RUN python -m pip install dagster-pipes

# copy the task script
COPY . .
``` + +--- + +## Step 2: Add dagster-pipes to the ECS task script + +Call `open_dagster_pipes` in the ECS task script to create a context that can be used to send messages to Dagster: + +```python file=/guides/dagster/dagster_pipes/ecs/task.py
from dagster_pipes import open_dagster_pipes


def main():
    # The default params loader and context loader are used, so no extra
    # configuration is needed here
    with open_dagster_pipes() as pipes:
        pipes.log.info("Hello from AWS ECS task!")
        pipes.report_asset_materialization(
            metadata={"some_metric": {"raw_value": 0, "type": "int"}},
            data_version="alpha",
        )


if __name__ == "__main__":
    main()
``` + +--- + +## Step 3: Create an asset using the PipesECSClient to launch the task + +In the Dagster asset/op code, use the `PipesECSClient` resource to launch the task: + +```python file=/guides/dagster/dagster_pipes/ecs/dagster_code.py startafter=start_asset_marker endbefore=end_asset_marker
from dagster_aws.pipes import PipesECSClient

from dagster import AssetExecutionContext, asset


@asset
def ecs_pipes_asset(context: AssetExecutionContext, pipes_ecs_client: PipesECSClient):
    return pipes_ecs_client.run(
        context=context,
        run_task_params={
            "taskDefinition": "my-task",
            "count": 1,
        },
    ).get_materialize_result()
``` + +This will launch the AWS ECS task and wait until it reaches `"STOPPED"` status. If any of the task's containers fail, the Dagster process will raise an exception. If the Dagster process is interrupted while the task is still running, the task will be terminated.
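Since `run_task_params` is forwarded to boto3's `ecs.run_task` call, standard fields such as container overrides can be passed through as well. A hedged sketch - the container name and environment variable below are placeholders:

```python
from dagster_aws.pipes import PipesECSClient

from dagster import AssetExecutionContext, asset


@asset
def ecs_pipes_asset_with_overrides(
    context: AssetExecutionContext, pipes_ecs_client: PipesECSClient
):
    return pipes_ecs_client.run(
        context=context,
        run_task_params={
            "taskDefinition": "my-task",
            "count": 1,
            # Standard boto3 run_task fields pass through unchanged;
            # "my-container" is a placeholder container name
            "overrides": {
                "containerOverrides": [
                    {
                        "name": "my-container",
                        "environment": [
                            {"name": "SOME_ENV_VAR", "value": "some_value"}
                        ],
                    }
                ]
            },
        },
    ).get_materialize_result()
```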
+ +--- + +## Step 4: Create Dagster definitions + +Next, add the `PipesECSClient` resource to your project's object: + +```python file=/guides/dagster/dagster_pipes/ecs/dagster_code.py startafter=start_definitions_marker endbefore=end_definitions_marker
from dagster import Definitions  # noqa
from dagster_aws.pipes import PipesECSClient


defs = Definitions(
    assets=[ecs_pipes_asset],
    resources={"pipes_ecs_client": PipesECSClient()},
)
``` + +Dagster will now be able to launch the AWS ECS task from the `ecs_pipes_asset` asset, and receive logs and events from the task. If using the default `message_reader`, `PipesCloudWatchMessageReader`, logs will be read from the CloudWatch log group specified in the container `"logConfiguration"` field definition. Logs from all containers in the task will be read. + +--- + +## Related + + + + + diff --git a/docs/content/concepts/dagster-pipes/aws-glue.mdx b/docs/content/concepts/dagster-pipes/aws-glue.mdx new file mode 100644 index 0000000000000..38fafb25f61c7 --- /dev/null +++ b/docs/content/concepts/dagster-pipes/aws-glue.mdx @@ -0,0 +1,152 @@ +--- +title: "Integrating AWS Glue with Dagster Pipes | Dagster Docs" +description: "Learn to integrate Dagster Pipes with AWS Glue to launch external code from Dagster assets." +--- + +# AWS Glue & Dagster Pipes + +This tutorial gives a short overview on how to use [Dagster Pipes](/concepts/dagster-pipes) with [AWS Glue](https://aws.amazon.com/glue/). + +The [dagster-aws](/\_apidocs/libraries/dagster-aws) integration library provides the resource which can be used to launch AWS Glue jobs from Dagster assets and ops. Dagster can receive regular events like logs, asset checks, or asset materializations from jobs launched with this client. Using it requires minimal code changes on the job side. + +--- + +## Prerequisites + +- **In the orchestration environment**, you'll need to: + + - Install the following packages: + + ```shell + pip install dagster dagster-webserver dagster-aws + ``` + + Refer to the [Dagster installation guide](/getting-started/install) for more info. + + - **AWS authentication credentials configured.** If you don't have this set up already, refer to the [boto3 quickstart](https://boto3.amazonaws.com/v1/documentation/api/latest/guide/quickstart.html). + +- **In AWS**: + + - An existing AWS account + - An AWS Glue job with a Python 3.8+ runtime environment + +--- + +## Step 1: Provide the dagster-pipes module + +Provide the `dagster-pipes` module to the AWS Glue job either by installing it in the Glue job environment or packaging it along with the job script.
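If you prefer not to bake the dependency into the job environment, Glue can also install PyPI packages at job start via its special `--additional-python-modules` parameter. A sketch using boto3 directly (the job name matches the one used later in this guide):

```python
import boto3

glue = boto3.client("glue")

# Install dagster-pipes for a single run via Glue's special
# --additional-python-modules job argument
glue.start_job_run(
    JobName="Example Job",
    Arguments={"--additional-python-modules": "dagster-pipes"},
)
```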
+ +--- + +## Step 2: Add dagster-pipes to the Glue job + +Call `open_dagster_pipes` in the Glue job script to create a context that can be used to send messages to Dagster: + +```python file=/guides/dagster/dagster_pipes/glue/glue_script.py +import boto3 +from dagster_pipes import ( + PipesCliArgsParamsLoader, + PipesS3ContextLoader, + open_dagster_pipes, +) + +client = boto3.client("s3") +context_loader = PipesS3ContextLoader(client) +params_loader = PipesCliArgsParamsLoader() + + +def main(): + with open_dagster_pipes( + context_loader=context_loader, + params_loader=params_loader, + ) as pipes: + pipes.log.info("Hello from AWS Glue job!") + pipes.report_asset_materialization( + metadata={"some_metric": {"raw_value": 0, "type": "int"}}, + data_version="alpha", + ) + + +if __name__ == "__main__": + main() +``` + +--- + +## Step 3: Add the PipesGlueClient to Dagster code + +In the Dagster asset/op code, use the `PipesGlueClient` resource to launch the job: + +```python file=/guides/dagster/dagster_pipes/glue/dagster_code.py startafter=start_asset_marker endbefore=end_asset_marker +import os + +import boto3 +from dagster_aws.pipes import PipesGlueClient + +from dagster import AssetExecutionContext, asset + + +@asset +def glue_pipes_asset( + context: AssetExecutionContext, pipes_glue_client: PipesGlueClient +): + return pipes_glue_client.run( + context=context, + start_job_run_params={ + "JobName": "Example Job", + "Arguments": {"some_parameter": "some_value"}, + }, + ).get_materialize_result() +``` + +This will launch the AWS Glue job and monitor its status until it either fails or succeeds. A job failure will also cause the Dagster run to fail with an exception. + +--- + +## Step 4: Create Dagster definitions + +Next, add the `PipesGlueClient` resource to your project's object: + +```python file=/guides/dagster/dagster_pipes/glue/dagster_code.py startafter=start_definitions_marker endbefore=end_definitions_marker +from dagster import Definitions # noqa +from dagster_aws.pipes import PipesS3ContextInjector, PipesCloudWatchMessageReader + + +bucket = os.environ["DAGSTER_GLUE_S3_CONTEXT_BUCKET"] + + +defs = Definitions( + assets=[glue_pipes_asset], + resources={ + "pipes_glue_client": PipesGlueClient( + client=boto3.client("glue"), + context_injector=PipesS3ContextInjector( + client=boto3.client("s3"), + bucket=bucket, + ), + message_reader=PipesCloudWatchMessageReader(client=boto3.client("logs")), + ) + }, +) +``` + +Dagster will now be able to launch the AWS Glue job from the `glue_pipes_asset` asset. + +By default, the client uses the CloudWatch log stream (`.../output/`) created by the Glue job to receive Dagster events. The client will also forward the stream to `stdout`. + +To customize this behavior, the client can be configured to use , and the Glue job to use . + +--- + +## Related + + + + + diff --git a/docs/content/guides/dagster-pipes/aws-lambda.mdx b/docs/content/concepts/dagster-pipes/aws-lambda.mdx similarity index 77% rename from docs/content/guides/dagster-pipes/aws-lambda.mdx rename to docs/content/concepts/dagster-pipes/aws-lambda.mdx index 102366ddf9a42..abe50035a5dc6 100644 --- a/docs/content/guides/dagster-pipes/aws-lambda.mdx +++ b/docs/content/concepts/dagster-pipes/aws-lambda.mdx @@ -8,18 +8,16 @@ description: "Learn to integrate Dagster Pipes with AWS Lambda to launch externa Heads up! This guide focuses on using an out-of-the-box Amazon Web Services (AWS) Lambda resource. For further customization, use the{" "} - + open_pipes_session approach {" "} instead. 
-In this guide, we’ll show you how to use [Dagster Pipes](/guides/dagster-pipes) with Dagster’s AWS Lambda integration to invoke a Lambda function and execute external code. +In this guide, we’ll show you how to use [Dagster Pipes](/concepts/dagster-pipes) with Dagster’s AWS Lambda integration to invoke a Lambda function and execute external code. Pipes allows your code to interact with Dagster outside of a full Dagster environment. Instead, the environment only needs to contain `dagster-pipes`, a single-file Python package with no dependencies that can be installed from PyPI or easily vendored. `dagster-pipes` handles streaming `stdout`/`stderr` and Dagster events back to the orchestration process. -**Note**: Dagster Pipes is currently **experimental**. - --- ## Prerequisites @@ -36,7 +34,7 @@ To use Dagster Pipes with AWS Lambda, you’ll need: Refer to the [Dagster installation guide](/getting-started/install) for more info. - - **An existing boto3 client that can authenticate to AWS.** If you don't have this set up already, refer to the [boto3 quickstart](https://boto3.amazonaws.com/v1/documentation/api/latest/guide/quickstart.html). + - **AWS authentication credentials configured.** If you don't have this set up already, refer to the [boto3 quickstart](https://boto3.amazonaws.com/v1/documentation/api/latest/guide/quickstart.html). - **In AWS**: @@ -113,17 +111,16 @@ Next, you'll add `dagster-pipes` to the function. In this step, you'll add the code you want to execute to the function. Create another file in the AWS UI - or use the default `lambda_function.py` file created by the function - and paste in the following code: -```python +```python file=/guides/dagster/dagster_pipes/lambda/lambda_function.py from dagster_pipes import PipesMappingParamsLoader, open_dagster_pipes def lambda_handler(event, _context): - with open_dagster_pipes( params_loader=PipesMappingParamsLoader(event), ) as pipes: # Get some_parameter_value from the event payload - some_parameter_value = event['some_parameter_value'] + some_parameter_value = event["some_parameter_value"] # Stream log message back to Dagster pipes.log.info(f"Using some_parameter value: {some_parameter_value}") @@ -151,11 +148,11 @@ Let's review what this code does: - **Initializes the Dagster Pipes context (), which yields an instance of called `pipes`.** - On the orchestration side - which we'll discuss in the next section - we'll set up a Dagster asset that uses the to inject information needed for Pipes in an `event` payload. In this code on the AWS Lambda side, we're passing this payload to and using it in . + On the orchestration side - which we'll discuss in the next section - we'll set up a Dagster asset that uses the to inject information needed for Pipes in an `event` payload. In this code on the AWS Lambda side, we're passing this payload to and using it in . - We're using the default context loader () and message writer () in this example. These objects establish communication between the orchestration and external process. On the orchestration end, these match a corresponding `PipesLambdaEventContextInjector` and `PipesLambdaLogsMessageReader`, which are instantiated inside the . + We're using the default context loader () and message writer () in this example. These objects establish communication between the orchestration and external process. On the orchestration end, these match a corresponding `PipesLambdaEventContextInjector` and `PipesLambdaLogsMessageReader`, which are instantiated inside the . 
-- **Inside the body of the context manager (), retrieve a log and report an asset materialization.** These calls use the temporary communications channels established by and . To see the full range of what you can do with the , see the API docs or the general [Pipes documentation](/guides/dagster-pipes). +- **Inside the body of the context manager (), retrieve a log and report an asset materialization.** These calls use the temporary communications channels established by and . To see the full range of what you can do with the , see the API docs or the general [Pipes documentation](/concepts/dagster-pipes). At this point you can execute the rest of your AWS Lambda code as normal, invoking various APIs as needed. @@ -173,22 +170,24 @@ In this step, you’ll create a Dagster asset that, when materialized, opens a D In your Dagster project, create a file named `dagster_lambda_pipes.py` and paste in the following code: -```python +```python file=/guides/dagster/dagster_pipes/lambda/dagster_code.py startafter=start_asset_marker endbefore=end_asset_marker # dagster_lambda_pipes.py import boto3 +from dagster_aws.pipes import PipesLambdaClient + from dagster import AssetExecutionContext, Definitions, asset -from dagster_aws import PipesLambdaClient @asset -def lambda_pipes_asset(context: AssetExecutionContext, lambda_pipes_client: PipesLambdaClient): - return lambda_pipes_client.run( - context: context, - client: boto3.client("lambda"), - function_name: "dagster_pipes_function", - event: {"some_parameter_value": 1}, - ).get_materialize_result() +def lambda_pipes_asset( + context: AssetExecutionContext, lambda_pipes_client: PipesLambdaClient +): + return lambda_pipes_client.run( + context=context, + function_name="dagster_pipes_function", + event={"some_parameter_value": 1}, + ).get_materialize_result() ``` Here’s what we did in this example: @@ -197,16 +196,15 @@ Here’s what we did in this example: - Provided as the `context` argument to the asset. This object provides access to system APIs such as resources, config, and logging. -- Specified a resource for the asset to use, , which is a pre-built Dagster resource that allows you to quickly get Pipes working with AWS Lambda. +- Specified a resource for the asset to use, , which is a pre-built Dagster resource that allows you to quickly get Pipes working with AWS Lambda. We also specified the following for the resource: - `context` - The asset's `context` () data - - `client` - A boto client that can authenticate to AWS - `function_name` - The name or ARN of the function to invoke. This info can be found on the function's details page in AWS. In our example, the function is named `dagster_pipes_function` - `event` - A JSON-serializable object containing data to pass as input to the Lambda function - This argument is passed to the `run` method of , which submits the provided information to the [boto `invoke` API](https://boto3.amazonaws.com/v1/documentation/api/latest/reference/services/lambda/client/invoke.html) and then invokes the specified function (`function_name`). + This argument is passed to the `run` method of , which submits the provided information to the [boto `invoke` API](https://boto3.amazonaws.com/v1/documentation/api/latest/reference/services/lambda/client/invoke.html) and then invokes the specified function (`function_name`). - Returned a object representing the result of execution. This is obtained by calling `get_materialize_result` on the object returned by `run` after the execution in AWS Lambda has completed. 
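+If you want to sanity-check the wiring before continuing, one option is to materialize the asset in-process with Dagster's `materialize` test utility. The following is a sketch added for illustration rather than part of the original walkthrough; it assumes AWS credentials are configured locally and that the `dagster_pipes_function` Lambda from Step 1 exists:
+
+```python
+import boto3
+
+from dagster import materialize
+from dagster_aws.pipes import PipesLambdaClient
+
+# Invokes the real Lambda function, so this is a smoke test rather than a
+# unit test. `lambda_pipes_asset` is the asset defined above.
+result = materialize(
+    [lambda_pipes_asset],
+    resources={
+        "lambda_pipes_client": PipesLambdaClient(client=boto3.client("lambda"))
+    },
+)
+assert result.success
+```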
@@ -216,46 +214,16 @@ Next, you’ll add the asset and AWS Lambda resource to your project’s code lo

Copy and paste the following to the bottom of `dagster_lambda_pipes.py`:

-```python
-# dagster_lambda_pipes.py
-
-defs = Definitions(
-    assets=[lambda_pipes_asset],
-    resources={
-        "lambda_pipes_client": PipesLambdaClient(),
-    },
-)
-```
-
-At this point, `dagster_lambda_pipes.py` should look like the following:
-
-```python
+```python file=/guides/dagster/dagster_pipes/lambda/dagster_code.py startafter=start_definitions_marker endbefore=end_definitions_marker
 # dagster_lambda_pipes.py

-import boto3
-from dagster import AssetExecutionContext, Definitions, asset
-from dagster_aws import PipesLambdaClient
-
-
-@asset
-def lambda_pipes_asset(context: AssetExecutionContext, lambda_pipes_client: PipesLambdaClient):
-    return lambda_pipes_client.run(
-        context: context,
-        client: boto3.client("lambda"),
-        function_name: "dagster_pipes_function",
-        event: {"some_parameter_value": 1},
-    ).get_materialize_result()
-
-
 defs = Definitions(
-    assets=[lambda_pipes_asset],
-    resources={
-        "lambda_pipes_client": PipesLambdaClient(),
-    },
+    assets=[lambda_pipes_asset],
+    resources={"lambda_pipes_client": PipesLambdaClient(client=boto3.client("lambda"))},
 )
 ```

----
+Sometimes, you may want to transition data pipelines between development and production environments with minimal code changes. To do so, you can use the [Resources](/concepts/resources) system to vary the Pipes clients based on different deployments. For example, you can specify differently configured boto3 clients, or you can handle the switch by swapping the underlying AWS environment variables between deployments. For more info, check out the detailed guides in [Transitioning Data Pipelines from Development to Production](/guides/dagster/transitioning-data-pipelines-from-development-to-production) and [Testing against production with Dagster+ Branch Deployments](/guides/dagster/branch_deployments).

 ## Step 3: Invoke the AWS Lambda function from the Dagster UI

@@ -287,11 +255,11 @@ In this step, you’ll invoke the AWS Lambda function you defined in [Step 1](#s

-[Dagster Pipes](/guides/dagster-pipes) is a toolkit for integrating Dagster with an arbitrary external compute environment. While many users will be well-served by the simplified interface offered by Pipes client objects (e.g. , ), others will need a greater level of control over Pipes. This is particularly the case for users seeking to connect large existing codebases to Dagster.
+[Dagster Pipes](/concepts/dagster-pipes) is a toolkit for integrating Dagster with an arbitrary external compute environment. While many users will be well-served by the simplified interface offered by Pipes client objects (e.g. , ), others will need a greater level of control over Pipes. This is particularly the case for users seeking to connect large existing codebases to Dagster.

 This guide will cover the lower level Pipes APIs and how you can compose them to provide a custom solution for your data platform.

@@ -197,7 +197,7 @@ from third_party_api import (
 from dagster import (
     AssetExecutionContext,
-    PipesResult,
+    PipesExecutionResult,
     PipesTempFileContextInjector,
     PipesTempFileMessageReader,
     asset,
@@ -206,7 +206,7 @@ from dagster import (


 @asset
-def some_pipes_asset(context: AssetExecutionContext) -> Iterator[PipesResult]:
+def some_pipes_asset(context: AssetExecutionContext) -> Iterator[PipesExecutionResult]:
     with open_pipes_session(
         context=context,
         extras={"foo": "bar"},
@@ -279,7 +279,7 @@ with open_dagster_pipes(

     # ... business logic

-    # Creates a `MaterializationResult` on the orchestration side. Notice no value for the asset is
+    # Creates a `MaterializeResult` on the orchestration side. Notice no value for the asset is
     # included. Pipes only supports reporting that a materialization occurred and associated
     # metadata.
     pipes.report_asset_materialization(
@@ -487,10 +487,10 @@ class MyCustomCloudServiceMessageWriterChannel(PipesBlobStoreMessageWriterChanne
diff --git a/docs/content/concepts/dagster-pipes/databricks.mdx b/docs/content/concepts/dagster-pipes/databricks.mdx
new file mode 100644
index 0000000000000..8d465762a5b29
--- /dev/null
+++ b/docs/content/concepts/dagster-pipes/databricks.mdx
@@ -0,0 +1,392 @@
+---
+title: "Integrating Databricks with Dagster Pipes | Dagster Docs"
+description: "Learn to integrate Dagster Pipes with Databricks to launch external code from Dagster assets."
+---
+
+# Integrating Databricks with Dagster Pipes
+
+In this guide, we’ll show you how to use [Dagster Pipes](/concepts/dagster-pipes) with Dagster’s Databricks integration to launch Databricks jobs.
+
+Pipes allows your Databricks jobs to stream logs (including `stdout` and `stderr` of the driver process) and events back to Dagster. This does not require a full Dagster environment on Databricks; instead:
+
+1. The Databricks environment needs to include [`dagster-pipes`](https://pypi.org/project/dagster-pipes), a single-file Python package with no dependencies that can be installed from PyPI or easily vendored, and
+2. Databricks jobs must be launched from Dagster
+
+---
+
+## Prerequisites
+
+To use Dagster Pipes with Databricks:
+
+- **In the orchestration environment**, you'll need to install the following packages:
+
+  ```shell
+  pip install dagster dagster-webserver dagster-databricks
+  ```
+
+  Refer to the [Dagster installation guide](/getting-started/install) for more info.
+
+- **In Databricks**, you'll need:
+
+  - **A Databricks workspace**. If you don’t have this, follow the [Databricks quickstart](https://docs.databricks.com/workflows/jobs/jobs-quickstart.html) to set one up.
+  - **The following information about your Databricks workspace**:
+
+    - `host` - The host URL of your Databricks workspace, ex: `https://dbc-xxxxxxx-yyyy.cloud.databricks.com/`
+    - `token` - A personal access token for the Databricks workspace. Refer to the Databricks API authentication documentation for more info about retrieving these values.
+
+    You should set and export the Databricks host and token environment variables in your shell session:
+
+    ```shell
+    export DATABRICKS_HOST=
+    export DATABRICKS_TOKEN=
+    ```
+
+---
+
+## Step 1: Create an asset computed in Databricks
+
+In this step, you’ll create a Dagster asset that, when materialized, opens a Dagster pipes session and launches a Databricks job.
+
+### Step 1.1: Define the Dagster asset
+
+In your Dagster project, create a file named `dagster_databricks_pipes.py` and paste in the following code:
+
+```python file=/guides/dagster/dagster_pipes/databricks/databricks_asset_client.py startafter=start_databricks_asset endbefore=end_databricks_asset
+### dagster_databricks_pipes.py
+
+import os
+import sys
+
+from dagster_databricks import PipesDatabricksClient
+
+from dagster import AssetExecutionContext, Definitions, EnvVar, asset
+from databricks.sdk import WorkspaceClient
+from databricks.sdk.service import jobs
+
+
+@asset
+def databricks_asset(
+    context: AssetExecutionContext, pipes_databricks: PipesDatabricksClient
+):
+    task = jobs.SubmitTask.from_dict(
+        {
+            # The cluster settings below are somewhat arbitrary. Dagster Pipes is
+            # not dependent on a specific spark version, node type, or number of
+            # workers.
+ "new_cluster": { + "spark_version": "12.2.x-scala2.12", + "node_type_id": "i3.xlarge", + "num_workers": 0, + "cluster_log_conf": { + "dbfs": {"destination": "dbfs:/cluster-logs-dir-noexist"}, + }, + }, + "libraries": [ + # Include the latest published version of dagster-pipes on PyPI + # in the task environment + {"pypi": {"package": "dagster-pipes"}}, + ], + "task_key": "some-key", + "spark_python_task": { + "python_file": "dbfs:/my_python_script.py", # location of target code file + "source": jobs.Source.WORKSPACE, + }, + } + ) + + print("This will be forwarded back to Dagster stdout") + print("This will be forwarded back to Dagster stderr", file=sys.stderr) + + extras = {"some_parameter": 100} + + return pipes_databricks.run( + task=task, + context=context, + extras=extras, + ).get_materialize_result() +``` + +Let's review what's happening in this code: + +- **Includes a number of imports from Dagster and the Databricks SDK.** There are a few that aren't used in this code block, but will be later in this guide. + +- **Creates an asset named `databricks_asset`.** We also: + + - Provided as the `context` argument to the asset. This object provides access to system APIs such as resources, config, and logging. We’ll come back to this a bit later in this section. + - Specified a resource for the asset to use. We’ll also come back to this later. + +- **Defines a Databricks `SubmitTask` object in the asset body.** Coverage of all the fields on this object is beyond the scope of this guide, but you can find further information in the [Databricks SDK API docs](https://databricks-sdk-py.readthedocs.io/en/latest/workspace/jobs/jobs.html) and [source code](https://github.com/databricks/databricks-sdk-py/blob/main/databricks/sdk/service/jobs.py) for the `SubmitTask` object. + + The submitted task must: + + - **Specify `dagster-pipes` as a PyPI dependency**. You can include a version pin (e.g. `dagster-pipes==1.5.4`) if desired. + - Use a `spark_python_task`. + - Specify either `new_cluster` (this is the **recommended approach**) or `existing_cluster_id`. The `new_cluster` field is used in this example. + - If `new_cluster` is set, then setting `new_cluster.cluster_log_conf.dbfs` enables the to automatically set up objects for `stdout` and `stderr` of the driver process. These will periodically forward the `stdout` and `stderr` logs written by Databricks back to Dagster. **Note**: Because Databricks only updates these log files every five minutes, that is the maximum frequency at which Dagster can forward the logs. + - If `existing_cluster_id` is set, won't be able to forward `stdout` and `stderr` driver logs to Dagster. Using an existing cluster **requires passing an instance of to ** in the Python script which is executed on Databricks. This is because setting environment variables is only possible when creating a new cluster, so we have to use the alternative method of passing Pipes parameters as command-line arguments. + +- **Defines an `extras` dictionary containing some arbitrary data (`some_parameter`).** This is where you can put various data, e.g. from the Dagster run config, that you want to be available in Databricks. Anything added here must be JSON-serializable. + +- **Passes the `SubmitTask` object, `AssetExecutionContext`, and `extras` dictionary to the `run` method of **. This method synchronously executes the Databricks job specified by the `SubmitTask` object. 
It slightly modifies the object by injecting some environment variables under `new_cluster.spark_env_vars` before submitting the object to the Databricks API. + +- **Returns a object representing the result of execution**. This is obtained by calling `get_materialize_result` on the object returned by `run` after the Databricks job has finished. **Note**: Execution can take several minutes even for trivial scripts due to Databricks cluster provisioning times. + +### Step 1.2: Define the Databricks Pipes client and definitions + +The [`dagster-databricks`](/\_apidocs/libraries/dagster-databricks) library provides a , which is a pre-built Dagster resource that allows you to quickly get Pipes working with your Databricks workspace. + +Add the following to the bottom of `dagster_databricks_pipes.py` to define the resource and a object that binds it to the `databricks_asset`: + +```python file=/guides/dagster/dagster_pipes/databricks/databricks_asset_client.py startafter=start_definitions endbefore=end_definitions +pipes_databricks_resource = PipesDatabricksClient( + client=WorkspaceClient( + host=os.environ["DATABRICKS_HOST"], + token=os.environ["DATABRICKS_TOKEN"], + ) +) + +defs = Definitions( + assets=[databricks_asset], resources={"pipes_databricks": pipes_databricks_resource} +) +``` + +--- + +## Step 2: Write a script for execution on Databricks + +The next step is to write the code that will be executed on Databricks. In the Databricks task specification in [Step 1.1](#step-11-define-the-dagster-asset), we referenced a file `dbfs:/my_python_script.py` in the `spark_python_task`: + +```python +"spark_python_task": { + "python_file": "dbfs:/my_python_script.py", # location of target code file + "source": jobs.Source.WORKSPACE, +} +``` + +We'll create this script from scratch and upload it to DBFS. You can use the Databricks UI or run a command from a shell to do this. To use the shell method, run: + +```shell +dbfs cp my_python_script.py dbfs:/my_python_script.py +``` + +Let's look at the script itself: + +```python file=/guides/dagster/dagster_pipes/databricks/databricks_script.py +### dbfs:/my_python_script.py + +# `dagster_pipes` must be available in the databricks python environment +from dagster_pipes import ( + PipesDbfsContextLoader, + PipesDbfsMessageWriter, + open_dagster_pipes, +) + +# Sets up communication channels and downloads the context data sent from Dagster. +# Note that while other `context_loader` and `message_writer` settings are +# possible, it is recommended to use `PipesDbfsContextLoader` and +# `PipesDbfsMessageWriter` for Databricks. +with open_dagster_pipes( + context_loader=PipesDbfsContextLoader(), + message_writer=PipesDbfsMessageWriter(), +) as pipes: + # Access the `extras` dict passed when launching the job from Dagster. + some_parameter_value = pipes.get_extra("some_parameter") + + # Stream log message back to Dagster + pipes.log.info(f"Using some_parameter value: {some_parameter_value}") + + # ... your code that computes and persists the asset + + # Stream asset materialization metadata and data version back to Dagster. + # This should be called after you've computed and stored the asset value. We + # omit the asset key here because there is only one asset in scope, but for + # multi-assets you can pass an `asset_key` parameter. 
+    pipes.report_asset_materialization(
+        metadata={
+            "some_metric": {"raw_value": some_parameter_value + 1, "type": "int"}
+        },
+        data_version="alpha",
+    )
+```
+
+Before we go any further, let's review what this script does:
+
+- **Imports `PipesDbfsContextLoader`, `PipesDbfsMessageWriter`, and `open_dagster_pipes` from `dagster_pipes`.** The `PipesDbfsContextLoader` and `PipesDbfsMessageWriter` are DBFS-specific implementations of the context loader and message writer. Refer to the [Dagster Pipes details and customization guide](/concepts/dagster-pipes/dagster-pipes-details-and-customization) for protocol details.
+
+  Both objects write temporary files on DBFS for communication between the orchestration and external process. The `PipesDbfsContextLoader` and `PipesDbfsMessageWriter` match a corresponding `PipesDbfsContextInjector` and `PipesDbfsMessageReader` on the orchestration end, which are instantiated inside the `PipesDatabricksClient`.
+
+- **Passes the context loader and message writer to the `open_dagster_pipes` context manager**, which yields an instance of `PipesContext` called `pipes`. **Note**: when using `existing_cluster_id`, you must also import `PipesCliArgsParamsLoader` and pass an instance of it to `open_dagster_pipes` as the `params_loader` parameter.
+
+  Inside the body of the context manager are various calls against `pipes` to retrieve an extra, log, and report an asset materialization. All of these calls will use the DBFS temporary file-based communications channels established by `PipesDbfsContextLoader` and `PipesDbfsMessageWriter`. To see the full range of what you can do with the `PipesContext`, see the API docs or the general [Pipes guide](/concepts/dagster-pipes).
+
+At this point you can execute the rest of your Databricks code as normal, invoking various APIs as needed.
+
+#### Existing codebases
+
+For illustrative purposes, we've created a Python script from scratch. However, you may want to apply Pipes to an existing codebase.
+
+One approach that can be useful is to wrap the `open_dagster_pipes` context manager around an existing `main` function or entry point. You can either pass the `PipesContext` down through your business logic, or simply report an asset materialization after your business logic is done:
+
+```python file=/guides/dagster/dagster_pipes/databricks/databricks_script_existing.py
+from dagster_pipes import (
+    PipesDbfsContextLoader,
+    PipesDbfsMessageWriter,
+    open_dagster_pipes,
+)
+
+# ... existing code
+
+if __name__ == "__main__":
+    with open_dagster_pipes(
+        context_loader=PipesDbfsContextLoader(),
+        message_writer=PipesDbfsMessageWriter(),
+    ) as pipes:
+        # ... existing logic
+        pipes.report_asset_materialization(
+            asset_key="foo",
+            metadata={"some_key": "some_value"},
+            data_version="alpha",
+        )
+```
+
+---
+
+## Step 3: Run the Databricks job from the Dagster UI
+
+In this step, you’ll run the Databricks job you created in [Step 1.2](#step-12-define-the-databricks-pipes-client-and-definitions) from the Dagster UI.
+
+1. In a new command line session, run the following to start the UI:
+
+   ```shell
+   dagster dev -f dagster_databricks_pipes.py
+   ```
+
+2. Navigate to [localhost:3000](http://localhost:3000/), where you should see the UI:
+
+   Databricks asset
+
+3. Click **Materialize** near the top right corner of the page, then click **View** on the **Launched Run** popup. Wait for the run to complete, and the event log should look like this:
+
+   Event log for Databricks run
+
+---
+
+## Advanced: Customization using open_pipes_session
+
+The `PipesDatabricksClient` resource is a high-level API that doesn't cover all use cases. If you have existing code to launch/poll the job that you don't want to change, if you want to stream back materializations as they occur, or if you just want more control than is permitted by `PipesDatabricksClient`, you can use `open_pipes_session` instead of `PipesDatabricksClient`.
+
+To use `open_pipes_session`:
+
+1. Your Databricks job must be launched within the scope of the `open_pipes_session` context manager; and
+2. Your job must be launched on a cluster containing the environment variables available on the yielded `pipes_session`
+
+While your Databricks code is running, any calls to `report_asset_materialization` in the external script are streamed back to Dagster, causing a `MaterializeResult` object to be buffered on the `pipes_session`. You can either:
+
+- Leave these objects buffered until execution is complete (**Option 1** in the example code below), or
+- Stream them to Dagster machinery during execution by calling `yield pipes_session.get_results()` (**Option 2**)
+
+With either option, once the block closes, you must call `yield pipes_session.get_results()` to yield any remaining buffered results, since we cannot guarantee that all communications from Databricks have been processed until the `open_pipes_session` block closes.
+
+```python file=/guides/dagster/dagster_pipes/databricks/databricks_asset_open_pipes_session.py
+import os
+import sys
+
+from dagster_databricks import PipesDbfsContextInjector, PipesDbfsMessageReader
+from dagster_databricks.pipes import PipesDbfsLogReader
+
+from dagster import AssetExecutionContext, asset, open_pipes_session
+from databricks.sdk import WorkspaceClient
+
+
+@asset
+def databricks_asset(context: AssetExecutionContext):
+    client = WorkspaceClient(
+        host=os.environ["DATABRICKS_HOST"],
+        token=os.environ["DATABRICKS_TOKEN"],
+    )
+
+    # Arbitrary json-serializable data you want access to from the `PipesContext`
+    # in the Databricks runtime. Assume `sample_rate` is a parameter used by
+    # the target job's business logic.
+    extras = {"sample_rate": 1.0}
+
+    # Sets up Pipes communications channels
+    with open_pipes_session(
+        context=context,
+        extras=extras,
+        context_injector=PipesDbfsContextInjector(client=client),
+        message_reader=PipesDbfsMessageReader(
+            client=client,
+            # These log readers are optional. If you provide them, then you must set the
+            # `new_cluster.cluster_log_conf.dbfs.destination` field in the job you submit to a valid
+            # DBFS path. This will configure Databricks to write stdout/stderr to the specified
+            # location every 5 minutes. Dagster will poll this location and forward the
+            # stdout/stderr logs every time they are updated to the orchestration process
+            # stdout/stderr.
+            log_readers=[
+                PipesDbfsLogReader(
+                    client=client, remote_log_name="stdout", target_stream=sys.stdout
+                ),
+                PipesDbfsLogReader(
+                    client=client, remote_log_name="stderr", target_stream=sys.stderr
+                ),
+            ],
+        ),
+    ) as pipes_session:
+        ##### Option (1)
+        # NON-STREAMING. Just pass the necessary environment variables down.
+        # During execution, all reported materializations are buffered on the
+        # `pipes_session`. Yield them all after Databricks execution is finished.
+
+        # Dict[str, str] with environment variables containing Pipes comms info.
+        env_vars = pipes_session.get_bootstrap_env_vars()
+
+        # Some function that handles launching/monitoring of the Databricks job.
+        # It must ensure that the `env_vars` are set on the executing cluster.
+        custom_databricks_launch_code(env_vars)
+
+        ##### Option (2)
+        # STREAMING. Pass `pipes_session` down. During execution, you can yield any
+        # asset materializations that have been reported by calling
+        # `pipes_session.get_results()` as often as you like. `get_results` returns
+        # an iterator that your custom code can `yield from` to forward the
+        # results back to the materialize function.
+        # Note that you will need to extract the env vars by calling
+        # `pipes_session.get_bootstrap_env_vars()`, and launch the Databricks job
+        # in the same way as with option (1).
+
+        # The function should return an `Iterator[MaterializeResult]`.
+        yield from custom_databricks_launch_code(pipes_session)
+
+    # With either option (1) or (2), this is required to yield any remaining
+    # buffered results.
+    yield from pipes_session.get_results()
+```
+
+---
+
+## Related
+
+
+
+
+
+
diff --git a/docs/content/guides/dagster-pipes/integrating-docker-with-dagster-pipes.mdx b/docs/content/concepts/dagster-pipes/integrating-docker-with-dagster-pipes.mdx
similarity index 100%
rename from docs/content/guides/dagster-pipes/integrating-docker-with-dagster-pipes.mdx
rename to docs/content/concepts/dagster-pipes/integrating-docker-with-dagster-pipes.mdx
diff --git a/docs/content/concepts/dagster-pipes/kubernetes.mdx b/docs/content/concepts/dagster-pipes/kubernetes.mdx
new file mode 100644
index 0000000000000..129d8297062c8
--- /dev/null
+++ b/docs/content/concepts/dagster-pipes/kubernetes.mdx
@@ -0,0 +1,250 @@
+---
+title: "Integrating Kubernetes with Dagster Pipes | Dagster Docs"
+description: "Learn to integrate Dagster Pipes with Kubernetes to launch external code from Dagster assets."
+---
+
+# Integrating Kubernetes with Dagster Pipes
+
+
+  Heads up! This guide focuses on using an out-of-the-box
+  Kubernetes resource. For further customization, use the{" "}
+  
+    open_pipes_session approach
+  {" "}
+  instead.
+
+
+In this guide, we’ll show you how to use [Dagster Pipes](/concepts/dagster-pipes) with Dagster’s Kubernetes integration to launch Kubernetes pods and execute external code.
+
+Pipes allows your code to interact with Dagster outside of a full Dagster environment. Instead, the environment only needs to contain `dagster-pipes`, a single-file Python package with no dependencies that can be installed from PyPI or easily vendored. `dagster-pipes` handles streaming `stdout`/`stderr` and Dagster events back to the orchestration process.
+
+---
+
+## Prerequisites
+
+To use Dagster Pipes with Kubernetes, you’ll need:
+
+- **In the orchestration environment**, you'll need to install the following packages:
+
+  ```shell
+  pip install dagster dagster-webserver dagster-k8s
+  ```
+
+  Refer to the [Dagster installation guide](/getting-started/install) for more info.
+
+- **A Kubernetes cluster**. This can be an existing cluster. Or, if working locally, you can use [kind](https://kind.sigs.k8s.io/) or [Docker Desktop](https://docs.docker.com/desktop/kubernetes/).
+
+---
+
+## Step 1: Define the external Kubernetes code container
+
+In this step, you’ll create a Kubernetes container image that runs some code that uses `dagster-pipes`.
+
+### Step 1.1: Write a Python script
+
+First, you'll write a Python script that uses `dagster-pipes` and is executed in a container via Kubernetes:
+
+```python
+# my_python_script.py
+
+from dagster_pipes import open_dagster_pipes
+
+with open_dagster_pipes() as pipes:
+    # Stream log message back to Dagster
+    pipes.log.info(f"Using some_parameter value: {pipes.get_extra('some_parameter')}")
+
+    # ...
your code that computes and persists the asset + + pipes.report_asset_materialization( + metadata={ + "some_metric": {"raw_value": 2, "type": "int"} + }, + data_version="alpha", + ) +``` + +Let's review what this code does: + +- Imports from `dagster_pipes` + +- **Initializes the Dagster Pipes context (), which yields an instance of called `pipes`.** + + We're using the default context loader () and message writer () in this example. These objects establish communication between the orchestration and external process. On the orchestration end, these match a corresponding `PipesContextInjector` and `PipesMessageReader`, which are instantiated inside the . + +- **Inside the body of the context manager (), retrieve a log and report an asset materialization.** These calls use the temporary communications channels established by and . To see the full range of what you can do with the , see the API docs or the general [Pipes documentation](/concepts/dagster-pipes). + +At this point you can execute the rest of your Kubernetes code as normal, invoking various APIs as needed. + +### Step 1.2: Define and build the container image + +Next, you'll package the script into a container image using a `Dockerfile`. For example: + +```dockerfile +FROM python:3.10-slim + +RUN pip install dagster-pipes + +COPY my_python_script.py . + +ENTRYPOINT [ "python","my_python_script.py" ] +``` + +Then, build the image: + +```shell +docker build -t pipes-example:v1 . +``` + +**Note**: Depending on the Kubernetes setup you're using, you may need to upload the container image to a registry or otherwise make it available to the cluster. For example: `kind load docker-image pipes-example:v1` + +--- + +## Step 2: Create the Dagster objects + +In this step, you’ll create a Dagster asset that, when materialized, opens a Dagster pipes session and spins up a Kubernetes pod to execute the container created in the previous step. + +### Step 2.1: Define the Dagster asset + +In your Dagster project, create a file named `dagster_k8s_pipes.py` and paste in the following code: + +```python +# dagster_k8s_pipes.py + +from dagster import AssetExecutionContext, Definitions, asset +from dagster_k8s import PipesK8sClient + + +@asset +def k8s_pipes_asset(context: AssetExecutionContext, k8s_pipes_client: PipesK8sClient): + return k8s_pipes_client.run( + context=context, + image="pipes-example:v1", + extras={ + "some_parameter": 1 + } + ).get_materialize_result() +``` + +Here’s what we did in this example: + +- Created an asset named `k8s_pipes_asset` + +- Provided as the `context` argument to the asset. This object provides access to system APIs such as resources, config, and logging. + +- Specified a resource for the asset to use, , which is a pre-built Dagster resource that allows you to quickly get Pipes working with Kubernetes. + + We also specified the following for the resource: + + - `context` - The asset's `context` () data + - `image` - The Kubernetes image we created in [Step 1](#step-1-define-the-external-kubernetes-code-container) + + These arguments are passed to the `run` method of , which submits the provided cluster information to the Kubernetes API and then runs the specified `image`. + +- Returned a object representing the result of execution. This is obtained by calling `get_materialize_result` on the object returned by `run` after the execution in Kubernetes has completed. + + + Heads up! Depending on your Kubernetes setup, there may be a + few additional things you need to do: +
+
+  - If the default behavior doesn't target the correct cluster, supply the
+    load_incluster_config, kubeconfig_file, and kube_context arguments on
+    PipesK8sClient
+  - If you need to alter default spec behaviors, use arguments on
+    PipesK8sClient.run, such as base_pod_spec
+
+### Step 2.2: Create Dagster Definitions
+
+Next, you’ll add the asset and Kubernetes resource to your project’s code location via the `Definitions` object. This makes the resource available to [other Dagster definitions in the project](/concepts/code-locations).
+
+Copy and paste the following to the bottom of `dagster_k8s_pipes.py`:
+
+```python
+# dagster_k8s_pipes.py
+
+defs = Definitions(
+    assets=[k8s_pipes_asset],
+    resources={
+        "k8s_pipes_client": PipesK8sClient(),
+    },
+)
+```
+
+At this point, `dagster_k8s_pipes.py` should look like the following:
+
+```python
+# dagster_k8s_pipes.py
+
+from dagster import AssetExecutionContext, Definitions, asset
+from dagster_k8s import PipesK8sClient
+
+
+@asset
+def k8s_pipes_asset(context: AssetExecutionContext, k8s_pipes_client: PipesK8sClient):
+    return k8s_pipes_client.run(
+        context=context,
+        image="pipes-example:v1",
+        extras={
+            "some_parameter": 1
+        }
+    ).get_materialize_result()
+
+
+defs = Definitions(
+    assets=[k8s_pipes_asset],
+    resources={
+        "k8s_pipes_client": PipesK8sClient(),
+    },
+)
+```
+
+---
+
+## Step 3: Launch the Kubernetes container from the Dagster UI
+
+In this step, you’ll run the Kubernetes container you defined in [Step 1](#step-1-define-the-external-kubernetes-code-container) from the Dagster UI.
+
+1. In a new command line session, run the following to start the UI:
+
+   ```shell
+   dagster dev -f dagster_k8s_pipes.py
+   ```
+
+2. Navigate to [localhost:3000](http://localhost:3000/), where you should see the UI.
+
+3. Click **Materialize** near the top right corner of the page, then click **View** on the **Launched Run** popup. Wait for the run to complete, and the event log should look like this:
+
+   Event log for Kubernetes run
+
+---
+
+## Related
+
+
+
+
+
+
diff --git a/docs/content/concepts/dagster-pipes/subprocess.mdx b/docs/content/concepts/dagster-pipes/subprocess.mdx
new file mode 100644
index 0000000000000..07fafdf686c8c
--- /dev/null
+++ b/docs/content/concepts/dagster-pipes/subprocess.mdx
@@ -0,0 +1,54 @@
+---
+title: "Dagster Pipes tutorial | Dagster Docs"
+description: "Learn how to use Dagster Pipes's built-in subprocess implementation to invoke a subprocess with a given command and environment"
+---
+
+# Dagster Pipes tutorial
+
+In this guide, we’ll show you how to use [Dagster Pipes](/concepts/dagster-pipes) with Dagster’s built-in subprocess implementation to run a local subprocess with a given command and environment. You can then send information such as structured metadata and logging back to Dagster from the subprocess, where it will be visible in the Dagster UI.
+
+To get there, you'll:
+
+- [Create a Dagster asset that invokes a subprocess](/concepts/dagster-pipes/subprocess/create-subprocess-asset)
+- [Modify existing code to work with Dagster Pipes to send information back to Dagster](/concepts/dagster-pipes/subprocess/modify-external-code)
+- Learn about using Dagster Pipes with other entities in the Dagster system in the [Reference](/concepts/dagster-pipes/subprocess/reference) section
+
+
+
+This guide focuses on using the out-of-the-box `PipesSubprocessClient` resource. For further customization of the subprocess invocation, use the `open_pipes_session` approach instead.
+
+
+
+
+
+
+
+---
+
+## Prerequisites
+
+To use Dagster Pipes to run a subprocess, you’ll need to have Dagster (`dagster`) and the Dagster UI (`dagster-webserver`) installed. Refer to the [Installation guide](/getting-started/install) for more info.
+
+You'll also need **an existing Python script.** We’ll use the following Python script to demonstrate.
This file will be invoked by the Dagster asset that you’ll create later in this tutorial. + +Create a file named `external_code.py` and paste the following into it: + +```python file=/guides/dagster/dagster_pipes/subprocess/part_1/external_code.py lines=2- +import pandas as pd + + +def main(): + orders_df = pd.DataFrame({"order_id": [1, 2], "item_id": [432, 878]}) + total_orders = len(orders_df) + print(f"processing total {total_orders} orders") + + +if __name__ == "__main__": + main() +``` + +--- + +## Ready to get started? + +When you've fulfilled all the prerequisites for the tutorial, you can get started by [creating a Dagster asset that executes a subprocess](/concepts/dagster-pipes/subprocess/create-subprocess-asset). diff --git a/docs/content/guides/dagster-pipes/subprocess/create-subprocess-asset.mdx b/docs/content/concepts/dagster-pipes/subprocess/create-subprocess-asset.mdx similarity index 88% rename from docs/content/guides/dagster-pipes/subprocess/create-subprocess-asset.mdx rename to docs/content/concepts/dagster-pipes/subprocess/create-subprocess-asset.mdx index a0c6f538c8986..dcfb0628178ca 100644 --- a/docs/content/guides/dagster-pipes/subprocess/create-subprocess-asset.mdx +++ b/docs/content/concepts/dagster-pipes/subprocess/create-subprocess-asset.mdx @@ -7,7 +7,7 @@ description: "Learn how to create a Dagster asset that invokes a subprocess that -This is part one of the [Using Dagster Pipes](/guides/dagster-pipes/subprocess) tutorial. If you are looking for how to modify your existing code that is already being orchestrated by Dagster, you can jump to Part 2: Modify external code. +This is part one of the [Using Dagster Pipes](/concepts/dagster-pipes/subprocess) tutorial. If you are looking for how to modify your existing code that is already being orchestrated by Dagster, you can jump to Part 2: Modify external code. @@ -17,7 +17,7 @@ In this part of the tutorial, you'll create a Dagster asset that, in its executi ## Step 1: Define the Dagster asset -Before getting started, make sure you have fulfilled all the [prerequisites](/guides/dagster-pipes/subprocess#prerequisites) for the tutorial. You should have a standalone Python script named `external_code.py` which looks like the following: +Before getting started, make sure you have fulfilled all the [prerequisites](/concepts/dagster-pipes/subprocess#prerequisites) for the tutorial. You should have a standalone Python script named `external_code.py` which looks like the following: ```python file=/guides/dagster/dagster_pipes/subprocess/part_1/external_code.py lines=2- import pandas as pd @@ -35,7 +35,7 @@ if __name__ == "__main__": ### Step 1.1: Define the asset -First, create a new file named `dagster_code.py` in the same directory as the `external_code.py` file you created earlier in the [Prerequisites](/guides/dagster-pipes/subprocess#prerequisites) step. +First, create a new file named `dagster_code.py` in the same directory as the `external_code.py` file you created earlier in the [Prerequisites](/concepts/dagster-pipes/subprocess#prerequisites) step. Next, you’ll define the asset. Copy and paste the following into the file: @@ -104,7 +104,7 @@ Let’s take a look at what this code does: ## Step 2: Define a Definitions object -To make the asset and subprocess resource loadable and accessible by Dagster's tools, such as the CLI, UI, and Dagster Cloud, you’ll create a object that contains them. 
+To make the asset and subprocess resource loadable and accessible by Dagster's tools, such as the CLI, UI, and Dagster+, you’ll create a object that contains them. Copy and paste the following to the bottom of `dagster_code.py`: @@ -200,4 +200,4 @@ In this step, you’ll execute the subprocess asset you created in earlier steps ## What's next? -At this point, you've created a Dagster asset that invokes an external Python script, launched the code in a subprocess, and viewed the result in Dagster UI. Next, you'll learn how to [modify your external code to work with Dagster Pipes](/guides/dagster-pipes/subprocess/modify-external-code) to send information back to Dagster. +At this point, you've created a Dagster asset that invokes an external Python script, launched the code in a subprocess, and viewed the result in Dagster UI. Next, you'll learn how to [modify your external code to work with Dagster Pipes](/concepts/dagster-pipes/subprocess/modify-external-code) to send information back to Dagster. diff --git a/docs/content/guides/dagster-pipes/subprocess/modify-external-code.mdx b/docs/content/concepts/dagster-pipes/subprocess/modify-external-code.mdx similarity index 93% rename from docs/content/guides/dagster-pipes/subprocess/modify-external-code.mdx rename to docs/content/concepts/dagster-pipes/subprocess/modify-external-code.mdx index 511396685915a..4b66ba247ba28 100644 --- a/docs/content/guides/dagster-pipes/subprocess/modify-external-code.mdx +++ b/docs/content/concepts/dagster-pipes/subprocess/modify-external-code.mdx @@ -7,7 +7,7 @@ description: "With Dagster Pipes, you can incorporate existing code into Dagster -This is part two of the [Using Dagster Pipes](/guides/dagster-pipes/subprocess) tutorial. +This is part two of the [Using Dagster Pipes](/concepts/dagster-pipes/subprocess) tutorial. @@ -16,7 +16,7 @@ At this point, you should have two files: - `external_code.py` which is a standalone Python script that you want to orchestrate with Dagster. - `dagster_code.py` which includes a Dagster asset and other Dagster definitions. -In this section, you'll learn how to modify the standalone Python script to work with [Dagster Pipes](/guides/dagster-pipes) in order to stream information back to Dagster. To do this, you'll: +In this section, you'll learn how to modify the standalone Python script to work with [Dagster Pipes](/concepts/dagster-pipes) in order to stream information back to Dagster. To do this, you'll: - [Make Dagster context available in external code](#step-1-make-dagster-context-available-in-external-code) - [Stream log messages back to Dagster](#step-2-send-log-messages-to-dagster) @@ -102,7 +102,7 @@ Sometimes, you may want to log information from your external code as structured ### Report asset materialization -Similar to [reporting materialization metadata within the Dagster process](/concepts/assets/software-defined-assets#recording-materialization-metadata), you can also report asset materialization back to Dagster from the external process. +Similar to [reporting materialization metadata within the Dagster process](/concepts/metadata-tags/asset-metadata), you can also report asset materialization back to Dagster from the external process. In this example, we’re passing a piece of metadata named `total_orders` to the `metadata` parameter of the . This payload will be sent from the external process back to Dagster: @@ -318,9 +318,9 @@ In this tutorial, you learned how to get access to Dagster Pipes context, report What's next? 
From here, you can:

-- Learn about other capabilities of executing external code in subprocess via Dagster Pipes in the [Subprocess reference](/guides/dagster-pipes/subprocess/reference)
-- Learn how to [customize your own Dagster Pipes protocols](/guides/dagster-pipes/dagster-pipes-details-and-customization)
+- Learn about other capabilities of executing external code in a subprocess via Dagster Pipes in the [Subprocess reference](/concepts/dagster-pipes/subprocess/reference)
+- Learn how to [customize your own Dagster Pipes protocols](/concepts/dagster-pipes/dagster-pipes-details-and-customization)

-
+
diff --git a/docs/content/concepts/dagster-pipes/subprocess/reference.mdx b/docs/content/concepts/dagster-pipes/subprocess/reference.mdx
new file mode 100644
index 0000000000000..9aaa62150ad53
--- /dev/null
+++ b/docs/content/concepts/dagster-pipes/subprocess/reference.mdx
@@ -0,0 +1,357 @@
+---
+title: "Dagster Pipes subprocess reference | Dagster Docs"
+description: "This page shows ways to execute external code with Dagster Pipes with different entities in the Dagster system."
+---
+
+# Dagster Pipes subprocess reference
+
+This reference shows usage of Dagster Pipes with other entities in the Dagster system. For a step-by-step walkthrough, refer to the [Dagster Pipes tutorial](/concepts/dagster-pipes/subprocess).
+
+---
+
+## Specifying environment variables and extras
+
+When launching the subprocess, you may want to make environment variables or additional parameters available in the external process. Extras are arbitrary, user-defined parameters made available on the context object in the external process.
+
+
+
+
+In the external code, you can access extras via the `PipesContext` object:
+
+```python file=/guides/dagster/dagster_pipes/subprocess/with_extras_env/external_code.py lines=2-
+import os
+
+import pandas as pd
+from dagster_pipes import PipesContext, open_dagster_pipes
+
+
+def main():
+    orders_df = pd.DataFrame({"order_id": [1, 2], "item_id": [432, 878]})
+    total_orders = len(orders_df)
+    # get the Dagster Pipes context
+    context = PipesContext.get()
+    # get all extras provided by Dagster asset
+    print(context.extras)
+    # get the value of an extra
+    print(context.get_extra("foo"))
+    # get env var
+    print(os.environ["MY_ENV_VAR_IN_SUBPROCESS"])
+
+
+if __name__ == "__main__":
+    # connect to Dagster Pipes
+    with open_dagster_pipes():
+        main()
+```
+
+
+
+
+The `run` method of the `PipesSubprocessClient` resource also accepts `env` and `extras`, which allow you to specify environment variables and extra arguments when executing the subprocess:
+
+Note: We're using `os.environ` in this example, but Dagster's recommendation is to use `EnvVar` in production.
+
+```python file=/guides/dagster/dagster_pipes/subprocess/with_extras_env/dagster_code.py
+import shutil
+
+from dagster import (
+    AssetExecutionContext,
+    Definitions,
+    MaterializeResult,
+    PipesSubprocessClient,
+    asset,
+    file_relative_path,
+)
+
+
+@asset
+def subprocess_asset(
+    context: AssetExecutionContext, pipes_subprocess_client: PipesSubprocessClient
+) -> MaterializeResult:
+    cmd = [shutil.which("python"), file_relative_path(__file__, "external_code.py")]
+    return pipes_subprocess_client.run(
+        command=cmd,
+        context=context,
+        extras={"foo": "bar"},
+        env={
+            "MY_ENV_VAR_IN_SUBPROCESS": "my_value",
+        },
+    ).get_materialize_result()
+
+
+defs = Definitions(
+    assets=[subprocess_asset],
+    resources={"pipes_subprocess_client": PipesSubprocessClient()},
+)
+```
+
+
+
+
+---
+
+## Working with @asset_check
+
+Sometimes, you may not want to materialize an asset, but instead want to report a data quality check result. When your asset has data quality checks defined in `@asset_check`:
+
+
+
+
+
+From the external code, you can report to Dagster that an asset check has been performed via `report_asset_check`. Note that `asset_key` in this case is required, and must match the asset key defined in `@asset_check`:
+
+```python file=/guides/dagster/dagster_pipes/subprocess/with_asset_check/external_code.py
+import pandas as pd
+from dagster_pipes import PipesContext, open_dagster_pipes
+
+
+def main():
+    orders_df = pd.DataFrame({"order_id": [1, 2], "item_id": [432, 878]})
+    # get the Dagster Pipes context
+    context = PipesContext.get()
+    # send structured metadata back to Dagster
+    context.report_asset_check(
+        asset_key="my_asset",
+        passed=orders_df[["item_id"]].notnull().all().bool(),
+        check_name="no_empty_order_check",
+    )
+
+
+if __name__ == "__main__":
+    # connect to Dagster Pipes
+    with open_dagster_pipes():
+        main()
+```
+
+
+
+
+On Dagster's side, the `PipesClientCompletedInvocation` object returned from `PipesSubprocessClient` includes a `get_asset_check_result` method, which you can use to access the event reported by the subprocess.
+
+```python file=/guides/dagster/dagster_pipes/subprocess/with_asset_check/dagster_code.py
+import shutil
+
+from dagster import (
+    AssetCheckExecutionContext,
+    AssetCheckResult,
+    Definitions,
+    MaterializeResult,
+    PipesSubprocessClient,
+    asset,
+    asset_check,
+    file_relative_path,
+)
+
+
+@asset
+def my_asset(): ...
+
+
+@asset_check(asset="my_asset")
+def no_empty_order_check(
+    context: AssetCheckExecutionContext, pipes_subprocess_client: PipesSubprocessClient
+) -> AssetCheckResult:
+    cmd = [
+        shutil.which("python"),
+        file_relative_path(__file__, "external_code.py"),
+    ]
+    return pipes_subprocess_client.run(
+        command=cmd, context=context.op_execution_context
+    ).get_asset_check_result()
+
+
+defs = Definitions(
+    assets=[my_asset],
+    asset_checks=[no_empty_order_check],
+    resources={"pipes_subprocess_client": PipesSubprocessClient()},
+)
+```
+
+
+
+
+---
+
+## Working with multi-assets
+
+Sometimes, you may invoke a single call to an API that results in multiple tables being updated, or you may have a single script that computes multiple assets. In these cases, you can use Dagster Pipes to report back on multiple assets at once.
+
+
+
+
+
+**Note**: when working with multi-assets, `report_asset_materialization` may only be called once per unique asset key. If called more than once, an error similar to the following will surface:
+
+```bash
+Calling {method} with asset key {asset_key} is undefined.
Asset has already been materialized, so no additional data can be reported for it +``` + +Instead, you’ll need to set the `asset_key` parameter for each instance of : + +```python file=/guides/dagster/dagster_pipes/subprocess/with_multi_asset/external_code.py +import pandas as pd +from dagster_pipes import PipesContext, open_dagster_pipes + + +def main(): + orders_df = pd.DataFrame( + {"order_id": [1, 2, 3], "item_id": [432, 878, 102], "user_id": ["a", "b", "a"]} + ) + total_orders = len(orders_df) + total_users = orders_df["user_id"].nunique() + + # get the Dagster Pipes context + context = PipesContext.get() + # send structured metadata back to Dagster. asset_key is required when there are multiple assets + context.report_asset_materialization( + asset_key="orders", metadata={"total_orders": total_orders} + ) + context.report_asset_materialization( + asset_key="users", metadata={"total_users": total_users} + ) + + +if __name__ == "__main__": + # connect to Dagster Pipes + with open_dagster_pipes(): + main() +``` + + + + + +In the Dagster code, you can use to define a single asset that represents multiple assets. The `PipesClientCompletedInvocation` object returned from `PipesSubprocessClient` includes a `get_results` method, which you can use to access all the events, such as multiple and , reported by the subprocess: + +```python file=/guides/dagster/dagster_pipes/subprocess/with_multi_asset/dagster_code.py +import shutil + +from dagster import ( + AssetExecutionContext, + AssetSpec, + Definitions, + PipesSubprocessClient, + file_relative_path, + multi_asset, +) + + +@multi_asset(specs=[AssetSpec("orders"), AssetSpec("users")]) +def subprocess_asset( + context: AssetExecutionContext, pipes_subprocess_client: PipesSubprocessClient +): + cmd = [ + shutil.which("python"), + file_relative_path(__file__, "external_code.py"), + ] + return pipes_subprocess_client.run(command=cmd, context=context).get_results() + + +defs = Definitions( + assets=[subprocess_asset], + resources={"pipes_subprocess_client": PipesSubprocessClient()}, +) +``` + + + + +--- + +## Passing custom data + +Sometimes, you may want to pass data back from the external process for use in the orchestration code for purposes other than reporting directly to Dagster such as use in creating an output. In this example we use custom messages to create an I/O managed output that is returned from the asset. + + + + +In the external code, we send messages using `report_custom_message`. The message can be any data that is JSON serializable. + +```python file=/guides/dagster/dagster_pipes/subprocess/custom_messages/external_code.py +import pandas as pd +from dagster_pipes import PipesContext, open_dagster_pipes + + +def main(): + # get the Dagster Pipes context + context = PipesContext.get() + + # compute the full orders data + orders = pd.DataFrame( + { + "order_id": [1, 2, 3], + "item_id": [321, 654, 987], + "order_details": [..., ..., ...], # imagine large data, + # and more columns + } + ) + + # send a smaller table to be I/O managed by Dagster and passed to downstream assets + summary_table = pd.DataFrame(orders[["order_id", "item_id"]]) + context.report_custom_message(summary_table.to_dict()) + + context.report_asset_materialization(metadata={"total_orders": len(orders)}) + + +if __name__ == "__main__": + # connect to Dagster Pipes + with open_dagster_pipes(): + main() +``` + + + + +In the Dagster code we receive custom messages using `get_custom_messages`. 
+ +```python file=/guides/dagster/dagster_pipes/subprocess/custom_messages/dagster_code.py +import shutil + +import pandas as pd + +from dagster import ( + AssetExecutionContext, + Definitions, + Output, + PipesSubprocessClient, + asset, + file_relative_path, +) + + +@asset +def subprocess_asset( + context: AssetExecutionContext, + pipes_subprocess_client: PipesSubprocessClient, +) -> Output[pd.DataFrame]: + cmd = [shutil.which("python"), file_relative_path(__file__, "external_code.py")] + result = pipes_subprocess_client.run( + command=cmd, + context=context, + ) + + # a small summary table gets reported as a custom message + messages = result.get_custom_messages() + if len(messages) != 1: + raise Exception("summary not reported") + + summary_df = pd.DataFrame(messages[0]) + + # grab any reported metadata off of the materialize result + metadata = result.get_materialize_result().metadata + + # return the summary table to be loaded by Dagster for downstream assets + return Output( + value=summary_df, + metadata=metadata, + ) + + +defs = Definitions( + assets=[subprocess_asset], + resources={"pipes_subprocess_client": PipesSubprocessClient()}, +) +``` + + + diff --git a/docs/content/concepts/io-management/io-managers-legacy.mdx b/docs/content/concepts/io-management/io-managers-legacy.mdx index 15955c3c63270..5494834ad5a51 100644 --- a/docs/content/concepts/io-management/io-managers-legacy.mdx +++ b/docs/content/concepts/io-management/io-managers-legacy.mdx @@ -40,7 +40,7 @@ IO Managers are user-provided objects that store asset and op outputs and load t Functions decorated by , , and can have parameters and return values that are loaded from and written to persistent storage. let the user control how this data is stored and how it's loaded in downstream ops and assets. For `@asset` and `@multi_asset`, the IO manager effectively determines where the physical asset lives. -The IO manager APIs make it easy to separate code that's responsible for logical data transformation from code that's responsible for reading and writing the results. Software-defined assets and ops can focus on business logic, while IO managers handle I/O. This separation makes it easier to test the business logic and run it in different environments. +The IO manager APIs make it easy to separate code that's responsible for logical data transformation from code that's responsible for reading and writing the results. Asset definitions and ops can focus on business logic, while IO managers handle I/O. This separation makes it easier to test the business logic and run it in different environments. For non-asset jobs with inputs that aren't connected to upstream outputs, see the [Unconnected Inputs](/concepts/io-management/unconnected-inputs) overview. @@ -51,7 +51,7 @@ that are responsible for storing the output of an asset or op and loading it as to downstream assets or ops. For example, an IO manager might store and load objects from files on a filesystem. -Each software-defined asset can have its own IO manager. In the [multi-asset](/concepts/assets/multi-assets) case where multiple assets are outputted, each outputted asset can be handled with a different IO manager: +Each asset definition can have its own IO manager. In the [multi-asset](/concepts/assets/multi-assets) case where multiple assets are outputted, each outputted asset can be handled with a different IO manager:
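+
+For example, here is a minimal sketch of a multi-asset where each output is handled by a different IO manager, selected via `io_manager_key`. The resource keys are illustrative and must be bound to real IO managers in your repository:
+
+```python
+from dagster import AssetOut, multi_asset
+
+
+@multi_asset(
+    outs={
+        "s3_asset": AssetOut(io_manager_key="s3_io_manager"),
+        "adls_asset": AssetOut(io_manager_key="adls2_io_manager"),
+    },
+)
+def my_assets():
+    # Each returned value is stored by the IO manager named on its AssetOut
+    return "store_me_on_s3", "store_me_on_adls2"
+```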
    This guide covers using the new Pythonic resources system introduced in @@ -16,7 +16,9 @@ description: I/O Managers determine how to store asset/op outputs and load asset guide. -I/O Managers are user-provided objects that store asset and op outputs and load them as inputs to downstream assets and ops. They can be a powerful tool that helps you reduce boilerplate code and easily change where your data is stored. +I/O managers are user-provided objects that store asset and op outputs and load them as inputs to downstream assets and ops. They can be a powerful tool that reduces boilerplate code and easily changes where your data is stored. + +Functions decorated by , , and can return values that are written to persistent storage. Downstream assets and ops can have parameters that load the values from persistent storage. let the user control how this data is stored and how it's loaded in downstream ops and assets. For `@asset` and `@multi_asset`, the I/O manager effectively determines where the physical asset lives.
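+
+As a quick illustration of that handoff, consider this minimal sketch, which relies on the default filesystem I/O manager and requires no custom code:
+
+```python
+from dagster import asset
+
+
+@asset
+def upstream_asset():
+    # The return value is handed to the I/O manager's handle_output
+    return [1, 2, 3]
+
+
+@asset
+def downstream_asset(upstream_asset):
+    # The parameter is populated by the I/O manager's load_input
+    return upstream_asset + [4]
+```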
    -## Relevant APIs - -| Name | Description | | -| ------------------------------------------------------------------- | ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------ | - | -| | A base class used to define configurable I/O managers, which are also configurable resources. | ƒ | -| | A base class used to specify configuration for more advanced I/O managers, where configuration is separate from the `IOManager` implementation class. | | -| | Base class for standalone I/O managers which are constructed by `ConfigurableIOManagerFactories`. | | -| | Function for directly constructing an , to be passed to the method. This is designed primarily for testing purposes. | | -| | Function for directly constructing an , to be passed to the method. This is designed primarily for testing purposes. | | +The I/O manager APIs make it easy to separate code that's responsible for logical data transformation from code that's responsible for reading and writing the results. Asset definitions and ops can focus on business logic, while I/O managers handle I/O. This separation makes it easier to test the business logic and run it in different environments. -## Overview +For non-asset jobs with inputs that aren't connected to upstream outputs, see the [Unconnected Inputs](/concepts/io-management/unconnected-inputs) overview. -Functions decorated by , , and can have parameters and return values that are loaded from and written to persistent storage. let the user control how this data is stored and how it's loaded in downstream ops and assets. For `@asset` and `@multi_asset`, the I/O manager effectively determines where the physical asset lives. +--- -The I/O manager APIs make it easy to separate code that's responsible for logical data transformation from code that's responsible for reading and writing the results. Software-defined assets and ops can focus on business logic, while I/O managers handle I/O. This separation makes it easier to test the business logic and run it in different environments. +## Basics -For non-asset jobs with inputs that aren't connected to upstream outputs, see the [Unconnected Inputs](/concepts/io-management/unconnected-inputs) overview. +- [When to use I/O managers](#when-to-use-io-managers) +- [Outputs and downstream inputs](#outputs-and-downstream-inputs) +- [Built-in I/O managers](#built-in-io-managers) +- [I/O managers are resources](#io-managers-are-resources) ### When to use I/O managers -I/O managers are a powerful tool that can simplify your code and allow you to easily modify where your assets are stored. They can be useful in situations where: - -- You store many of your assets in the same location and follow a consistent set of rules to determine the storage path. -- You want to swap out I/O manager implementations to change how your assets are stored in local, staging, and production environments. -- Your asset functions load the upstream dependencies into memory in order to do the function computation. +If you find yourself writing the same code at the start and end of each asset or op to load and store data, you may want to consider factoring that code into an I/O manager. They can be useful in situations where: -If you find yourself writing the same code at the start and end of each asset or op to load and store data, you may want to consider factoring that code into an I/O manager. 
+- Many assets are stored in the same location and follow a consistent set of rules to determine the storage path
+- Assets should be stored differently in local, staging, and production environments
+- Asset functions load the upstream dependencies into memory to do the computation

-However, I/O managers are not required, nor are they the best option, in all scenarios. For example, an asset that executes a Snowflake query to create a new table based on the data of another table. This asset would depend on the existing table, but does not need to load that table in memory in order to execute the query:
+However, I/O managers are not required, nor are they the best option in all scenarios. For example, consider an asset that executes a Snowflake query to create a new table based on the data of another table. This asset would depend on the existing table, but doesn't need to load that table in memory in order to execute the query:

-```python file=/tutorial/saving/no_io_assets_examples.py startafter=start_snowflake_example endbefore=end_snowflake_example
+```python file=/concepts/io_management/no_io_assets_examples.py startafter=start_snowflake_example endbefore=end_snowflake_example
@asset(deps=[orders])
def returns():
    conn = get_snowflake_connection()
@@ -84,7 +79,7 @@ that are responsible for storing the output of an asset or op and loading it as to downstream assets or ops. For example, an I/O manager might store and load objects from files on a filesystem.

-Each software-defined asset can have its own I/O manager. In the [multi-asset](/concepts/assets/multi-assets) case where multiple assets are outputted, each outputted asset can be handled with a different I/O manager:
+Each asset definition can have its own I/O manager. In the [multi-asset](/concepts/assets/multi-assets) case where multiple assets are outputted, each outputted asset can be handled with a different I/O manager:
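+As a sketch of that per-output wiring (the `fs_io_manager` and `s3_io_manager` resource keys are hypothetical and assumed to be bound in your `Definitions`):
+
+```python
+from dagster import AssetOut, multi_asset
+
+
+@multi_asset(
+    outs={
+        "stored_locally": AssetOut(io_manager_key="fs_io_manager"),
+        "stored_in_cloud": AssetOut(io_manager_key="s3_io_manager"),
+    }
+)
+def split_assets():
+    # Each returned value is handled by the I/O manager named on its output.
+    return 1, 2
+```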
    , stores and retrieves values from pickle files in the local filesystem. If a job is invoked via , the default I/O manager is switched to , which stores outputs in memory. -Dagster provides out-of-the-box I/O managers for popular storage systems: AWS S3 (), Azure Blob Storage (), GCS (), and Snowflake () - or you can write your own: either from scratch or by extending the `UPathIOManager` if you want to store data in an `fsspec`-supported filesystem. For a full list of Dagster-provided I/O managers, refer to the [built-in I/O managers list](#built-in-io-managers-list). +Dagster provides out-of-the-box I/O managers for popular storage systems, such as Amazon S3 and Snowflake, or you can write your own: + +- From scratch, or +- By extending the `UPathIOManager` if you want to store data in an `fsspec`-supported filesystem + +For a full list of Dagster-provided I/O managers, refer to the [built-in I/O managers list](#built-in-io-managers-1). ### I/O managers are resources -I/O managers are provided through the [resources](/concepts/resources) system which means you can supply different I/O managers for the same assets or ops in different situations. For example, you might use an in-memory I/O manager for unit-testing and an Amazon S3 I/O manager in production. +I/O managers are provided through the [resources](/concepts/resources) system, which means you can supply different I/O managers for the same assets or ops in different situations. For example, you might use an in-memory I/O manager for unit-testing and an Amazon S3 I/O manager in production. --- -## Using I/O managers with Software-defined Assets +## Using I/O managers with asset definitions ### Applying I/O managers to assets @@ -238,6 +238,28 @@ defs = Definitions( ) ``` +### Using I/O managers to load source data + +Asset definitions often depend on data assets that are generated outside of Dagster, or in a different code location within Dagster, and it's often useful to use I/O managers to load the data from these assets. You can use an to define an asset with no materialization function, and you can assign an I/O manager to it using the method. Your other assets can then depend on it and load data from it, just as they would with a materializable asset. + +For example: + +```python file=/concepts/io_management/source_asset.py +from dagster import AssetKey, AssetSpec, Definitions, asset + +my_source_asset = AssetSpec(key=AssetKey("my_source_asset")).with_io_manager_key( + "s3_io_manager" +) + + +@asset +def my_derived_asset(my_source_asset): + return my_source_asset + [4] + + +defs = Definitions(assets=[my_source_asset, my_derived_asset]) +``` + ### Asset input I/O managers In some cases you may need to load the input to an asset with different logic than that specified by the upstream asset's I/O manager. @@ -275,6 +297,8 @@ defs = Definitions( ) ``` +--- + ## Using I/O managers with non-asset jobs ### Job-wide I/O manager @@ -356,6 +380,8 @@ def a_job(): op_2(op_1()) ``` +--- + ## Defining an I/O manager If you have specific requirements for where and how your outputs should be stored and retrieved, you can define your own I/O manager. This boils down to implementing two functions: one that stores outputs and one that loads inputs. @@ -401,8 +427,7 @@ class ExternalIOManager(IOManager): # setup stateful cache self._cache = {} - def handle_output(self, context: OutputContext, obj): - ... + def handle_output(self, context: OutputContext, obj): ... 
def load_input(self, context: InputContext): if context.asset_key in self._cache: @@ -605,6 +630,8 @@ def my_better_job(): better_analyze_as_numpy(df) ``` +--- + ## Examples ### A custom I/O manager that stores Pandas DataFrames in tables @@ -651,11 +678,7 @@ The `UPathIOManager` already implements the `load_input` and `handle_output` met import pandas as pd from upath import UPath -from dagster import ( - InputContext, - OutputContext, - UPathIOManager, -) +from dagster import InputContext, OutputContext, UPathIOManager class PandasParquetIOManager(UPathIOManager): @@ -726,9 +749,9 @@ In this case, the table names are encoded in the job definition. If, instead, yo ```python file=/concepts/io_management/metadata.py startafter=io_manager_start_marker endbefore=io_manager_end_marker class MyIOManager(ConfigurableIOManager): def handle_output(self, context: OutputContext, obj): - if context.metadata: - table_name = context.metadata["table"] - schema = context.metadata["schema"] + if context.definition_metadata: + table_name = context.definition_metadata["table"] + schema = context.definition_metadata["schema"] write_dataframe_to_table(name=table_name, schema=schema, dataframe=obj) else: raise Exception( @@ -736,9 +759,9 @@ class MyIOManager(ConfigurableIOManager): ) def load_input(self, context: InputContext): - if context.upstream_output and context.upstream_output.metadata: - table_name = context.upstream_output.metadata["table"] - schema = context.upstream_output.metadata["schema"] + if context.upstream_output and context.upstream_output.definition_metadata: + table_name = context.upstream_output.definition_metadata["table"] + schema = context.upstream_output.definition_metadata["schema"] return read_dataframe_from_table(name=table_name, schema=schema) else: raise Exception("Upstream output doesn't have schema and metadata set") @@ -861,9 +884,23 @@ class DataframeTableIOManagerWithMetadata(ConfigurableIOManager): Any entries yielded this way will be attached to the `Handled Output` event for this output. -Additionally, if the handled output is part of a software-defined asset, these metadata entries will also be attached to the materialization event created for that asset and show up on the Asset Details page for the asset. +Additionally, if the handled output is part of an asset definition, these metadata entries will also be attached to the materialization event created for that asset and show up on the Asset Details page for the asset. + +--- + +## See it in action + +For more examples of I/O managers, check out the following in our [Hacker News example](https://github.com/dagster-io/dagster/tree/master/examples/project_fully_featured): + +- [Parquet I/O manager](https://github.com/dagster-io/dagster/blob/master/examples/project_fully_featured/project_fully_featured/resources/parquet_io_manager.py) -## Built-in I/O managers list +Our [Type and Metadata example](https://github.com/dagster-io/dagster/tree/master/examples/assets_pandas_type_metadata) also covers writing custom I/O managers. 
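+All of these managers are supplied through resources. As a minimal sketch of swapping the default storage location for every asset in a code location (assuming the built-in `FilesystemIOManager`; the `base_dir` value is illustrative):
+
+```python
+from dagster import Definitions, FilesystemIOManager, asset
+
+
+@asset
+def my_table():
+    return [1, 2, 3]
+
+
+defs = Definitions(
+    assets=[my_table],
+    resources={
+        # Overrides the default I/O manager bound to the "io_manager" key
+        "io_manager": FilesystemIOManager(base_dir="/tmp/dagster_data"),
+    },
+)
+```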
+ +--- + +## References + +### Built-in I/O managers | Name | Description | Additional Documentation | | ------------------------------------------------------------------------------------------ | ----------------------------------------------------------------------------- | ---------------------------------------------------------------------------------------------------------------------- | @@ -880,10 +917,12 @@ Additionally, if the handled output is part of a software-defined asset, these m | | Stores PySpark DataFrame outputs in DuckDB. | | | | Stores Polars DataFrame outputs in DuckDB. | | -## See it in action - -For more examples of I/O managers, check out the following in our [Hacker News example](https://github.com/dagster-io/dagster/tree/master/examples/project_fully_featured): - -- [Parquet I/O Manager](https://github.com/dagster-io/dagster/blob/master/examples/project_fully_featured/project_fully_featured/resources/parquet_io_manager.py) +### Relevant APIs -Our [Type and Metadata example](https://github.com/dagster-io/dagster/tree/master/examples/assets_pandas_type_metadata) also covers writing custom I/O managers. +| Name | Description | | +| ------------------------------------------------------------------- | ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------ | - | +| | A base class used to define configurable I/O managers, which are also configurable resources. | | +| | A base class used to specify configuration for more advanced I/O managers, where configuration is separate from the `IOManager` implementation class. | | +| | Base class for standalone I/O managers which are constructed by `ConfigurableIOManagerFactories`. | | +| | Function for directly constructing an , to be passed to the method. This is designed primarily for testing purposes. | | +| | Function for directly constructing an , to be passed to the method. This is designed primarily for testing purposes. | | diff --git a/docs/content/concepts/io-management/unconnected-inputs.mdx b/docs/content/concepts/io-management/unconnected-inputs.mdx index 7c48c9c7bf526..fd3557914493a 100644 --- a/docs/content/concepts/io-management/unconnected-inputs.mdx +++ b/docs/content/concepts/io-management/unconnected-inputs.mdx @@ -1,33 +1,31 @@ --- -title: Unconnected Inputs | Dagster -description: +title: Unconnected inputs in op jobs | Dagster +description: "Learn to work with unconnected inputs in op jobs." --- -# Unconnected Inputs +# Unconnected inputs in op jobs -## Relevant APIs +Ops in a job may have input definitions that don't correspond to the outputs of upstream ops. -| Name | Description | -| -------------------------------------------------------------------- | -------------------------------------------------------------------------------- | -| | The decorator used to define a Dagster Type Loader. | -| | The base class used to specify how to load inputs that depends on the type. | -| | The context object provided to the function decorated by `@dagster_type_loader`. | -| | The decorator used to define an input manager. | -| | The base class used to specify how to load inputs. | +Values for these inputs can be provided in a few ways. 
Dagster will check the following, in order, and use the first available: -## Overview +- **Input manager** - If the input to a job comes from an external source, such as a table in a database, you may want to define a resource responsible for loading it. This makes it easy to swap out implementations in different jobs and mock it in tests. -Ops in a job may have input definitions that don't correspond to the outputs of upstream ops. You can provide values for these inputs in a few different ways. Dagster checks each, in order, and uses the first that's available: + A special I/O manager, which can be referenced from s, can be used to load unconnected inputs. Refer to the [I/O manager documentation](/concepts/io-management/io-managers) for more information about I/O managers. -- **Input Manager** - If the input to a job comes from an external source, such as a table in a database, often it makes sense to define a resource that's responsible for loading it. This makes it easy to swap out implementations in different jobs and mock it in tests. A special IOManager, which can be referenced from s, can be used to load unconnected inputs. -- **Dagster Type Loader** - A provides a way to specify how to load inputs that depends on a type. A can be placed on , which can be placed on . -- **Default Values** - accepts a `default_value` argument. +- **Dagster Type loader** - A provides a way to specify how to load inputs that depends on a type. A can be placed on , which can be placed on . -## Examples +- **Default values** - accepts a `default_value` argument. -### Loading a built-in dagster type from config +**Unsure if I/O managers are right for you?** Check out the [When to use I/O managers](/concepts/io-management/io-managers#when-to-use-io-managers) section of the I/O manager documentation. -When you have an op at the beginning of a job that operates on a built-in dagster type like string or int, you can provide a value for that input via run config. +--- + +## Working with Dagster types + +### Loading a built-in Dagster type from config + +When you have an op at the beginning of a job that operates on a built-in Dagster type like `string` or `int`, you can provide a value for that input via run config. Here's a basic job with an unconnected string input: @@ -50,9 +48,9 @@ my_job.execute_in_process( ) ``` -### Loading a custom dagster type from config +### Loading a custom Dagster type from config -When you have an op at the beginning of your job that operates on a dagster type that you've defined, you can write your own to define how to load that input via run config. +When you have an op at the beginning of your job that operates on a Dagster type that you've defined, you can write your own to define how to load that input via run config. ```python file=/concepts/io_management/load_custom_type_from_config.py startafter=def_start_marker endbefore=def_end_marker from typing import Dict, Union @@ -118,13 +116,22 @@ my_job.execute_in_process( ) ``` -### Providing an input manager for an unconnected input +--- + +## Working with input managers + +### Providing an input manager for unconnected inputs + +When you have an op at the beginning of a job that operates on data from an external source, you might wish to separate that I/O from your op's business logic, in the same way you would with an I/O manager if the op were loading from an upstream output. 
-When you have an op at the beginning of a job that operates on data from an external source, you might wish to separate that I/O from your op's business logic, in the same way you would with an IO manager if the op were loading from an upstream output. +Use the following tabs to learn about how to achieve this in Dagster. -Dagster provides a few ways to achieve this: + + -One option is to write a function to load the input and decorate it with : +#### Option 1: Using the input_manager decorator + +In this example, we wrote a function to load the input and decorated it with : ```python file=/concepts/io_management/input_managers.py startafter=start_load_unconnected_via_fn endbefore=end_load_unconnected_via_fn @input_manager @@ -143,7 +150,12 @@ def simple_load_table_job(): my_op() ``` -Another option is to define a class that implements the interface. + + + +#### Option 2: Use a class to implement the InputManager interface + +In this example, we defined a class that implements the interface: ```python file=/concepts/io_management/input_managers.py startafter=start_load_unconnected_input endbefore=end_load_unconnected_input class Table1InputManager(InputManager): @@ -161,7 +173,7 @@ def load_table_job(): my_op() ``` -If you also want to use `Table1InputManager` to store outputs, or you want to override the `load_input` method of an IO Manager used elsewhere in the job, another option is to implement an instance of : +To use `Table1InputManager` to store outputs or override the `load_input` method of an I/O manager used elsewhere in the job, another option is to implement an instance of : ```python file=/concepts/io_management/input_managers.py startafter=start_load_unconnected_io endbefore=end_load_unconnected_io # in this example, TableIOManager is defined elsewhere and we just want to override load_input @@ -175,13 +187,16 @@ def io_load_table_job(): my_op() ``` -In all of these examples, setting the `input_manager_key` on an `In` controls how that input is loaded. + + + +In any of the examples in Option 1 or Option 2, setting the `input_manager_key` on an `In` controls how that input is loaded. -### Providing per-input config to an input manager +### Providing per-input config to input managers When launching a run, you might want to parameterize how particular inputs are loaded. -To accomplish this, you can define an `input_config_schema` on the IO manager or Input manager definition. The `load_input` function can access this config when storing or loading data, via the . +To accomplish this, you can define an `input_config_schema` on the I/O manager or input manager definition. The `load_input` function can access this config when storing or loading data, via the : ```python file=/concepts/io_management/input_managers.py startafter=start_per_input_config endbefore=end_per_input_config class MyConfigurableInputLoader(InputManager): @@ -202,7 +217,7 @@ def my_other_configurable_input_loader(context): return read_dataframe_from_table(name=context.config["table"]) ``` -Then, when executing a job, you can pass in this per-input config. 
+Then, when executing a job, you can pass in this per-input config: ```python file=/concepts/io_management/input_managers.py startafter=start_per_input_config_exec endbefore=end_per_input_config_exec load_table_job.execute_in_process( @@ -210,7 +225,7 @@ load_table_job.execute_in_process( ) ``` -### Using a input manager with subselection +### Using input managers with subselection You might want to execute a subset of ops in your job and control how the inputs of those ops are loaded. Custom input managers also help in these situations, because the inputs at the beginning of the subset become unconnected inputs. @@ -274,3 +289,15 @@ my_subselection_job.execute_in_process( op_selection=["op2"], ) ``` + +--- + +## Relevant APIs + +| Name | Description | +| -------------------------------------------------------------------- | -------------------------------------------------------------------------------- | +| | The decorator used to define a Dagster Type Loader. | +| | The base class used to specify how to load inputs that depends on the type. | +| | The context object provided to the function decorated by `@dagster_type_loader`. | +| | The decorator used to define an input manager. | +| | The base class used to specify how to load inputs. | diff --git a/docs/content/concepts/logging/custom-loggers.mdx b/docs/content/concepts/logging/custom-loggers.mdx index cc8a3d6921abb..c40f2a7249088 100644 --- a/docs/content/concepts/logging/custom-loggers.mdx +++ b/docs/content/concepts/logging/custom-loggers.mdx @@ -42,7 +42,14 @@ def json_console_logger(init_context): class JsonFormatter(logging.Formatter): def format(self, record): - return json.dumps(record.__dict__) + return json.dumps( + { + k: v + for k, v in record.__dict__.items() + # values for these keys are not directly JSON-serializable + if k not in ["dagster_event", "dagster_meta"] + } + ) handler.setFormatter(JsonFormatter()) logger_.addHandler(handler) diff --git a/docs/content/concepts/logging/loggers.mdx b/docs/content/concepts/logging/loggers.mdx index 6556dad6dc476..6debc1299f957 100644 --- a/docs/content/concepts/logging/loggers.mdx +++ b/docs/content/concepts/logging/loggers.mdx @@ -203,8 +203,7 @@ from dagster import Definitions, define_asset_job, asset @asset -def some_asset(): - ... +def some_asset(): ... the_job = define_asset_job("the_job", selection="*") diff --git a/docs/content/concepts/logging/python-logging.mdx b/docs/content/concepts/logging/python-logging.mdx index b5c615107728b..8208303dc7be1 100644 --- a/docs/content/concepts/logging/python-logging.mdx +++ b/docs/content/concepts/logging/python-logging.mdx @@ -98,7 +98,7 @@ python_logs: ## Configuring Python log handlers -In your `dagster.yaml` file, you can configure handlers and formatters that will apply to the Dagster instance. This will apply the same logging configuration to all runs. +In your `dagster.yaml` file, you can configure handlers, formatters and filters that will apply to the Dagster instance. This will apply the same logging configuration to all runs. For example: @@ -111,14 +111,19 @@ python_logs: level: INFO stream: ext://sys.stdout formatter: myFormatter + filters: + - myFilter formatters: myFormatter: format: "My formatted message: %(message)s" + filters: + myFilter: + name: dagster ``` -Handler and formatter configuration follows the [dictionary config schema format](https://docs.python.org/3/library/logging.config.html#logging-config-dictschema) in the Python logging module. 
Only the `handlers` and `formatters` dictionary keys will be accepted, as Dagster creates loggers internally.
+Handler, filter and formatter configuration follows the [dictionary config schema format](https://docs.python.org/3/library/logging.config.html#logging-config-dictschema) in the Python logging module. Only the `handlers`, `formatters` and `filters` dictionary keys will be accepted, as Dagster creates loggers internally.

-From there, standard `context.log` calls will output with the configured handlers and formatters. After execution, read the output log file `my_dagster_logs.log`. As expected, the log file contains the formatted log:
+From there, standard `context.log` calls will output with the configured handlers, formatters and filters. After execution, read the output log file `my_dagster_logs.log`. As expected, the log file contains the formatted log: log-file-output

+- Be any of the classes provided by Dagster
+- Be primitive Python types, which Dagster will convert to the appropriate
+
+For example, to attach the name of the table we expect to store the asset in, we'll add a `"dataset_name"` entry to the `metadata` argument:
+
+```python file=/concepts/assets/asset_definition_metadata.py startafter=start endbefore=end
+from dagster_duckdb import DuckDBResource
+
+from dagster import asset
+
+# ... other assets
+
+
+@asset(
+    deps=[iris_dataset],
+    metadata={"dataset_name": "iris.small_petals"},
+)
+def small_petals(duckdb: DuckDBResource) -> None:
+    with duckdb.get_connection() as conn:
+        conn.execute(
+            "CREATE TABLE iris.small_petals AS SELECT * FROM iris.iris_dataset WHERE"
+            " 'petal_length_cm' < 1 AND 'petal_width_cm' < 1"
+        )
+```
+
+Dagster provides a [standard set of metadata keys](#standard-asset-metadata-entries) that can be used for common types of metadata, such as an asset's URI or column schema. **Note:** These entries are intended to be a starting point, and we encourage you to create your own metadata keys that make sense within the context of your data platform.
+
+### Asset owners
+
+Did you know? If using Dagster+ Pro, you can create asset-based alerts that will automatically notify an asset's owners when triggered. Refer to the Dagster+ alert documentation for more information.
+
+An asset can have multiple owners, defined using the `owners` argument on the decorator. This argument accepts a list of owners, where each value is either an individual email address or a team. Teams must include a `team:` prefix; for example: `team:data-eng`.
+
+The asset in the following example has two owners: `richard.hendricks@hooli.com` and the `data-eng` team.
+
+```python file=/concepts/metadata-tags/asset_owners.py
+from dagster import asset
+
+
+@asset(owners=["richard.hendricks@hooli.com", "team:data-eng"])
+def leads(): ...
+```
+
+### Code references
+
+Attaching code references to an asset definition allows you to easily navigate to the asset's source code, either locally in your editor or in your source control repository. For more information, refer to the [Code references guide](/guides/dagster/code-references).
+
+---
+
+## Attaching materialization metadata
+
+Attaching materialization metadata to an asset is accomplished by returning a object containing a `metadata` parameter. This parameter accepts a dictionary of key/value pairs, where keys must be a string.
+
+When specifying values, use the utility class to wrap the data, ensuring it displays correctly in the UI.
Values can also be primitive Python types, which Dagster will convert to the appropriate . + +### Arbitrary metadata + +In the following example, we added a row count and preview to a `topstories` asset: + +```python file=/tutorial/building_an_asset_graph/assets_with_metadata.py lines=3-5,13,16-42 +import json +import requests +import pandas as pd +from dagster import AssetExecutionContext, MetadataValue, asset, MaterializeResult + + +@asset(deps=[topstory_ids]) +def topstories(context: AssetExecutionContext) -> MaterializeResult: + with open("data/topstory_ids.json", "r") as f: + topstory_ids = json.load(f) + + results = [] + for item_id in topstory_ids: + item = requests.get( + f"https://hacker-news.firebaseio.com/v0/item/{item_id}.json" + ).json() + results.append(item) + + if len(results) % 20 == 0: + context.log.info(f"Got {len(results)} items so far.") + + df = pd.DataFrame(results) + df.to_csv("data/topstories.csv") + + return MaterializeResult( + metadata={ + "num_records": len(df), # Metadata can be any key-value pair + "preview": MetadataValue.md(df.head().to_markdown()), + # The `MetadataValue` class has useful static methods to build Metadata + } + ) +``` + +Dagster provides a [standard set of metadata keys](#standard-asset-metadata-entries) that can be used for common types of metadata, such as an asset's URI or column schema. **Note:** These entries are intended to be a starting point, and we encourage you to create your own metadata keys that make sense within the context of your data platform. + +### Table and column metadata + +For assets which produce database tables, you can attach table metadata to provide additional context about the asset. Table metadata can include information such as the schema, row count, or column lineage. Refer to the [Table metadata documentation](/concepts/metadata-tags/asset-metadata/table-metadata) for more information, or the [Column-level lineage documentation](/concepts/metadata-tags/asset-metadata/column-level-lineage) for specific details on column-level lineage. + +--- + +## Viewing asset metadata in the Dagster UI + +Metadata attached to assets shows up in a few places in the [Dagster UI](/concepts/webserver/ui). + + + + +### Global asset lineage + +In the [**Global asset lineage**](/concepts/webserver/ui#global-asset-lineage) page, click on an asset to open the asset details in the sidepanel: + + + + + +If materialization metadata is numeric, it will display as a plot in the **Metadata plots** section of the sidepanel. + + + + +### Asset details + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
+| Tab | Description |
+| -------------- | ------------------------------------------------------------------------------------------------------------------------------------------- |
+| **Definition** | In the **Asset details** page, metadata will display in the **Definition** tab. |
+| **Overview** | If you have the **Use new asset overview page** user setting enabled, metadata will display in the **Overview** tab instead of **Definition**. |
+| **Plots** | If any materialization data is numeric, check out the **Plots** tab to view it. |
+| **Insights** | If using a Dagster+ Pro plan, numeric materialization metadata will also display in the **Insights** tab. |
    + + +### Dagster+ Insights + +A Dagster+ Pro plan is required to use this feature. + +Dagster+ users can view and add numeric asset materialization metrics to [Insights](/dagster-plus/insights), allowing you to track user-provided metrics alongside Dagster+ metrics. + + + + + +Refer to the [Integrating asset metadata into Dagster+ Insights](/dagster-plus/insights/asset-metadata) guide for more information. + + +
+
+---
+
+## References
+
+### APIs in this guide
+
+| Name | Description |
+| --------------------------------------- | ---------------------------------------------------------------------------------------------------------------------------------------- |
+| | A decorator used to define assets. |
+| | An object representing a successful materialization of an asset. |
+| | Utility class to wrap metadata values passed into Dagster events, which allows them to be displayed in the Dagster UI and other tooling. |
+
+### Standard asset metadata entries
+
+The following is a set of standard asset metadata entries that can be included in the dictionaries passed to `metadata` attributes of , , etc. Many of these receive special treatment in Dagster's UI, such as `dagster/column_schema` resulting in a **Columns** section on the **Overview** tab of the **Asset details** page.
+
+The `dagster` prefix indicates that the Dagster package takes responsibility for defining the meaning of these metadata entries.
+
+| Key | Value | Description |
+| ----------------------------- | -------------------- | ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------ |
+| `dagster/uri` | `str` | The URI for the asset, e.g. `"s3://my_bucket/my_object"` |
+| `dagster/column_schema` | `TableSchema` | For an asset that's a table, the schema of the columns in the table. Refer to the [Table metadata documentation](/concepts/metadata-tags/asset-metadata/table-metadata) for details. |
+| `dagster/column_lineage` | `TableColumnLineage` | For an asset that's a table, the lineage of column inputs to column outputs for the table. Refer to the [Column-level lineage documentation](/concepts/metadata-tags/asset-metadata/column-level-lineage) for details. |
+| `dagster/row_count` | `int` | For an asset that's a table, the number of rows in the table. Refer to the [Table metadata documentation](/concepts/metadata-tags/asset-metadata/table-metadata) for details. |
+| `dagster/partition_row_count` | `int` | For a partition of an asset that's a table, the number of rows in the partition. |
+| `dagster/relation_identifier` | `str` | A unique identifier for the table/view, typically fully qualified. For example, `my_database.my_schema.my_table` |
+| `dagster/code_references` | | A list of code references for the asset, such as file locations or references to Github URLs. Refer to the [Code references documentation](/guides/dagster/code-references) for details. Should only be provided in definition-level metadata, not materialization metadata. |
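+As a quick sketch of how these standard keys are attached in practice (a hypothetical asset; the values are illustrative):
+
+```python
+from dagster import MaterializeResult, asset
+
+
+@asset
+def logs_table():
+    # ... write the table to storage ...
+    yield MaterializeResult(
+        metadata={
+            # Standard keys from the table above
+            "dagster/uri": "s3://my_bucket/my_object",
+            "dagster/row_count": 1024,
+        }
+    )
+```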
+
+---
+
+## Related
+
+
+
+
+
+
+
diff --git a/docs/content/concepts/metadata-tags/asset-metadata/column-level-lineage.mdx b/docs/content/concepts/metadata-tags/asset-metadata/column-level-lineage.mdx
new file mode 100644
index 0000000000000..f67e2587cf244
--- /dev/null
+++ b/docs/content/concepts/metadata-tags/asset-metadata/column-level-lineage.mdx
@@ -0,0 +1,149 @@
+---
+title: "Column-level lineage | Dagster Docs"
+description: "Column lineage enables data and analytics engineers alike to understand how a column is created and used in your data platform."
+---
+
+# Column-level lineage
+
+For assets that produce database tables, column-level lineage can be a powerful tool for improving collaboration and debugging issues. Column lineage enables data and analytics engineers alike to understand how a column is created and used in your data platform.
+
+---
+
+## How it works
+
+Emitted as materialization metadata, column lineage can be:
+
+- Specified on assets defined in Dagster
+- Enabled for assets loaded from integrations like dbt
+
+Dagster uses this metadata to display the column's upstream and downstream dependencies, accessible via the asset's details page in the Dagster UI. **Note**: Viewing column-level lineage in the UI is a Dagster+ feature.
+
+---
+
+## Enabling column-level lineage
+
+### For assets defined in Dagster
+
+To enable column-level lineage on Dagster assets that produce database tables, you'll need to:
+
+1. Return a object containing a `metadata` parameter
+2. In `metadata`, use the `dagster/column_lineage` key to create a object
+3. In this object, use to define a list of columns
+4. For each column, use to define its dependencies. This object accepts `asset_key` and `column_name` arguments, allowing you to specify the name of the asset and column that make up the dependency.
+
+Let's take a look at an example:
+
+```python file=/concepts/metadata-tags/asset_column_lineage.py
+from dagster import (
+    AssetKey,
+    MaterializeResult,
+    TableColumnDep,
+    TableColumnLineage,
+    asset,
+)
+
+
+@asset(deps=[AssetKey("source_bar"), AssetKey("source_baz")])
+def my_asset():
+    yield MaterializeResult(
+        metadata={
+            "dagster/column_lineage": TableColumnLineage(
+                deps_by_column={
+                    "new_column_foo": [
+                        TableColumnDep(
+                            asset_key=AssetKey("source_bar"),
+                            column_name="column_bar",
+                        ),
+                        TableColumnDep(
+                            asset_key=AssetKey("source_baz"),
+                            column_name="column_baz",
+                        ),
+                    ],
+                    "new_column_qux": [
+                        TableColumnDep(
+                            asset_key=AssetKey("source_bar"),
+                            column_name="column_quuz",
+                        ),
+                    ],
+                }
+            )
+        }
+    )
+```
+
+When materialized, the `my_asset` asset will create two columns: `new_column_foo` and `new_column_qux`.
+
+The `new_column_foo` column is dependent on two other columns:
+
+1. `column_bar` from the `source_bar` asset
+2. `column_baz` from the `source_baz` asset
+
+And the second column, `new_column_qux`, is dependent on `column_quuz` from the `source_bar` asset.
+
+If using Dagster+, you can view the column-level lineage in the Dagster UI.
+
+### For assets loaded from integrations
+
+Column-level lineage is currently supported for the dbt integration. Refer to the [dbt documentation](/integrations/dbt/reference) for more information.
+
+---
+
+## Viewing column-level lineage in the Dagster UI
+
+Viewing column lineage in the UI is a Dagster+ feature.
+
+1. In the Dagster UI, open the **Asset details** page for an asset with column-level lineage enabled.
+2. Navigate to the **Overview** tab if it isn't already open.
+3.
In the **Columns** section, click the **branch** icon in the row of the column you want to view. The icon is on the far right side of the row:
+
+
+
+Highlighted column lineage icon in the Asset details page of the Dagster UI
+
+The graph will display the column's dependencies, grouped by asset:
+
+
+
+
+
+To view another column's lineage, click the **Column** dropdown and select another column.
+
+---
+
+## APIs in this guide
+
+| Name | Description |
+| ---------------------------------------- | ----------------------------------------------------------------------- |
+| | A decorator used to define assets. |
+| | An object representing a successful materialization of an asset. |
+| | Class that defines column outputs to column inputs for a tabular asset. |
+| | Object that represents a column in an asset. |
+
+---
+
+## Related
+
+
+
+
+
+
diff --git a/docs/content/concepts/metadata-tags/asset-metadata/table-metadata.mdx b/docs/content/concepts/metadata-tags/asset-metadata/table-metadata.mdx
new file mode 100644
index 0000000000000..0f6b732c23a33
--- /dev/null
+++ b/docs/content/concepts/metadata-tags/asset-metadata/table-metadata.mdx
@@ -0,0 +1,160 @@
+---
+title: "Table metadata | Dagster Docs"
+description: "Table metadata can be used to provide additional context about a tabular asset, such as its schema, row count, and more."
+---
+
+# Table metadata
+
+Table metadata provides additional context about a tabular asset, such as its schema, row count, and more. This metadata can be used to improve collaboration, debugging, and data quality in your data platform.
+
+Dagster supports attaching different types of table metadata to assets, including:
+
+- [**Column schema**](#attaching-column-schema), which describes the structure of the table, including column names and types
+- [**Row count**](#attaching-row-count), which describes the number of rows in a materialized table
+- [**Column-level lineage**](#attaching-column-level-lineage), which describes how a column is created and used by other assets
+
+---
+
+## Attaching column schema
+
+### For assets defined in Dagster
+
+Column schema metadata can be attached to Dagster assets either as [definition metadata](/concepts/metadata-tags/asset-metadata#attaching-definition-metadata) or [materialization metadata](/concepts/metadata-tags/asset-metadata#attaching-materialization-metadata), which will then be visible in the Dagster UI. For example:
+
+
+
+If the schema of your asset is pre-defined, you can attach it as definition metadata. If the schema is only known when an asset is materialized, you can attach it as metadata to the materialization.
+
+To attach schema metadata to an asset, you will need to:
+
+1. Construct a object with entries describing each column in the table
+2. Attach the `TableSchema` object to the asset as part of the `metadata` parameter under the `dagster/column_schema` key. This can be attached to your asset definition, or to the object returned by the asset function.
+ +Below are two examples of how to attach column schema metadata to an asset, one as definition metadata and one as materialization metadata: + +```python file=/concepts/metadata-tags/asset_column_schema.py +from dagster import AssetKey, MaterializeResult, TableColumn, TableSchema, asset + + +# Definition metadata +# Here, we know the schema of the asset, so we can attach it to the asset decorator +@asset( + deps=[AssetKey("source_bar"), AssetKey("source_baz")], + metadata={ + "dagster/column_schema": TableSchema( + columns=[ + TableColumn( + "name", + "string", + description="The name of the person", + ), + TableColumn( + "age", + "int", + description="The age of the person", + ), + ] + ) + }, +) +def my_asset(): ... + + +# Materialization metadata +# Here, the schema isn't known until runtime +@asset(deps=[AssetKey("source_bar"), AssetKey("source_baz")]) +def my_other_asset(): + column_names = ... + column_types = ... + + columns = [ + TableColumn(name, column_type) + for name, column_type in zip(column_names, column_types) + ] + + yield MaterializeResult( + metadata={"dagster/column_schema": TableSchema(columns=columns)} + ) +``` + +The schema for `my_asset` will be visible in the Dagster UI. + +### For assets loaded from integrations + +Dagster's dbt integration enables automatically attaching column schema metadata to assets loaded from dbt models. Refer to the [dbt documentation](/integrations/dbt/reference#fetching-column-level-metadata) for more information. + +--- + +## Attaching row count + +Row count metadata can be attached to Dagster assets as [materialization metadata](/concepts/metadata-tags/asset-metadata#attaching-materialization-metadata) to provide additional context about the number of rows in a materialized table. This will be highlighted in the Dagster UI. For example: + + + +In addition to showing the latest row count, Dagster will let you track changes in the row count over time, and you can use this information to monitor data quality. + +To attach row count metadata to an asset, you will need to attach a numerical value to the `dagster/row_count` key in the metadata parameter of the object returned by the asset function. For example: + +```python file=/concepts/metadata-tags/asset_row_count.py +import pandas as pd + +from dagster import AssetKey, MaterializeResult, asset + + +@asset(deps=[AssetKey("source_bar"), AssetKey("source_baz")]) +def my_asset(): + my_df: pd.DataFrame = ... + + yield MaterializeResult(metadata={"dagster/row_count": 374}) +``` + +--- + +## Attaching column-level lineage + +Column lineage enables data and analytics engineers alike to understand how a column is created and used in your data platform. Refer to the [Column-level lineage documentation](/concepts/metadata-tags/asset-metadata/column-level-lineage) for more information. + +--- + +## APIs in this guide + +| Name | Description | +| -------------------------------------------- | ---------------------------------------------------------------- | +| | A decorator used to define assets. | +| | An object representing a successful materialization of an asset. | +| | An object representing the schema of a tabular asset. | +| | Class that defines column information for a tabular asset. | +| | Class that defines constraints for a column in a tabular asset. 
|
+
+---
+
+## Related
+
+
+
+
+
+
+
diff --git a/docs/content/concepts/metadata-tags/kind-tags.mdx b/docs/content/concepts/metadata-tags/kind-tags.mdx
new file mode 100644
index 0000000000000..e2ede0b43be64
--- /dev/null
+++ b/docs/content/concepts/metadata-tags/kind-tags.mdx
@@ -0,0 +1,170 @@
+---
+title: "Kind Tags | Dagster Docs"
+description: "Use kind tags to easily categorize assets within your Dagster project."
+---
+
+# Kind Tags
+
+Kind tags can help you quickly identify the underlying system or technology used for a given asset in the Dagster UI. These tags allow you to organize assets within a Dagster project and can be used to filter or search across all your assets.
+
+## Adding Compute Kinds to assets
+
+You can add a tag to any asset by providing a value to the `compute_kind` argument.
+
+```python
+from dagster import asset
+
+
+@asset(compute_kind="dbt")
+def my_asset():
+    pass
+```
+
+When viewing the asset in the lineage view, the compute kind will be visible at the bottom of the asset.
+
+## Supported Icons
+
+Some kinds are given a branded icon in the UI. We currently support nearly 200 unique technology icons.
+
+| Value | Icon |
+| ------------------- | ---- |
+| `airbyte` | |
+| `airflow` | |
+| `airtable` | |
+| `atlan` | |
+| `aws` | |
+| `axioma` | |
+| `azure` | |
+| `azureml` | |
+| `bigquery` | |
+| `catboost` | |
+| `celery` | |
+| `census` | |
+| `chalk` | |
+| `claude` | |
+| `collibra` | |
+| `cplus` | |
+| `csharp` | |
+| `cube` | |
+| `dask` | |
+| `databricks` | |
+| `datadog` | |
+| `datahub` | |
+| `dbt` | |
+| `deltalake` | |
+| `discord` | |
+| `dlt` | |
+| `dlthub` | |
+| `docker` | |
+| `duckdb` | |
+| `excel` | |
+| `facebook` | |
+| `fivetran` | |
+| `gemini` | |
+| `github` | |
+| `gitlab` | |
+| `go` | |
+| `google` | |
+| `googlecloud` | |
+| `googlesheets` | |
+| `graphql` | |
+| `greatexpectations` | |
+| `hackernews` | |
+| `hashicorp` | |
+| `hex` | |
+| `hightouch` | |
+| `hudi` | |
+| `huggingface` | |
+| `iceberg` | |
+| `instagram` | |
+| `java` | |
+| `javascript` | |
+| `jupyter` | |
+| `k8s` | |
+| `kubernetes` | |
+| `lakefs` | |
+| `lightgbm` | |
+| `linear` | |
+| `linkedin` | |
+| `llama` | |
+| `looker` | |
+| `matplotlib` | |
+| `meltano` | |
+| `meta` | |
+| `metabase` | |
+| `microsoft` | |
+| `minstral` | |
+| `mlflow` | |
+| `modal` | |
+| `mongodb` | |
+| `montecarlo` | |
+| `mysql` | |
+| `.net` | |
+| `noteable` | |
+| `notion` | |
+| `numpy` | |
+| `omni` | |
+| `openai` | |
+| `openmetadata` | |
+| `optuna` | |
+| `oracle` | |
+| `pagerduty` | |
+| `pandas` | |
+| `pandera` | |
+| `papermill` | |
+| `papertrail` | |
+| `parquet` | |
+| `plotly` | |
+| `plural` | |
+| `polars` | |
+| `postgres` | |
+| `powerbi` | |
+| `prefect` | |
+| `python` | |
+| `pytorch` | |
+| `pytorchlightning` | |
+| `r` | |
+| `ray` | |
+| `react` | |
+| `reddit` | |
+| `redshift` | |
+| `rockset` | |
+| `rust` | |
+| `sagemaker` | |
+| `salesforce` | |
+| `scala` | |
+| `scikitlearn` | |
+| `scipy` | |
+| `sdf` | |
+| `secoda` | |
+| `segment` | |
+| `sharepoint` | |
+| `shell` | |
+| `shopify` | |
+| `slack` | |
+| `sling` | |
+| `snowflake` | |
+| `soda` | |
+| `spark` | |
+| `sql` | |
+| `sqlite` | |
+| `sqlmesh` | |
+| `sqlserver` | |
+| `stepfunctions` | |
+| `stitch` | |
+| `stripe` | |
+| `tableau` | |
+| `teams` | |
+| `tecton` | |
+| `tensorflow` | |
+| `thoughtspot` | |
+| `trino` | |
+| `twilio` | |
+| `twitter` | |
+| `typescript` | |
+| `vercel` | |
+| `w&b` | |
+| `x` | |
+| `xgboost` | |
+| `youtube` | |
+
+## Requesting additional icons
+
+The kinds icon pack is open source and anyone can contribute new icons to the public repo or request a new icon by filing an issue. Custom icons per deployment are not currently supported.
diff --git a/docs/content/concepts/metadata-tags/op-job-metadata.mdx b/docs/content/concepts/metadata-tags/op-job-metadata.mdx
new file mode 100644
index 0000000000000..e79e68ea51c5b
--- /dev/null
+++ b/docs/content/concepts/metadata-tags/op-job-metadata.mdx
@@ -0,0 +1,98 @@
+---
+title: "Op job metadata | Dagster Docs"
+description: Attach information to op jobs using metadata
+---
+
+# Op job metadata
+
+Metadata allows you to attach information to [op jobs](/concepts/ops-jobs-graphs/op-jobs) and the runs launched from those jobs. By the end of this guide, you'll understand what metadata is, how to attach it to an op job, and how to view the job metadata in the Dagster UI.
+
+---
+
+## How it works
+
+Attaching metadata to a job is done using the `metadata` argument and a dictionary of key/value pairs. Keys must be a string, but values can:
+
+- Be any of the classes provided by Dagster
+- Be primitive Python types, which Dagster will convert to the appropriate
+
+The information you provide can be whatever you want, but possible cases include:
+
+- Keeping track of the team responsible for maintaining a job
+- Linking to documentation or other resources
+- Displaying the git hash corresponding to the current job definition
+
+**Note:** If you are running Dagster using separate webserver and user code installations (more info [here](/deployment/overview)), then your `dagster-webserver` installation must be `>=0.14.18` to use metadata on jobs.
+
+---
+
+## Specifying op job metadata
+
+### Using the @job decorator
+
+```python file=/concepts/ops_jobs_graphs/metadata_tags.py startafter=start_metadata_on_job endbefore=end_metadata_on_job
+@op
+def my_op():
+    return "Hello World!"
+
+
+@job(
+    metadata={
+        "owner": "data team",  # will be converted to MetadataValue.text
+        "docs": MetadataValue.url("https://docs.dagster.io"),
+    }
+)
+def my_job_with_metadata():
+    my_op()
+```
+
+### Using the to_job method
+
+In addition to adding metadata on the decorator, you can also add metadata using the method:
+
+```python file=/concepts/ops_jobs_graphs/metadata_tags.py startafter=start_metadata_on_graph_to_job endbefore=end_metadata_on_graph_to_job
+@graph
+def my_graph():
+    my_op()
+
+
+my_second_job_with_metadata = my_graph.to_job(
+    metadata={"owner": "api team", "docs": MetadataValue.url("https://docs.dagster.io")}
+)
+```
+
+### Applying tags
+
+Tags can also be used to attach information to jobs. Refer to the [Tags](/concepts/metadata-tags/tags) documentation for more information.
+
+---
+
+## Viewing op job metadata in the Dagster UI
+
+After attaching metadata to a job, you can view it in the Dagster UI by navigating to the **Job overview** page. Metadata will be displayed in the right pane.
+
+
+
+---
+
+## Related
+
+
+
+
+
+
diff --git a/docs/content/concepts/metadata-tags/tags.mdx b/docs/content/concepts/metadata-tags/tags.mdx
new file mode 100644
index 0000000000000..b96a3c89a63a0
--- /dev/null
+++ b/docs/content/concepts/metadata-tags/tags.mdx
@@ -0,0 +1,192 @@
+---
+title: "Tags | Dagster Docs"
+description: "Use tags to easily find objects and runs in your Dagster project."
+---
+
+# Tags
+
+Over time, your Dagster project may grow to include a large number of Dagster definitions.
Using **tags** to label assets, ops, jobs, and job runs can make them easier to find. + +--- + +## Benefits + +Using tags helps you: + +- Organize definitions in your Dagster project +- Improve filtering in the Dagster UI, making it easier to focus on specific items +- Customize [run execution](#customizing-run-execution), including run concurrency and priority + +--- + +## How it works + +Tags are key/value pairs of strings that can be applied to assets, ops, jobs, and job runs using the `tags` argument. For example: + +```python file=/concepts/metadata-tags/tags.py startafter=start_asset_tags endbefore=end_asset_tags +@asset(tags={"domain": "marketing", "pii": "true"}) +def leads(): ... +``` + +Tags are helpful when you need to find specific items in your Dagster project. For example, tags can be used as a filter when looking at the **Asset catalog** in the Dagster UI, allowing you to only see specific assets. + +### Tag keys + +Valid tag keys have two segments: an optional prefix and name, separated by a slash (`/`). Prefixes are typically expected to be Python package names. For example: `dagster/priority` + +Prefixes and name segments must each: + +- Be 63 characters or less +- Contain only alphanumeric characters, dashes (`-`), underscores (`_`), and dots (`.`) + +### Tag values + +Tag values must: + +- Be 63 characters or less +- Contain only alphanumeric characters, dashes (`-`), underscores (`_`), and dots (`.`) +- Be a string or JSON that is serializable to a string + +--- + +## Specifying tags + +### Assets + +Tags are useful for organizing assets in non-hierarchical ways. Unlike [asset groups](/concepts/assets/software-defined-assets#grouping-assets), where each asset belongs to a single group, each asset can have many tags. For example: + +```python file=/concepts/metadata-tags/tags.py startafter=start_asset_tags endbefore=end_asset_tags +@asset(tags={"domain": "marketing", "pii": "true"}) +def leads(): ... +``` + +Other APIs that define assets can also accept tag arguments, such as , , and the . + +### Jobs + +Adding tags to a job will attach them to every run executed by the job. + +In some cases, Dagster will automatically add tags to runs. Refer to the [System run tags reference](#system-run-tags) for a list of tags. 
+
+
+
+#### Asset jobs
+
+Tags can be applied to [asset jobs](/concepts/assets/asset-jobs), which are defined using :
+
+```python file=/concepts/metadata-tags/tags.py startafter=start_asset_job_tags endbefore=end_asset_job_tags
+asset_job = define_asset_job(
+    name="marketing_job", selection="*", tags={"dagster/max_runtime": 10}
+)
+```
+
+
+
+
+#### Op jobs
+
+Tags can be applied to [op jobs](/concepts/ops-jobs-graphs/jobs), which are defined using :
+
+```python file=/concepts/metadata-tags/tags.py startafter=start_op_job_tags endbefore=end_op_job_tags
+@job(tags={"domain": "marketing"})
+def email_job():
+    send_email()
+```
+
+In addition to adding tags on the decorator, you can also add tags using the method:
+
+```python file=/concepts/metadata-tags/tags.py startafter=start_tags_on_graph_to_job endbefore=end_tags_on_graph_to_job
+my_second_job_with_tags = email_graph.to_job(tags={"domain": "marketing"})
+```
+
+
+
+
+---
+
+## Customizing run execution
+
+While tags are primarily used for labeling and organization, some run execution features are controlled using run tags:
+
+- [Customizing Kubernetes config](/deployment/guides/kubernetes/customizing-your-deployment)
+- [Specifying Celery config](/deployment/guides/kubernetes/deploying-with-helm-advanced#configuring-celery-queues)
+- [Setting concurrency limits when using the `QueuedRunCoordinator`](/guides/limiting-concurrency-in-data-pipelines)
+- [Setting the priority of different runs](/guides/customizing-run-queue-priority)
+
+---
+
+## Viewing tags in the Dagster UI
+
+
+
+View tags attached to runs by navigating to the **Runs** page in the UI:
+
+
+
+When executing a job, use the **Launchpad** to add tags to the run:
+
+
+
+---
+
+## References
+
+### System run tags
+
+The following table lists the tags Dagster will, on occasion, automatically add to runs.
+
+| Tag | Description |
+| ----------------------- | ----------------------------------- |
+| `dagster/op_selection`  | The op selection for the run        |
+| `dagster/partition`     | The partition of the run            |
+| `dagster/schedule_name` | The schedule that triggered the run |
+| `dagster/sensor_name`   | The sensor that triggered the run   |
+| `dagster/backfill`      | The backfill ID                     |
+| `dagster/parent_run_id` | The parent run of a re-executed run |
+| `dagster/image`         | The Docker image tag                |
+
+---
+
+## Related
+
+
+
+
+
+
+
+
diff --git a/docs/content/concepts/ops-jobs-graphs/graphs.mdx b/docs/content/concepts/ops-jobs-graphs/graphs.mdx
index 8b38e2727445a..c0b4e18b063c6 100644
--- a/docs/content/concepts/ops-jobs-graphs/graphs.mdx
+++ b/docs/content/concepts/ops-jobs-graphs/graphs.mdx
@@ -9,7 +9,7 @@ A graph is a set of interconnected [ops](/concepts/ops-jobs-graphs/ops) or sub-g

Graphs can be used in three different ways:

-- [**To back assets**](/concepts/assets/graph-backed-assets) - Basic software-defined assets are computed using a single op, but if computing one of your assets requires multiple discrete steps, you can compute it using a graph instead.
+- [**To back assets**](/concepts/assets/graph-backed-assets) - Basic assets are computed using a single op, but if computing one of your assets requires multiple discrete steps, you can compute it using a graph instead (see the sketch below).
- [**Directly inside a job**](/concepts/ops-jobs-graphs/op-jobs) - Each op job contains a graph.
- [**Inside other graphs**](/concepts/ops-jobs-graphs/nesting-graphs) - You can build complex graphs out of simpler graphs.
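+As a minimal sketch of a graph-backed asset (the op and asset names are hypothetical):
+
+```python
+from dagster import AssetsDefinition, graph, op
+
+
+@op
+def fetch_orders():
+    return [1, 2, None]
+
+
+@op
+def clean_orders(orders):
+    return [order for order in orders if order is not None]
+
+
+@graph
+def orders_graph():
+    return clean_orders(fetch_orders())
+
+
+# The graph's output becomes an asset computed in two discrete steps.
+orders_asset = AssetsDefinition.from_graph(orders_graph)
+```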
@@ -272,21 +272,19 @@ from dagster import asset, job, op @asset -def emails_to_send(): - ... +def emails_to_send(): ... @op -def send_emails(emails) -> None: - ... +def send_emails(emails) -> None: ... @job def send_emails_job(): - send_emails(emails_to_send.to_source_asset()) + send_emails(emails_to_send.get_asset_spec()) ``` -We must use the , because are used to represent assets that other assets or jobs depend on, in settings where they won't be materialized themselves. +We must use the , because are used to represent assets that other assets or jobs depend on, in settings where they won't be materialized themselves. If the asset is partitioned, then: @@ -403,9 +401,9 @@ def define_dep_dsl_graph() -> GraphDefinition: ### Inside assets -Op graphs can be used to create [software-defined assets](/concepts/assets/software-defined-assets). Graph-backed assets are useful if you have an existing op graph that produces and consumes assets. +Op graphs can be used to create [asset definitions](/concepts/assets/software-defined-assets). Graph-backed assets are useful if you have an existing op graph that produces and consumes assets. -Wrapping your graph inside a software-defined asset gives you all the benefits of software-defined assets — like cross-job lineage — without requiring you to change the code inside your graph. Refer to the [graph-backed assets documentation](/concepts/assets/graph-backed-assets) for more info and examples. +Wrapping your graph inside an asset definition gives you all the benefits of software-defined assets — like cross-job lineage — without requiring you to change the code inside your graph. Refer to the [graph-backed assets documentation](/concepts/assets/graph-backed-assets) for more info and examples. ### Directly inside op jobs diff --git a/docs/content/concepts/ops-jobs-graphs/job-execution.mdx b/docs/content/concepts/ops-jobs-graphs/job-execution.mdx index de4811ba4d3dc..e587ade000a62 100644 --- a/docs/content/concepts/ops-jobs-graphs/job-execution.mdx +++ b/docs/content/concepts/ops-jobs-graphs/job-execution.mdx @@ -15,7 +15,7 @@ Dagster provides several methods to execute [op](/concepts/ops-jobs-graphs/op-jo You can also launch jobs in other ways: -- [Schedules](/concepts/partitions-schedules-sensors/schedules) can be used to launch runs on a fixed interval. +- [Schedules](/concepts/automation/schedules) can be used to launch runs on a fixed interval. - [Sensors](/concepts/partitions-schedules-sensors/sensors) allow you to launch runs based on external state changes. --- @@ -220,8 +220,7 @@ For example, the following job will execute at most two ops at once with the `da } } ) -def tag_concurrency_job(): - ... +def tag_concurrency_job(): ... ``` **Note:** These limits are only applied on a per-run basis. You can apply op concurrency limits across multiple runs using the or . diff --git a/docs/content/concepts/ops-jobs-graphs/jobs.mdx b/docs/content/concepts/ops-jobs-graphs/jobs.mdx index 09af2ffb4d92c..e69cefcee9ea8 100644 --- a/docs/content/concepts/ops-jobs-graphs/jobs.mdx +++ b/docs/content/concepts/ops-jobs-graphs/jobs.mdx @@ -5,7 +5,7 @@ description: Jobs are the main unit of execution and monitoring in Dagster. # Jobs -Jobs are the main unit of execution and monitoring in Dagster. They allow you to execute a portion of a graph of [Software-defined Assets](/concepts/assets/software-defined-assets) or [ops](/concepts/ops-jobs-graphs/ops) based on a schedule or an external trigger. +Jobs are the main unit of execution and monitoring in Dagster. 
When a job begins, it kicks off a _run_. A run is a single execution of a job in Dagster. Runs can be launched and viewed in the Dagster UI.

@@ -15,7 +15,7 @@ When a job begins, it kicks off a _run_. A run is a single execution of a job in

Using jobs provides the following benefits:

-- **Automation**: With [schedules](/concepts/partitions-schedules-sensors/schedules) and [sensors](/concepts/partitions-schedules-sensors/sensors), jobs can be used to automate the execution of your Dagster pipelines. Refer to the [Automation guide](/concepts/automation) for more info.
+- **Automation**: With [schedules](/concepts/automation/schedules) and [sensors](/concepts/partitions-schedules-sensors/sensors), jobs can be used to automate the execution of your Dagster pipelines. Refer to the [Automation guide](/concepts/automation) for more info.
- **Control job run priority**: If using a [run queue](/deployment/run-coordinator), you can apply custom prioritization rules to how job runs are prioritized and executed.
- **Potential for improved efficiency**: By applying concurrency limits to job runs, there may be benefits to your pipeline's efficiency. Refer to the [Limiting run concurrency guide](/guides/limiting-concurrency-in-data-pipelines) for more info and examples.

@@ -23,16 +23,16 @@ Using jobs provides the following benefits:

## Uses

-Jobs are supported for both Software-defined Assets and ops, but the usage for each concept is unique. Refer to the following documentation for more info:
+Jobs are supported for both asset definitions and ops, but the usage for each concept is unique. Refer to the following documentation for more info:

- [Asset jobs](/concepts/assets/asset-jobs)
- [Op jobs](/concepts/ops-jobs-graphs/op-jobs)

With jobs, you can:

-- Automate the execution of [Software-defined Assets](/concepts/assets/software-defined-assets) and [ops](/concepts/ops-jobs-graphs/ops)
+- Automate the execution of [asset definitions](/concepts/assets/software-defined-assets) and [ops](/concepts/ops-jobs-graphs/ops)
- Materialize a selection of assets based on a schedule or external trigger (sensor)
-- Apply metadata about jobs and runs using [tags](/concepts/ops-jobs-graphs/metadata-tags)
+- Attach information using [metadata](/concepts/metadata-tags) and [tags](/concepts/metadata-tags/tags)
- View and launch runs of jobs in the [Dagster UI](/concepts/webserver/ui)

---

@@ -45,8 +45,12 @@ With jobs, you can:
  href="/concepts/ops-jobs-graphs/job-execution"
>
+Metadata is attached to jobs (via the `metadata` argument on the `@job` decorator), and tags are attached to runs that are created by executing a job.
-
-## Metadata
-
-Metadata allows you to attach information to a job. This information can be whatever you want, but possible use cases include:
-
-- keeping track of the team responsible for maintaining a job
-- linking to documentation or other resources
-- displaying the git hash corresponding to the current job definition
-
-Note: If you are running Dagster using separate webserver and user code installations (more info [here](/deployment/overview)), then your `dagster-webserver` installation must be >=0.14.18 to use metadata on jobs.
-
-### Specifying metadata
-
-When you attach metadata to a job, you do it as a dictionary of key-value pairs. The keys must be strings, but the values can be any one of the `MetadataValue` classes we provide. You can also use primitive Python types as values, and Dagster will convert them to the appropriate `MetadataValue` type.
-
-```python file=/concepts/ops_jobs_graphs/metadata_tags.py startafter=start_metadata_on_job endbefore=end_metadata_on_job
-@op
-def my_op():
-    return "Hello World!"
-
-
-@job(
-    metadata={
-        "owner": "data team",  # will be converted to MetadataValue.text
-        "docs": MetadataValue.url("https://docs.dagster.io"),
-    }
-)
-def my_job_with_metadata():
-    my_op()
-```
-
-In addition to adding metadata on the `@job` decorator, you can also add metadata using the `to_job` method.
-
-```python file=/concepts/ops_jobs_graphs/metadata_tags.py startafter=start_metadata_on_graph_to_job endbefore=end_metadata_on_graph_to_job
-@graph
-def my_graph():
-    my_op()
-
-
-my_second_job_with_metadata = my_graph.to_job(
-    metadata={"owner": "api team", "docs": MetadataValue.url("https://docs.dagster.io")}
-)
-```
-
-### Viewing Metadata
-
-After attaching metadata to a job, you can view it in the Dagster UI by navigating to the job overview page. Metadata will be displayed in the right pane.
-
-## Tags
-
-Tags allow you to attach information to the run created when you execute a job. Tags can contain any information you want, and Dagster will also attach some tags to your runs (we'll cover these later).
-
-### Specifying tags
-
-You can specify tags you want attached to every run by adding them to a job. Tags are specified as a dictionary of key-value pairs, where each key must be a string and each value must be a string or JSON that is serializable to a string.
-
-```python file=/concepts/ops_jobs_graphs/metadata_tags.py startafter=start_tags_on_job endbefore=end_tags_on_job
-@job(tags={"foo": "bar"})
-def my_job_with_tags():
-    my_op()
-```
-
-In addition to adding tags on the `@job` decorator, you can also add tags using the `to_job` method.
-
-```python file=/concepts/ops_jobs_graphs/metadata_tags.py startafter=start_tags_on_graph_to_job endbefore=end_tags_on_graph_to_job
-my_second_job_with_tags = my_graph.to_job(tags={"foo": "bar"})
-```
-
-When executing a job, you can add tags to the run using the Launchpad in the UI.
-
-### Viewing Tags
-
-You can view the tags that have been attached to runs by going to the Runs page in the UI.
-
-### Dagster-provided tags
-
-Dagster will automatically add tags to your runs in some cases, including:
-
-- `dagster/op_selection` - The op selection for the run, if applicable
-- `dagster/partition` - The partition of the run, if applicable
-- `dagster/schedule_name` - The schedule that triggered the run, if applicable
-- `dagster/sensor_name` - The sensor that triggered the run, if applicable
-- `dagster/backfill` - The backfill ID, if applicable
-- `dagster/parent_run_id` - The parent run of a re-executed run
-- `dagster/image` - The Docker image tag
-
-### Using tags to affect run execution
-
-Some features of Dagster are controlled using the tags attached to a run.
Some examples include: - -- [Customizing Kubernetes config](/deployment/guides/kubernetes/customizing-your-deployment) -- [Specifying celery config](/deployment/guides/kubernetes/deploying-with-helm-advanced#configuring-celery-queues) -- [Setting concurrency limits when using the `QueuedRunCoordinator`](/guides/limiting-concurrency-in-data-pipelines) -- [Setting the priority of different runs](/guides/customizing-run-queue-priority) diff --git a/docs/content/concepts/ops-jobs-graphs/nesting-graphs.mdx b/docs/content/concepts/ops-jobs-graphs/nesting-graphs.mdx index 7b3e802251c14..631e163799bce 100644 --- a/docs/content/concepts/ops-jobs-graphs/nesting-graphs.mdx +++ b/docs/content/concepts/ops-jobs-graphs/nesting-graphs.mdx @@ -218,7 +218,7 @@ from dagster import GraphOut @op def echo(i): - print(i) # noqa: T201 + print(i) @op diff --git a/docs/content/concepts/ops-jobs-graphs/op-events.mdx b/docs/content/concepts/ops-jobs-graphs/op-events.mdx index 9f2ebae492931..d13b6305fc581 100644 --- a/docs/content/concepts/ops-jobs-graphs/op-events.mdx +++ b/docs/content/concepts/ops-jobs-graphs/op-events.mdx @@ -126,7 +126,7 @@ to keep track of can be considered an asset. Generally, you'd want to send this event directly after you persist the asset to your external system. All events must define an `asset_key`, which is a unique identifier to describe the asset you are persisting. They can optionally include a `partition` if they're persisting a particular [partition](/concepts/partitions-schedules-sensors/partitions) of an asset. -If you're using [Software-defined Assets](/concepts/assets/software-defined-assets), you don't need to record these events explicitly – the framework handles it for you. +If you're using [asset definitions](/concepts/assets/software-defined-assets), you don't need to record these events explicitly – the framework handles it for you. ```python file=/concepts/ops_jobs_graphs/op_events.py startafter=start_asset_op endbefore=end_asset_op from dagster import AssetMaterialization, op, OpExecutionContext diff --git a/docs/content/concepts/ops-jobs-graphs/op-jobs.mdx b/docs/content/concepts/ops-jobs-graphs/op-jobs.mdx index 2ec9663bd54d8..aa100cd64b87f 100644 --- a/docs/content/concepts/ops-jobs-graphs/op-jobs.mdx +++ b/docs/content/concepts/ops-jobs-graphs/op-jobs.mdx @@ -7,7 +7,7 @@ description: An op job Looking to materialize{" "} - Software-defined Assets{" "} + asset definitions{" "} instead of ops? Check out the{" "} Asset jobs documentation. @@ -17,7 +17,7 @@ description: An op job Op jobs can be launched in a few different ways: - Manually from the Dagster UI -- At fixed intervals, by [schedules](/concepts/partitions-schedules-sensors/schedules) +- At fixed intervals, by [schedules](/concepts/automation/schedules) - When external changes occur, using [sensors](/concepts/partitions-schedules-sensors/sensors) --- @@ -71,7 +71,7 @@ When defining an op job, you can provide any of the following: - [Resources](/concepts/resources) - [Configuration](/concepts/configuration/config-schema) - [Hooks](/concepts/ops-jobs-graphs/op-hooks) -- [Tags](/concepts/ops-jobs-graphs/metadata-tags#tags) +- [Tags](/concepts/metadata-tags/tags) and [other metadata](/concepts/metadata-tags/op-job-metadata) - An [executor](/deployment/executors) ### From a graph @@ -87,8 +87,7 @@ from dagster import graph, op, ConfigurableResource class Server(ConfigurableResource): - def ping_server(self): - ... + def ping_server(self): ... 
@op @@ -222,8 +221,7 @@ from dagster import Definitions, job @job -def do_it_all(): - ... +def do_it_all(): ... defs = Definitions(jobs=[do_it_all]) diff --git a/docs/content/concepts/ops-jobs-graphs/ops.mdx b/docs/content/concepts/ops-jobs-graphs/ops.mdx index 1a1a216a135cf..716c2e1b900a9 100644 --- a/docs/content/concepts/ops-jobs-graphs/ops.mdx +++ b/docs/content/concepts/ops-jobs-graphs/ops.mdx @@ -15,7 +15,7 @@ An individual op should perform relatively simple tasks, such as: - Querying an API and storing the result in a data warehouse - Sending an email or Slack message -The computational core of a [software-defined asset](/concepts/assets/software-defined-assets) is an op. Collections of ops can also be assembled to create a [graph](/concepts/ops-jobs-graphs/graphs). +The computational core of an [asset definition](/concepts/assets/software-defined-assets) is an op. Collections of ops can also be assembled to create a [graph](/concepts/ops-jobs-graphs/graphs). ops diff --git a/docs/content/concepts/partitions-schedules-sensors/backfills.mdx b/docs/content/concepts/partitions-schedules-sensors/backfills.mdx index 83cedfa4ddbae..8aeab7046191e 100644 --- a/docs/content/concepts/partitions-schedules-sensors/backfills.mdx +++ b/docs/content/concepts/partitions-schedules-sensors/backfills.mdx @@ -58,7 +58,14 @@ To get this behavior, you need to: Which property to use depends on whether it's most convenient for you to operate on start/end datetime objects, start/end partition keys, or a list of partition keys. -For example: +**If you're using an [I/O manager](/concepts/io-management/io-managers)**, you'll also need to make sure that the I/O manager is using these methods. **Note**: File system I/O managers do not support single-run backfills, but Dagster's built-in database I/O managers - like [Snowflake](/integrations/snowflake), [BigQuery](/integrations/bigquery), or [DuckDB](/\_apidocs/libraries/dagster-duckdb) - include this functionality out of the box. + +Use the following tabs to check out examples both with and without I/O managers. + + + + +#### Without I/O managers ```python file=/concepts/partitions_schedules_sensors/backfills/single_run_backfill_asset.py startafter=start_marker endbefore=end_marker from dagster import ( @@ -75,7 +82,7 @@ from dagster import ( backfill_policy=BackfillPolicy.single_run(), deps=[AssetKey("raw_events")], ) -def events(context: AssetExecutionContext): +def events(context: AssetExecutionContext) -> None: start_datetime, end_datetime = context.partition_time_window input_data = read_data_in_datetime_range(start_datetime, end_datetime) @@ -84,10 +91,22 @@ def events(context: AssetExecutionContext): overwrite_data_in_datetime_range(start_datetime, end_datetime, output_data) ``` -If you are using an I/O manager to handle saving and loading your data, you'll need to ensure the I/O manager is also using these methods. If you're using any of the built-in database I/O managers, like [Snowflake](/integrations/snowflake), [BigQuery](/integrations/bigquery), or [DuckDB](/\_apidocs/libraries/dagster-duckdb), you'll have this out-of-the-box. **Note**: This doesn't apply to file system I/O managers. 
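The example above operates on `context.partition_time_window`; if it is more convenient to work with partition keys, the same single-run pattern can be sketched with `partition_key_range`. The `read_…`, `compute_…`, and `overwrite_…` helpers below are hypothetical, mirroring the snippet above:

```python
from dagster import (
    AssetExecutionContext,
    BackfillPolicy,
    DailyPartitionsDefinition,
    asset,
)


@asset(
    partitions_def=DailyPartitionsDefinition(start_date="2020-01-01"),
    backfill_policy=BackfillPolicy.single_run(),
)
def events_by_key_range(context: AssetExecutionContext) -> None:
    # In a single-run backfill, one run covers a contiguous range of keys.
    key_range = context.partition_key_range
    input_data = read_data_for_key_range(key_range.start, key_range.end)  # hypothetical
    output_data = compute_events_from_raw_events(input_data)  # hypothetical
    overwrite_data_for_key_range(key_range.start, key_range.end, output_data)  # hypothetical
```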
#### With I/O managers

```python file=/concepts/partitions_schedules_sensors/backfills/single_run_backfill_io_manager.py startafter=start_marker endbefore=end_marker
-from dagster import InputContext, IOManager, OutputContext
+from dagster import (
+    AssetExecutionContext,
+    AssetSpec,
+    BackfillPolicy,
+    DailyPartitionsDefinition,
+    InputContext,
+    IOManager,
+    OutputContext,
+    asset,
+)


class MyIOManager(IOManager):
@@ -98,8 +117,25 @@ class MyIOManager(IOManager):
    def handle_output(self, context: OutputContext, obj):
        start_datetime, end_datetime = context.asset_partitions_time_window
        return overwrite_data_in_datetime_range(start_datetime, end_datetime, obj)
+
+
+daily_partition = DailyPartitionsDefinition(start_date="2020-01-01")
+
+raw_events = AssetSpec("raw_events", partitions_def=daily_partition)
+
+
+@asset(
+    partitions_def=daily_partition,
+    backfill_policy=BackfillPolicy.single_run(),
+)
+def events(context: AssetExecutionContext, raw_events):
+    output_data = compute_events_from_raw_events(raw_events)
+    return output_data
```

---

## Launching backfills for partitioned jobs

@@ -113,6 +149,8 @@ To launch and monitor backfills for a job, use the [**Partitions** tab](/concept

- **For the default run coordinator**, the modal will exit after all runs have been launched
- **For the queued run coordinator**, the modal will exit after all runs have been queued

+**Note**: If the job targets assets that have backfill policies, the assets' backfill policies will control which runs are launched.
+
After all the runs have been submitted, you'll be returned to the **Partitions** page, with a filter for runs inside the backfill. This page refreshes periodically and allows you to see how the backfill is progressing. Boxes will become green or red as steps in the backfill runs succeed or fail:

-  This page is specific to Software-defined Assets (SDAs).
-  Looking for ops? Refer to the{" "}
+  This page is specific to asset definitions. Looking for ops?
+  Refer to the{" "}
  Partitioned ops
  {" "}
  documentation.

-A [Software-defined Asset](/concepts/assets/software-defined-assets) (SDA) can represent a collection of _partitions_ that can be tracked and materialized independently. In many ways, each partition functions like its own mini-asset, but they all share a common materialization function and dependencies. Typically, each partition will correspond to a separate file or a slice of a table in a database.
+An [asset definition](/concepts/assets/software-defined-assets) can represent a collection of _partitions_ that can be tracked and materialized independently. In many ways, each partition functions like its own mini-asset, but they all share a common materialization function and dependencies. Typically, each partition will correspond to a separate file or a slice of a table in a database.

A common use is for each partition to represent all the records in a data set that fall within a particular time window, e.g. hourly, daily or monthly. Alternatively, each partition can represent a region, a customer, an experiment - any dimension along which you want to be able to materialize and monitor independently. An asset can also be partitioned along multiple dimensions, e.g. by region and by hour.
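As a quick illustration of "any dimension," here is a hedged sketch pairing a time-based partitioning with a categorical one (the asset and region names are made up):

```python
from dagster import DailyPartitionsDefinition, StaticPartitionsDefinition, asset


# One partition per day since the start date...
@asset(partitions_def=DailyPartitionsDefinition(start_date="2024-01-01"))
def daily_orders(): ...


# ...or one partition per region.
@asset(partitions_def=StaticPartitionsDefinition(["us", "eu", "apac"]))
def regional_orders(): ...
```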
@@ -28,31 +28,14 @@ Once an asset has a set of partitions, you can launch materializations of indivi

Before continuing, you should be familiar with:

-- [Software-defined Assets](/concepts/assets/software-defined-assets)
+- [Asset definitions](/concepts/assets/software-defined-assets)
- [Jobs](/concepts/ops-jobs-graphs/jobs)

---

-## Relevant APIs
-
-| Name | Description |
-| ------------------------------------------------------------------ | --------------------------------------------------------------------------------- |
-| | Superclass - defines the set of partitions that can be materialized for an asset. |
-| | A partitions definition with a partition for each hour. |
-| | A partitions definition with a partition for each day. |
-| | A partitions definition with a partition for each week. |
-| | A partitions definition with a partition for each month. |
-| | A partitions definition with a fixed set of partitions. |
-| | A partitions definition with multiple dimensions. |
-| | A multi-dimensional partition key. |
-| | A partitions definition whose partitions can be dynamically added and removed. |
-| | The partition key for the current run, which can be accessed in the computation. |
-
----
-
## Defining partitioned assets

-A Software-defined Asset can be assigned a `PartitionsDefinition`, which determines the set of partitions that compose it. If the asset is stored in a filesystem or an object store, then each partition will typically correspond to a file or object. If the asset is stored in a database, then each partition will typically correspond to a range of values in a table that fall within a particular window.
+An asset definition can be assigned a `PartitionsDefinition`, which determines the set of partitions that compose it. If the asset is stored in a filesystem or an object store, then each partition will typically correspond to a file or object. If the asset is stored in a database, then each partition will typically correspond to a range of values in a table that fall within a particular window.

The following example demonstrates creating an asset that has a partition for each day since October 1st, 2023. Materializing partition `2023-11-13` of this asset would result in fetching data from the URL `https://api.nasa.gov/planetary/apod?date=2023-11-13` and storing it at the path `nasa/2023-11-13.csv`. Note that `api_key=DEMO_KEY` is used but has a limited number of calls:

@@ -82,7 +65,6 @@ In the following sections, we'll demonstrate a few additional ways to partition

- [**Multi-dimensionally partitioning assets**](#multi-dimensionally-partitioned-assets), for when you want assets to be partitioned by multiple dimensions
- [**Dynamically partitioning assets**](#dynamically-partitioned-assets), for when you don't know the partition set before defining assets
-- [**Defining partitioned assets to use partitioned I/O managers**](#partitioned-assets-with-partitioned-io-managers), for when you want an I/O manager to handle partitioned asset output

### Multi-dimensionally partitioned assets

@@ -156,15 +138,14 @@ images_partitions_def = DynamicPartitionsDefinition(name="images")


@asset(partitions_def=images_partitions_def)
-def images(context: AssetExecutionContext):
-    ...
+def images(context: AssetExecutionContext): ...
```

Partition keys can be added and removed for a given dynamic partition set.
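Beyond sensors, keys for a dynamic partition set can be registered imperatively against the instance — a minimal sketch, assuming the `images` partitions definition above and a configured `DAGSTER_HOME`; the sensor-driven approach follows below:

```python
from dagster import DagsterInstance

# Load the instance (requires DAGSTER_HOME to point at a Dagster instance).
instance = DagsterInstance.get()

# Register two new partition keys for the "images" dynamic partition set.
instance.add_dynamic_partitions("images", ["image_1.png", "image_2.png"])
```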
For example, the following code snippet demonstrates the usage of a [sensor](/concepts/partitions-schedules-sensors/sensors) to detect the presence of a new partition and then trigger a run for that partition: ```python file=/concepts/partitions_schedules_sensors/dynamic_partitioned_asset.py startafter=start_dynamic_partitions_2 endbefore=end_dynamic_partitions_2 images_job = define_asset_job( - "images_job", AssetSelection.keys("images"), partitions_def=images_partitions_def + "images_job", AssetSelection.assets("images"), partitions_def=images_partitions_def ) @@ -188,32 +169,6 @@ def image_sensor(context: SensorEvaluationContext): ) ``` -### Partitioned assets with partitioned I/O managers - - - Heads up! Familiarity with{" "} - I/O managers is required for - this section. - - -Asset functions can write data out to files, but they can also delegate the writing operation to an [I/O manager](/concepts/io-management/io-managers). Dagster's [built-in I/O managers](/concepts/io-management/io-managers#built-in-io-managers) support handling partitioned assets, but you can also [write your own I/O manager](/concepts/io-management/io-managers#handling-partitioned-assets) if you want additional customization. - -For example, this example demonstrates how to define an asset that relies on an I/O manager to store its output: - -```python file=/concepts/partitions_schedules_sensors/partitioned_asset_uses_io_manager.py -import pandas as pd - -from dagster import AssetExecutionContext, DailyPartitionsDefinition, asset - - -@asset(partitions_def=DailyPartitionsDefinition(start_date="2022-01-01")) -def my_daily_partitioned_asset(context: AssetExecutionContext) -> pd.DataFrame: - partition_date_str = context.partition_key - return pd.read_csv(f"coolweatherwebsite.com/weather_obs&date={partition_date_str}") -``` - -If using the default I/O manager, materializing partition `2022-07-23` of this asset would store the output `DataFrame` in a pickle file at a path like `my_daily_partitioned_asset/2022-07-23`. - ### Recommended partition limits We recommend limiting the number of partitions for each asset to 25,000 or fewer. Assets with partition counts exceeding this limit will likely have slower load times in the UI. @@ -256,8 +211,7 @@ partitions_def = DailyPartitionsDefinition(start_date="2023-01-21") @asset(partitions_def=partitions_def) -def events(): - ... +def events(): ... @asset( @@ -271,8 +225,7 @@ def events(): ) ], ) -def yesterday_event_stats(): - ... +def yesterday_event_stats(): ... ``` @@ -280,7 +233,7 @@ def yesterday_event_stats(): #### Managed-loading dependencies -To override partition dependency rules for [managed-loading asset dependencies](/concepts/assets/software-defined-assets#defining-explicit-managed-loading-dependencies), you can use a `PartitionMapping` to specify that each partition of an asset should depend on a partition in an upstream asset. +To override partition dependency rules for managed-loading asset dependencies, you can use a `PartitionMapping` to specify that each partition of an asset should depend on a partition in an upstream asset. In the following code snippet, we used a to specify that each partition of a daily-partitioned asset should depend on the prior day's partition in an upstream asset: @@ -296,8 +249,7 @@ partitions_def = DailyPartitionsDefinition(start_date="2023-01-21") @asset(partitions_def=partitions_def) -def events(): - ... +def events(): ... @asset( @@ -310,8 +262,7 @@ def events(): ) }, ) -def yesterday_event_stats(events): - ... 
+def yesterday_event_stats(events): ... ``` @@ -340,13 +291,11 @@ hourly_partitions_def = HourlyPartitionsDefinition(start_date="2022-05-31-00:00" @asset(partitions_def=hourly_partitions_def) -def asset1(): - ... +def asset1(): ... @asset(partitions_def=hourly_partitions_def) -def asset2(): - ... +def asset2(): ... partitioned_asset_job = define_asset_job( @@ -410,13 +359,56 @@ height={1618} --- -## See it in action +## Examples For more examples of partitions, check out the following in our [Hacker News example](https://github.com/dagster-io/dagster/tree/master/examples/project_fully_featured): - [Defining partitioned assets](https://github.com/dagster-io/dagster/blob/master/examples/project_fully_featured/project_fully_featured/assets/core/items.py) - [Defining a partitioned asset job and a schedule based on it](https://github.com/dagster-io/dagster/blob/master/examples/project_fully_featured/project_fully_featured/jobs.py) +### Partitioned assets with partitioned I/O managers + + + Heads up! Familiarity with{" "} + I/O managers is required for + this section. + + +Asset functions can write data out to files, but they can also delegate the writing operation to an [I/O manager](/concepts/io-management/io-managers). Dagster's [built-in I/O managers](/concepts/io-management/io-managers#built-in-io-managers) support handling partitioned assets, but you can also [write your own I/O manager](/concepts/io-management/io-managers#handling-partitioned-assets) if you want additional customization. + +For example, the following demonstrates how to define an asset that relies on an I/O manager to store its output: + +```python file=/concepts/partitions_schedules_sensors/partitioned_asset_uses_io_manager.py +import pandas as pd + +from dagster import AssetExecutionContext, DailyPartitionsDefinition, asset + + +@asset(partitions_def=DailyPartitionsDefinition(start_date="2022-01-01")) +def my_daily_partitioned_asset(context: AssetExecutionContext) -> pd.DataFrame: + partition_date_str = context.partition_key + return pd.read_csv(f"coolweatherwebsite.com/weather_obs&date={partition_date_str}") +``` + +If using the default I/O manager, materializing partition `2022-07-23` of this asset would store the output `DataFrame` in a pickle file at a path like `my_daily_partitioned_asset/2022-07-23`. + +--- + +## Relevant APIs + +| Name | Description | +| ------------------------------------------------------------------ | --------------------------------------------------------------------------------- | +| | Superclass - defines the set of partitions that can be materialized for an asset. | +| | A partitions definition with a partition for each hour. | +| | A partitions definition with a partition for each day. | +| | A partitions definition with a partition for each week. | +| | A partitions definition with a partition for each month. | +| | A partitions definition with a fixed set of partitions. | +| | A partitions definition with multiple dimensions. | +| | A multi-dimensional partition key. | +| | A partitions definition whose partitions can be dynamically added and removed. | +| | The partition key for the current run, which can be accessed in the computation. 
|
diff --git a/docs/content/concepts/partitions-schedules-sensors/partitioning-ops.mdx b/docs/content/concepts/partitions-schedules-sensors/partitioning-ops.mdx
index 80f6d930251ac..ba306700bdcb3 100644
--- a/docs/content/concepts/partitions-schedules-sensors/partitioning-ops.mdx
+++ b/docs/content/concepts/partitions-schedules-sensors/partitioning-ops.mdx
@@ -96,7 +96,7 @@ from datetime import datetime


@daily_partitioned_config(start_date=datetime(2020, 1, 1))
-def my_partitioned_config(start: datetime, _end: datetime):
+def partitioned_config(start: datetime, _end: datetime):
    return {
        "ops": {
            "process_data_for_date": {"config": {"date": start.strftime("%Y-%m-%d")}}
@@ -107,8 +107,8 @@ def my_partitioned_config(start: datetime, _end: datetime):
Then you can build a job that uses the `PartitionedConfig` by supplying it to the `config` argument when you construct the job:

```python file=/concepts/partitions_schedules_sensors/partitioned_job.py startafter=start_partitioned_job endbefore=end_partitioned_job
-@job(config=my_partitioned_config)
-def do_stuff_partitioned():
+@job(config=partitioned_config)
+def partitioned_op_job():
    process_data_for_date()
```

@@ -157,43 +157,7 @@ def continent_job():

Running a partitioned job on a schedule is a common use case. For example, if your job has a partition for each date, you likely want to run that job every day, on the partition for that day.

-The `build_schedule_from_partitioned_job` function allows you to construct a schedule from a date-partitioned job. It creates a schedule with an interval that matches the spacing of your partitions. If you wanted to create a schedule for the `do_stuff_partitioned` job defined above, you could write:
-
-```python file=/concepts/partitions_schedules_sensors/schedule_from_partitions.py startafter=start_marker endbefore=end_marker
-from dagster import build_schedule_from_partitioned_job, job
-
-
-@job(config=my_partitioned_config)
-def do_stuff_partitioned():
-    ...
-
-
-do_stuff_partitioned_schedule = build_schedule_from_partitioned_job(
-    do_stuff_partitioned,
-)
-```
-
-Schedules can also be made from static partitioned jobs. If you wanted to make a schedule for the `continent_job` above that runs each partition, you could write:
-
-```python file=/concepts/partitions_schedules_sensors/schedule_from_partitions.py startafter=start_static_partition endbefore=end_static_partition
-from dagster import schedule, RunRequest
-
-
-@schedule(cron_schedule="0 0 * * *", job=continent_job)
-def continent_schedule():
-    for c in CONTINENTS:
-        yield RunRequest(run_key=c, partition_key=c)
-```
-
-Or a schedule that will run a subselection of the partition:
-
-```python file=/concepts/partitions_schedules_sensors/schedule_from_partitions.py startafter=start_single_partition endbefore=end_single_partition
-@schedule(cron_schedule="0 0 * * *", job=continent_job)
-def antarctica_schedule():
-    return RunRequest(partition_key="Antarctica")
-```
-
-Refer to the [Schedules documentation](/concepts/partitions-schedules-sensors/schedules#schedules-from-partitioned-assets-and-jobs) for more info about constructing both schedule types.
+Refer to the [Schedule documentation](/concepts/automation/schedules/partitioned-schedules) for more info about constructing schedules for both asset- and op-based jobs.
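For orientation before following that link, a minimal sketch of the date-partitioned case: `build_schedule_from_partitioned_job` creates a schedule whose cadence matches the job's partitions. This assumes the `partitioned_op_job` defined earlier on this page:

```python
from dagster import build_schedule_from_partitioned_job

# `partitioned_op_job` is the daily-partitioned job defined above; the
# resulting schedule runs daily, targeting the most recently completed
# date partition.
partitioned_op_job_schedule = build_schedule_from_partitioned_job(
    partitioned_op_job,
)
```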
--- diff --git a/docs/content/concepts/partitions-schedules-sensors/partitions.mdx b/docs/content/concepts/partitions-schedules-sensors/partitions.mdx index 6830ab267e3c8..c3e5156bd916c 100644 --- a/docs/content/concepts/partitions-schedules-sensors/partitions.mdx +++ b/docs/content/concepts/partitions-schedules-sensors/partitions.mdx @@ -5,7 +5,7 @@ description: Partitions break data into smaller chunks, allowing you to compute # Partitions -A Software-defined Asset can represent a collection of _partitions_ that can be tracked and materialized independently. In many ways, each partition functions like its own mini-asset, but they all share a common materialization function and dependencies. Typically, each partition will correspond to a separate file, or a slice of a table in a database. +An asset definition can represent a collection of _partitions_ that can be tracked and materialized independently. In many ways, each partition functions like its own mini-asset, but they all share a common materialization function and dependencies. Typically, each partition will correspond to a separate file, or a slice of a table in a database. Consider an online store's order data. In a database, the order data might be stored as a single `orders` table, which contains multiple days' worth of orders. However, if the data were ingested into Amazon Web Services (AWS) S3 as parquet files, you could create a new parquet file per day or partition. @@ -25,7 +25,7 @@ Using partitions provides the following benefits: ## Uses -Partitions are supported for both Software-defined Assets and ops, but how each concept is used is unique. Refer to the following documentation for more info: +Partitions are supported for both asset definitions and ops, but how each concept is used is unique. Refer to the following documentation for more info: - [Partitioned assets](/concepts/partitions-schedules-sensors/partitioning-assets) - [Partitioned ops](/concepts/partitions-schedules-sensors/partitioning-ops) (Advanced) @@ -34,5 +34,5 @@ Partitions are supported for both Software-defined Assets and ops, but how each With partitions, you can: - View runs by partition in the [Dagster UI](/concepts/webserver/ui) -- Define a [schedule](/concepts/partitions-schedules-sensors/schedules) that fills in a partition each time it runs. For example, a job might run each day and process the data that arrived during the previous day. +- Define a [schedule](/concepts/automation/schedules) that fills in a partition each time it runs. For example, a job might run each day and process the data that arrived during the previous day. - Launch [backfills](/concepts/partitions-schedules-sensors/backfills), which are sets of runs that each process a different partition. For example, after making a code change, you might want to run your job on all partitions instead of just one of them. diff --git a/docs/content/concepts/partitions-schedules-sensors/schedules.mdx b/docs/content/concepts/partitions-schedules-sensors/schedules.mdx deleted file mode 100644 index 790b73955bb54..0000000000000 --- a/docs/content/concepts/partitions-schedules-sensors/schedules.mdx +++ /dev/null @@ -1,551 +0,0 @@ ---- -title: Schedules | Dagster -description: A Dagster schedule submits job runs at a fixed interval. ---- - -# Schedules - -A schedule is a Dagster definition that is used to execute a [job](/concepts/ops-jobs-graphs/jobs) at a fixed interval. Each time at which a schedule is evaluated is called a tick. 
The schedule definition can generate run configuration for the job on each tick. - -Each schedule: - -- Targets a single job -- Optionally defines a function that returns either: - - One or more objects. Each run request launches a run. - - An optional , which specifies a message which describes why no runs were requested - -Dagster includes a scheduler, which runs as part of the dagster-daemon process. Once you have defined a schedule, see the [dagster-daemon](/deployment/dagster-daemon) page for instructions on how to run the daemon in order to execute your schedules. - ---- - -## Relevant APIs - -| Name | Description | -| --------------------------------------------------------- | --------------------------------------------------------------------------------------------------- | -| | Decorator that defines a schedule that executes according to a given cron schedule. | -| | Class for schedules. | -| | A function that constructs a schedule whose interval matches the partitioning of a partitioned job. | -| | The context passed to the schedule definition execution function | -| | A function that constructs a `ScheduleEvaluationContext`, typically used for testing. | - ---- - -## Defining schedules - -You define a schedule by constructing a . In this section: - -- [Basic schedules](#basic-schedules) -- [Schedules that provide custom run config and tags](#schedules-that-provide-custom-run-config-and-tags) -- [Schedules from partitioned assets and jobs](#schedules-from-partitioned-assets-and-jobs) -- [Customizing execution times](#customizing-execution-times), including [timezones](#customizing-the-executing-timezone) and accounting for [Daylight Savings Time](#execution-time-and-daylight-savings-time) - -### Basic schedules - -Here's a simple schedule that runs a job every day, at midnight: - -```python file=concepts/partitions_schedules_sensors/schedules/schedules.py startafter=start_basic_schedule endbefore=end_basic_schedule -@job -def my_job(): - ... - - -basic_schedule = ScheduleDefinition(job=my_job, cron_schedule="0 0 * * *") -``` - -The `cron_schedule` argument accepts standard [cron expressions](https://en.wikipedia.org/wiki/Cron). It also accepts `"@hourly"`, `"@daily"`, `"@weekly"`, and `"@monthly"` if your `croniter` dependency's version is >= 1.0.12. 
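For instance, these two definitions are equivalent, assuming the `my_job` from the snippet above:

```python
from dagster import ScheduleDefinition

# A standard cron string...
nightly = ScheduleDefinition(job=my_job, cron_schedule="0 0 * * *")

# ...or, with croniter >= 1.0.12, the equivalent alias.
also_nightly = ScheduleDefinition(job=my_job, cron_schedule="@daily")
```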
- -To run schedules for assets, you can [build a job that materializes assets](/concepts/assets/software-defined-assets#building-jobs-that-materialize-assets) and construct a : - -```python file=concepts/partitions_schedules_sensors/schedules/schedules.py startafter=start_basic_asset_schedule endbefore=end_basic_asset_schedule -from dagster import AssetSelection, define_asset_job - -asset_job = define_asset_job("asset_job", AssetSelection.groups("some_asset_group")) - -basic_schedule = ScheduleDefinition(job=asset_job, cron_schedule="0 0 * * *") -``` - -### Schedules that provide custom run config and tags - -If you want to vary the behavior of your job based on the time it's scheduled to run, you can use the decorator, which decorates a function that returns run config based on a provided : - -```python file=concepts/partitions_schedules_sensors/schedules/schedules.py startafter=start_run_config_schedule endbefore=end_run_config_schedule -@op(config_schema={"scheduled_date": str}) -def configurable_op(context: OpExecutionContext): - context.log.info(context.op_config["scheduled_date"]) - - -@job -def configurable_job(): - configurable_op() - - -@schedule(job=configurable_job, cron_schedule="0 0 * * *") -def configurable_job_schedule(context: ScheduleEvaluationContext): - scheduled_date = context.scheduled_execution_time.strftime("%Y-%m-%d") - return RunRequest( - run_key=None, - run_config={ - "ops": {"configurable_op": {"config": {"scheduled_date": scheduled_date}}} - }, - tags={"date": scheduled_date}, - ) -``` - -If you don't need access to the context parameter, you can omit it from the decorated function. - -### Schedules from partitioned assets and jobs - -- [Time partitioned jobs and assets](#time-partitioned-jobs-and-assets) -- [Static partitioned jobs](#static-partitioned-jobs) - -#### Time partitioned jobs and assets - -When you have a [partitioned job](/concepts/partitions-schedules-sensors/partitions) that's partitioned by time, you can use the function to construct a schedule for it whose interval matches the spacing of partitions in your job. For example, if you have a daily partitioned job that fills in a date partition of a table each time it runs, you likely want to run that job every day. - -Having defined a date-partitioned job, you can construct a schedule for it using . For example: - -```python file=/concepts/partitions_schedules_sensors/schedule_from_partitions.py startafter=start_marker endbefore=end_marker -from dagster import build_schedule_from_partitioned_job, job - - -@job(config=my_partitioned_config) -def do_stuff_partitioned(): - ... - - -do_stuff_partitioned_schedule = build_schedule_from_partitioned_job( - do_stuff_partitioned, -) -``` - -The [Partitioning assets](/concepts/partitions-schedules-sensors/partitioning-assets) documentation includes an [example of a date-partitioned asset](/concepts/partitions-schedules-sensors/partitioning-assets#defining-partitioned-assets). You can define a schedule similarly using : - -```python file=/concepts/partitions_schedules_sensors/schedule_from_partitions.py startafter=start_partitioned_asset_schedule endbefore=end_partitioned_asset_schedule -from dagster import ( - asset, - build_schedule_from_partitioned_job, - define_asset_job, - HourlyPartitionsDefinition, -) - - -@asset(partitions_def=HourlyPartitionsDefinition(start_date="2020-01-01-00:00")) -def hourly_asset(): - ... 
- - -partitioned_asset_job = define_asset_job("partitioned_job", selection=[hourly_asset]) - - -asset_partitioned_schedule = build_schedule_from_partitioned_job( - partitioned_asset_job, -) -``` - -Each schedule tick of a partitioned job fills in the latest partition in the partition set that exists as of the tick time. Note that this implies that when the schedule submits a run on a particular day, it will typically be for the partition whose key corresponds to the previous day. For example, the schedule will fill in the `2020-04-01` partition on `2020-04-02`. That's because each partition corresponds to a time window. The key of the partition is the start of the time window, but the partition isn't included in the list until its time window has completed. Waiting until the time window has finished before Kicking off a run means the run can process data from within that entire time window. - -However, you can use the `end_offset` parameter of to change which partition is the most recent partition that is filled in at each schedule tick. Setting `end_offset` to `1` will extend the partitions forward so that the schedule tick that runs on day `N` will fill in day `N`'s partition instead of day `N-1`, and setting `end_offset` to a negative number will cause the schedule to fill in earlier days' partitions. In general, setting `end_offset` to `X` will cause the partition that runs on day `N` to fill in the partition for day `N - 1 + X`. The same holds true for hourly, weekly, and monthly partitioned jobs, for their respective partition sizes. - -You can use the `minute_of_hour`, `hour_of_day`, `day_of_week`, and `day_of_month` parameters of `build_schedule_from_partitioned_job` to control the timing of the schedule. For example, if you have a job that's partitioned by date, and you set `minute_of_hour` to `30` and `hour_of_day` to `1`, the schedule would submit the run for partition `2020-04-01` at 1:30 AM on `2020-04-02`. - -#### Static partitioned jobs - -You can also create a schedule for a static partition. The Partitioned Jobs concepts page also includes an [example of how to define a static partitioned job](/concepts/partitions-schedules-sensors/partitioning-ops#defining-jobs-with-static-partitions). To define a schedule for a static partitioned job, we will construct a schedule from scratch, rather than using a helper function like `build_schedule_from_partitioned_job` this will allow more flexibility in determining which partitions should be run by the schedule. 
- -For example, if we have the continents static partitioned job from the Partitioned Jobs concept page - -```python file=/concepts/partitions_schedules_sensors/static_partitioned_job.py -from dagster import Config, OpExecutionContext, job, op, static_partitioned_config - -CONTINENTS = [ - "Africa", - "Antarctica", - "Asia", - "Europe", - "North America", - "Oceania", - "South America", -] - - -@static_partitioned_config(partition_keys=CONTINENTS) -def continent_config(partition_key: str): - return {"ops": {"continent_op": {"config": {"continent_name": partition_key}}}} - - -class ContinentOpConfig(Config): - continent_name: str - - -@op -def continent_op(context: OpExecutionContext, config: ContinentOpConfig): - context.log.info(config.continent_name) - - -@job(config=continent_config) -def continent_job(): - continent_op() -``` - -We can write a schedule that will run this partition: - -```python file=/concepts/partitions_schedules_sensors/schedule_from_partitions.py startafter=start_static_partition endbefore=end_static_partition -from dagster import schedule, RunRequest - - -@schedule(cron_schedule="0 0 * * *", job=continent_job) -def continent_schedule(): - for c in CONTINENTS: - yield RunRequest(run_key=c, partition_key=c) -``` - -Or a schedule that will run a subselection of the partition: - -```python file=/concepts/partitions_schedules_sensors/schedule_from_partitions.py startafter=start_single_partition endbefore=end_single_partition -@schedule(cron_schedule="0 0 * * *", job=continent_job) -def antarctica_schedule(): - return RunRequest(partition_key="Antarctica") -``` - -### Customizing execution times - -- [Customizing the executing timezone](#customizing-the-executing-timezone) -- [Execution time and Daylight Savings Time](#execution-time-and-daylight-savings-time) - -#### Customizing the executing timezone - -You can customize the timezone in which your schedule executes by setting the `execution_timezone` parameter on your schedule to any [`tz` timezone](https://en.wikipedia.org/wiki/List_of_tz_database_time_zones). Schedules with no timezone set run in UTC. - -For example, the following schedule executes daily at 9AM in US/Pacific time: - -```python file=concepts/partitions_schedules_sensors/schedules/schedules.py startafter=start_timezone endbefore=end_timezone -my_timezone_schedule = ScheduleDefinition( - job=my_job, cron_schedule="0 9 * * *", execution_timezone="US/Pacific" -) -``` - -The decorator accepts the same argument. Schedules from partitioned jobs execute in the timezone defined on the partitioned config. - -#### Execution time and Daylight Savings Time - -Because of Daylight Savings Time transitions, it's possible to specify an execution time that does not exist for every scheduled interval. For example, say you have a daily schedule with an execution time of 2:30 AM in the US/Eastern timezone. On 2019/03/10, the time jumps from 2:00 AM to 3:00 AM when Daylight Savings Time begins. Therefore, the time of 2:30 AM did not exist for the day. - -If you specify such an execution time, Dagster runs your schedule at the next time that exists. In the previous example, the schedule would run at 3:00 AM. - -It's also possible to specify an execution time that exists twice on one day every year. For example, on 2019/11/03 in US/Eastern time, the hour from 1:00 AM to 2:00 AM repeats, so a daily schedule running at 1:30 AM has two possible times in which it could execute. In this case, Dagster will execute your schedule at the latter of the two possible times. 
- -Hourly schedules will be unaffected by daylight savings time transitions - the schedule will continue to run exactly once every hour, even as the timezone changes. In the example above where the hour from 1:00 AM to 2:00 AM repeats, an hourly schedule running at 30 minutes past the hour would run at 12:30 AM, both instances of 1:30 AM, and then proceed normally from 2:30 AM on. - -### Using resources in schedules - -Dagster's [resources](/concepts/resources) system can be used with schedules to make it easier to interact with external systems or to make components of a schedule easier to plug in for testing purposes. - -To specify resource dependencies, annotate the resource as a parameter to the schedule's function. Resources are provided by attaching them to your call. - -Here, a resource is provided that helps a schedule generate a date string: - -```python file=/concepts/resources/pythonic_resources.py startafter=start_new_resource_on_schedule endbefore=end_new_resource_on_schedule dedent=4 -from dagster import ( - schedule, - ScheduleEvaluationContext, - ConfigurableResource, - job, - RunRequest, - RunConfig, - Definitions, -) -from datetime import datetime -from typing import List - -class DateFormatter(ConfigurableResource): - format: str - - def strftime(self, dt: datetime) -> str: - return dt.strftime(self.format) - -@job -def process_data(): - ... - -@schedule(job=process_data, cron_schedule="* * * * *") -def process_data_schedule( - context: ScheduleEvaluationContext, - date_formatter: DateFormatter, -): - formatted_date = date_formatter.strftime(context.scheduled_execution_time) - - return RunRequest( - run_key=None, - tags={"date": formatted_date}, - ) - -defs = Definitions( - jobs=[process_data], - schedules=[process_data_schedule], - resources={"date_formatter": DateFormatter(format="%Y-%m-%d")}, -) -``` - -For more information on resources, refer to the [Resources documentation](/concepts/resources). To see how to test schedules with resources, refer to the section on [Testing schedules with resources](#testing-schedules-with-resources). - ---- - -## Running the scheduler - -Schedules must be started for them to run. Schedules can be started and stopped: - -- In the Dagster UI using the **Schedules** tab: - - Schedules tab in the Dagster UI - -- Using the CLI: - - ```shell - dagster schedule start - dagster schedule stop - ``` - -- In code by setting the schedule's default status to `DefaultScheduleStatus.RUNNING`: - - ```python file=concepts/partitions_schedules_sensors/schedules/schedules.py startafter=start_running_in_code endbefore=end_running_in_code - my_running_schedule = ScheduleDefinition( - job=my_job, cron_schedule="0 9 * * *", default_status=DefaultScheduleStatus.RUNNING - ) - ``` - -If you manually start or stop a schedule in the UI, that overrides any default status set in code. - -Once the schedule is started, the schedule will begin executing immediately if you're running the [dagster-daemon](/deployment/dagster-daemon) process as part of your deployment. Refer to the [Troubleshooting](/concepts/partitions-schedules-sensors/schedules#troubleshooting) section if your schedule has been started but isn't submitting runs. - ---- - -## Logging in schedules - - - Schedule logs are stored in your{" "} - - Dagster instance's compute log storage - - . You should ensure that your compute log storage is configured to view your schedule - logs. 
- - -Any schedule can emit log messages during its evaluation function: - -```python file=concepts/partitions_schedules_sensors/schedules/schedules.py startafter=start_schedule_logging endbefore=end_schedule_logging -@schedule(job=my_job, cron_schedule="* * * * *") -def logs_then_skips(context): - context.log.info("Logging from a schedule!") - return SkipReason("Nothing to do") -``` - -These logs can be viewed when inspecting a tick in the tick history view on the corresponding schedule page. - ---- - -## Testing schedules - - - - -### Via the Dagster UI - -In the UI, you can manually trigger a test evaluation of a schedule using a mock evaluation time and view the results. - -On the overview page for a particular schedule, there is a `Test schedule` button. Clicking this button will perform a test evaluation of your schedule for a provided mock evaluation time, and show you the results of that evaluation. - -1. Click **Overview > Schedules**. - -2. Click the schedule you want to test. - -3. Click the **Test Schedule** button, located near the top right corner of the page: - - - -4. You'll be prompted to select a mock schedule evaluation time. As schedules are defined on a cadence, the evaluation times provided in the dropdown are past and future times along that cadence. - - For example, let's say you're testing a schedule with a cadence of `"Every day at X time"`. In the dropdown, you'd see five evaluation times in the future and five evaluation times in the past along that cadence. - - - -5. After selecting an evaluation time, click the **Evaluate** button. A window containing the result of the evaluation will display: - - - - If the evaluation was successful and a run request was produced, you can open the launchpad pre-scaffolded with the config corresponding to that run request. - - - - -### Via Python - -To test a function decorated by the decorator, you can invoke the schedule definition like it's a regular Python function. The invocation will return run config, which can then be validated using the function. Below is a test for the `configurable_job_schedule` that we defined in [an earlier section](#schedules-that-provide-custom-run-config-and-tags). - -It uses to construct a to provide for the `context` parameter. - -```python file=concepts/partitions_schedules_sensors/schedules/schedule_examples.py startafter=start_test_cron_schedule_context endbefore=end_test_cron_schedule_context -from dagster import build_schedule_context, validate_run_config - - -def test_configurable_job_schedule(): - context = build_schedule_context( - scheduled_execution_time=datetime.datetime(2020, 1, 1) - ) - run_request = configurable_job_schedule(context) - assert validate_run_config(configurable_job, run_request.run_config) -``` - -If your `@schedule`-decorated function doesn't have a context parameter, you don't need to provide one when invoking it. - -#### Testing schedules with resources - -For schedules that utilize [resources](/concepts/resources), you can provide the necessary resources when invoking the schedule function. - -Below is a test for the `process_data_schedule` that we defined in the [Using resources in schedules](#using-resources-in-schedules) section, which uses the `date_formatter` resource. 
- -```python file=/concepts/resources/pythonic_resources.py startafter=start_test_resource_on_schedule endbefore=end_test_resource_on_schedule dedent=4 -from dagster import build_schedule_context, validate_run_config - -def test_process_data_schedule(): - context = build_schedule_context( - scheduled_execution_time=datetime.datetime(2020, 1, 1) - ) - run_request = process_data_schedule( - context, date_formatter=DateFormatter(format="%Y-%m-%d") - ) - assert ( - run_request.run_config["ops"]["fetch_data"]["config"]["date"] - == "2020-01-01" - ) -``` - - - - ---- - -## Troubleshooting - -Try these steps if you're trying to run a schedule and are running into problems: - -- [Verify the schedule is included as a definition](#verify-the-schedule-is-included-as-a-definition) -- [Verify the schedule has been started](#verify-that-the-schedule-has-been-started) -- [Verify schedule interval configuration](#verify-schedule-interval-configuration) -- [Verify dagster-daemon setup](#verify-dagster-daemon-setup) - -### Verify the schedule is included as a definition - -First, verify that the schedule has been included in a object. This ensures that the schedule is detectable and loadable by Dagster tools like the UI and CLI: - -```python -defs = Definitions( - assets=[asset_1, asset_2], - jobs=[job_1], - schedules=[all_assets_job_schedule], -) -``` - -Refer to the [Code locations documentation](/concepts/code-locations) for more info. - -### Verify that the schedule has been started - -Next, using the UI, verify the schedule has been started: - -1. Open the left sidenav and locate the job attached to the schedule. Schedules that have been started will have a **green clock icon** next to them: - - Started schedule icon next to schedule in left sidenav in the Dagster UI - - If the schedule appears in the list but doesn't have the green clock icon, click the schedule. On the page that displays, use the **toggle at the top of the page** to mark it as running: - - Start/stop toggle for schedules in Code Locations tab in the Dagster UI - -2. Next, verify that the UI has loaded the latest version of your schedule code: - - 1. Click **Deployment** in the top navigation. - - 2. In the **Code locations** tab, click **Reload** (local webserver) or **Redeploy** (Dagster Cloud). - - If the webserver is unable to load the code location - for example, due to a syntax error - an error message with additional info will display in the left UI sidenav. - - If the code location is loaded successfully but the schedule doesn't appear in the list of schedules, verify that the schedule is included in a object. - -### Verify schedule interval configuration - -Clicking the schedule in the left sidenav in the UI opens the **Schedule details** page for the schedule. - -If the schedule is running, a **Next tick** will display near the top of the page. This indicates when the schedule is next expected to run: - - - -Verify that the time is what you expect, including the timezone. - -### Verify dagster-daemon setup - -This section is applicable to Open Source (OSS) deployments. - -If the schedule interval is correctly configured but runs aren't being created, it's possible that the dagster-daemon process isn't working correctly. If you haven't set up a Dagster daemon yet, refer to the [Deployment guides](/deployment) for more info. - -1. First, check that the daemon is running: - - 1. In the UI, click **Deployment** in the top navigation. - 2. Click the **Daemons** tab. - 3. Locate the **Scheduler** row. 
- - The daemon process periodically sends out a hearbeat from the scheduler. If the scheduler daemon has a status of **Not running**, this indicates that there's an issue with your daemon deployment. If the daemon ran into an error that resulted in an exception, this error will often display in this tab. - - If there isn't a clear error on this page or if the daemon should be sending heartbeans but isn't, move on to step two. - -2. Next, check the logs from the daemon process. The steps to do this will depend on your deployment - for example, if you're using Kubernetes, you'll need to get the logs from the pod that's running the daemon. You should be able to search those logs for the name of your schedule (or `SchedulerDaemon` to see all logs associated with the scheduler) to gain an understanding of what's going wrong. - - If the daemon output contains error indicating the schedule couldn't be found, verify that the daemon is using the same `workspace.yaml` file as the webserver. The daemon does not need to restart in order to pick up changes to the `workspace.yaml` file. Refer to the [Workspace files documentation](/concepts/code-locations/workspace-files) for more info. - - If the logs don't indicate the cause of the issue, move on to step three. - -3. Lastly, double-check your schedule code: - - 1. In the UI, open the schedule's **Schedule details** page by clicking the schedule in the left sidenav. - 2. On this page, locate the latest tick for the schedule. - - If there was an error trying to submit runs for the schedule, a red **Failure** badge should display in the **Status** column. Click the badge to display an error and stack trace describing the execution failure. - -**Still stuck?** If these steps didn't resolve the issue, reach out in Slack or file an issue on GitHub. - ---- - -## See it in action - -For more examples of schedules, check out the following in our [Hacker News example](https://github.com/dagster-io/dagster/tree/master/examples/project_fully_featured): - -- [Defining a partitioned asset job and a schedule based on it](https://github.com/dagster-io/dagster/blob/master/examples/project_fully_featured/project_fully_featured/jobs.py) diff --git a/docs/content/concepts/partitions-schedules-sensors/sensors.mdx b/docs/content/concepts/partitions-schedules-sensors/sensors.mdx index fca59d9aa9359..51a76317f6271 100644 --- a/docs/content/concepts/partitions-schedules-sensors/sensors.mdx +++ b/docs/content/concepts/partitions-schedules-sensors/sensors.mdx @@ -43,7 +43,7 @@ To define a sensor, use the decorator. Th Let's say you have a job that logs a filename that is specified in the op configuration of the `process_file` op: ```python file=concepts/partitions_schedules_sensors/sensors/sensors.py startafter=start_sensor_job_marker endbefore=end_sensor_job_marker -from dagster import op, job, Config +from dagster import op, job, Config, OpExecutionContext class FileConfig(Config): @@ -97,8 +97,7 @@ Once a sensor is added to a object with the jo ```python file=concepts/partitions_schedules_sensors/sensors/sensors.py startafter=start_running_in_code endbefore=end_running_in_code @sensor(job=asset_job, default_status=DefaultSensorStatus.RUNNING) -def my_running_sensor(): - ... +def my_running_sensor(): ... ``` If you manually start or stop a sensor in the UI, that will override any default status that is set in code. @@ -250,8 +249,7 @@ class UsersAPI(ConfigurableResource): return requests.get(self.url).json() @job -def process_user(): - ... +def process_user(): ... 
 @sensor(job=process_user)
 def process_new_users_sensor(
@@ -629,15 +627,15 @@ def code_location_a_data_update_failure_sensor():
     send_slack_alert()
 ```
 
-You can also monitor every job in your Dagster instance by specifying `monitor_all_repositories=True` on the sensor decorator. Note that `monitor_all_repositories` cannot be used along with jobs specified via `monitored_jobs`.
+You can also monitor every job in your Dagster deployment by specifying `monitor_all_code_locations=True` on the sensor decorator. Note that `monitor_all_code_locations` cannot be used along with jobs specified via `monitored_jobs`.
 
 ```python file=/concepts/partitions_schedules_sensors/sensors/sensors.py startafter=start_instance_sensor endbefore=end_instance_sensor
 @run_status_sensor(
-    monitor_all_repositories=True,
+    monitor_all_code_locations=True,
     run_status=DagsterRunStatus.SUCCESS,
 )
-def instance_sensor():
-    # when any job in the Dagster instance succeeds, this sensor will trigger
+def sensor_monitor_all_code_locations():
+    # when any job in the Dagster deployment succeeds, this sensor will trigger
     send_slack_alert()
 ```
diff --git a/docs/content/concepts/partitions-schedules-sensors/testing-partitions.mdx b/docs/content/concepts/partitions-schedules-sensors/testing-partitions.mdx
index 84e051db4ce54..4d3f160baa080 100644
--- a/docs/content/concepts/partitions-schedules-sensors/testing-partitions.mdx
+++ b/docs/content/concepts/partitions-schedules-sensors/testing-partitions.mdx
@@ -7,8 +7,8 @@ description: Test your partition configuration and jobs.
 
 This guide is applicable to both{" "}
-  Software-defined Assets{" "}
-  and ops.{" "}
+  asset definitions and{" "}
+  ops.{" "}
 
 In this guide, we'll cover a few ways to test your partitioned config and jobs.
@@ -52,8 +52,8 @@ def test_my_partitioned_config():
     }
 
     # assert that the output of the decorated function is valid configuration for the
-    # do_stuff_partitioned job
-    assert validate_run_config(do_stuff_partitioned, run_config)
+    # partitioned_op_job job
+    assert validate_run_config(partitioned_op_job, run_config)
 ```
 
 If you want to test that a creates the partitions you expect, use the `get_partition_keys` or `get_run_config_for_partition_key` functions:
@@ -99,7 +99,7 @@ def test_my_offset_partitioned_config():
     assert keys[0] == "2020-01-01"
     assert keys[1] == "2020-01-02"
 
-    # test that the run_config for a partition is valid for do_stuff_partitioned
+    # test that the run_config for a partition is valid for do_more_stuff_partitioned
     run_config = my_offset_partitioned_config.get_run_config_for_partition_key(keys[0])
     assert validate_run_config(do_more_stuff_partitioned, run_config)
 
@@ -120,8 +120,8 @@ def test_my_offset_partitioned_config():
 
 To run a partitioned job in-process on a particular partition, supply a value for the `partition_key` argument of `execute_in_process`.
For example: ```python file=/concepts/partitions_schedules_sensors/partitioned_job_test.py startafter=start endbefore=end -def test_do_stuff_partitioned(): - assert do_stuff_partitioned.execute_in_process(partition_key="2020-01-01").success +def test_partitioned_op_job(): + assert partitioned_op_job.execute_in_process(partition_key="2020-01-01").success ``` --- @@ -143,6 +143,6 @@ def test_do_stuff_partitioned(): > diff --git a/docs/content/concepts/repositories-workspaces/repositories.mdx b/docs/content/concepts/repositories-workspaces/repositories.mdx index 9bba74d7baf86..809340e3c70da 100644 --- a/docs/content/concepts/repositories-workspaces/repositories.mdx +++ b/docs/content/concepts/repositories-workspaces/repositories.mdx @@ -13,11 +13,11 @@ description: A repository is a collection of jobs, schedules, and sensor definit info. -A repository is a collection of software-defined assets, jobs, schedules, and sensors. Repositories are loaded as a unit by the Dagster CLI, Dagster webserver, and the Dagster daemon. +A repository is a collection of asset definitions, jobs, schedules, and sensors. Repositories are loaded as a unit by the Dagster CLI, Dagster webserver, and the Dagster daemon. A convenient way to organize your job and other definitions, each repository: -- Includes various definitions: [Software-defined assets](/concepts/assets/software-defined-assets), [Jobs](/concepts/ops-jobs-graphs/jobs), [Schedules](/concepts/partitions-schedules-sensors/schedules), and [Sensors](/concepts/partitions-schedules-sensors/sensors). +- Includes various definitions: [Asset definitions](/concepts/assets/software-defined-assets), [Jobs](/concepts/ops-jobs-graphs/jobs), [Schedules](/concepts/automation/schedules), and [Sensors](/concepts/partitions-schedules-sensors/sensors). - Is loaded in a different process than Dagster system processes like the webserver. Any communication between the Dagster system and repository code occurs over an RPC mechanism, ensuring that problems in repository code can't affect Dagster or other repositories. - Can be loaded in its own Python environment, so you can manage your dependencies (or even your own Python versions) separately. diff --git a/docs/content/concepts/resources-legacy.mdx b/docs/content/concepts/resources-legacy.mdx index d9d1ccd7090ef..c64d8ec3fad90 100644 --- a/docs/content/concepts/resources-legacy.mdx +++ b/docs/content/concepts/resources-legacy.mdx @@ -16,7 +16,7 @@ description: Resources enable you to separate graph logic from environment, and guide. -Resources are objects that are shared across the implementations of multiple [software-defined assets](/concepts/assets/software-defined-assets) and [ops](/concepts/ops-jobs-graphs/ops) and that can be plugged in after defining those ops and assets. +Resources are objects that are shared across the implementations of multiple [asset definitions](/concepts/assets/software-defined-assets) and [ops](/concepts/ops-jobs-graphs/ops) and that can be plugged in after defining those ops and assets. Resources typically model external components that assets and ops interact with. For example, a resource might be a connection to a data warehouse like Snowflake or a service like Slack. @@ -37,7 +37,7 @@ So, why use resources? | | The context object provided to a resource during initialization. This object contains required resource, config, and other run information. | | | Function for building an outside of execution, intended to be used when testing a resource. 
| | | Function for initializing a set of resources outside of the context of a job's execution. | -| | Function for providing resources to software-defined assets and source assets. | +| | Function for providing resources to asset definitions. | --- @@ -63,17 +63,17 @@ def cereal_fetcher(init_context): ## Using resources -- [With software-defined assets](#using-resources-with-software-defined-assets) +- [With asset definitions](#using-resources-with-asset-definitions) - [With ops](#using-resources-with-ops) -### Using resources with software-defined assets +### Using resources with asset definitions -- [Accessing resources](#accessing-resources-in-software-defined-assets) -- [Providing resources](#providing-resources-to-software-defined-assets) +- [Accessing resources](#accessing-resources-in-asset-definitions) +- [Providing resources](#providing-resources-to-asset-definitions) -#### Accessing resources in software-defined assets +#### Accessing resources in asset definitions -Software-defined assets use resource keys to access resources: +Asset definitions use resource keys to access resources: ```python file=/concepts/resources/resources.py startafter=start_asset_use_resource endbefore=end_asset_use_resource from dagster import asset, AssetExecutionContext @@ -84,14 +84,14 @@ def asset_requires_resource(context: AssetExecutionContext): do_something_with_resource(context.resources.foo) ``` -#### Providing resources to software-defined assets +#### Providing resources to asset definitions How resources are provided to assets depends on how you're organizing your code definitions in Dagster. -Resources can be provided to software-defined assets by passing them to a object. The resources provided to are automatically bound to the assets. +Resources can be provided to asset definitions by passing them to a object. The resources provided to are automatically bound to the assets. ```python file=/concepts/resources/resources.py startafter=start_asset_provide_resource endbefore=end_asset_provide_resource from dagster import Definitions @@ -138,7 +138,7 @@ def repo(): #### Accessing resources in ops -Like software-defined assets, ops use resource keys to access resources: +Like asset definitions, ops use resource keys to access resources: ```python file=/concepts/resources/resources.py startafter=start_op_with_resources_example endbefore=end_op_with_resources_example from dagster import op @@ -305,8 +305,7 @@ from dagster import resource, build_resources @resource -def the_credentials(): - ... +def the_credentials(): ... @resource(required_resource_keys={"credentials"}) diff --git a/docs/content/concepts/resources.mdx b/docs/content/concepts/resources.mdx index 1ddafa8dc4ba1..05f45c351d722 100644 --- a/docs/content/concepts/resources.mdx +++ b/docs/content/concepts/resources.mdx @@ -23,7 +23,7 @@ In data engineering, resources are the external services, tools, and storage you - The Snowflake/Databricks/BigQuery account the data is ingested into - The BI tool the dashboard was made in -Using Dagster resources, you can standardize connections and integrations to these tools across Dagster definitions like [Software-defined Assets](/concepts/assets/software-defined-assets), [schedules](/concepts/partitions-schedules-sensors/schedules), [sensors](/concepts/partitions-schedules-sensors/sensors), [ops](/concepts/ops-jobs-graphs/ops), and [jobs](/concepts/ops-jobs-graphs/jobs). 
+Using Dagster resources, you can standardize connections and integrations to these tools across Dagster definitions like [asset definitions](/concepts/assets/software-defined-assets), [schedules](/concepts/automation/schedules), [sensors](/concepts/partitions-schedules-sensors/sensors), [ops](/concepts/ops-jobs-graphs/ops), and [jobs](/concepts/ops-jobs-graphs/jobs). So, why use resources? @@ -44,7 +44,7 @@ So, why use resources? | | The context object provided to a resource during initialization. This object contains required resources, config, and other run information. | | | Function for building an outside of execution, intended to be used when testing a resource. | | | Function for initializing a set of resources outside of the context of a job's execution. | -| | Advanced API for providing resources to a specific set of Software-defined Assets and source assets, overriding those provided to . | +| | Advanced API for providing resources to a specific set of asset definitions, overriding those provided to . | --- @@ -58,7 +58,7 @@ The configuration system has a few advantages over plain Python parameter passin 2. Displayed in the Dagster UI 3. Set dynamically using environment variables, resolved at runtime -### With Software-defined Assets +### With asset definitions The following example demonstrates defining a subclass of that represents a connection to an external service. The resource can be configured by constructing it in the call. @@ -102,9 +102,9 @@ To specify resource dependencies on a sensor, annotate the resource type as a pa ### With schedules -[Schedules](/concepts/partitions-schedules-sensors/schedules) can use resources in case your schedule logic needs to interface with an external tool or to make your schedule logic more testable. +[Schedules](/concepts/automation/schedules) can use resources in case your schedule logic needs to interface with an external tool or to make your schedule logic more testable. -To specify resource dependencies on a schedule, annotate the resource type as a parameter to the schedule's function. For more information and examples, refer to the [Schedules documentation](/concepts/partitions-schedules-sensors/schedules#using-resources-in-schedules). +To specify resource dependencies on a schedule, annotate the resource type as a parameter to the schedule's function. Refer to the [Schedule examples reference](/concepts/automation/schedules/examples#using-resources-in-schedules) for more info. ### With ops and jobs @@ -154,7 +154,7 @@ There are many supported config types that can be used when defining resources. ### Using environment variables with resources -Resources can be configured using environment variables, which is useful for secrets or other environment-specific configuration. If you're using [Dagster Cloud](/dagster-cloud), environment variables can be [configured directly in the UI](/dagster-cloud/managing-deployments/environment-variables-and-secrets). +Resources can be configured using environment variables, which is useful for secrets or other environment-specific configuration. If you're using [Dagster+](/dagster-plus), environment variables can be [configured directly in the UI](/dagster-plus/managing-deployments/environment-variables-and-secrets). To use environment variables, pass an when constructing the resource. `EnvVar` inherits from `str` and can be used to populate any string config field on a resource. The value of the environment variable will be evaluated when a run is launched. 
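
As a minimal sketch of this pattern, the example below populates a string config field from the environment. The `CredentialsResource` class, `users` asset, and `MY_DB_PASSWORD` variable are hypothetical names used only for illustration:

```python
# A minimal sketch, assuming a hypothetical CredentialsResource and a
# MY_DB_PASSWORD environment variable. EnvVar defers reading the variable
# until a run is launched, rather than when the Definitions object is built.
from dagster import ConfigurableResource, Definitions, EnvVar, asset


class CredentialsResource(ConfigurableResource):
    username: str
    password: str


@asset
def users(credentials: CredentialsResource):
    # Connect to the external service with credentials.username / credentials.password
    ...


defs = Definitions(
    assets=[users],
    resources={
        "credentials": CredentialsResource(
            username="my_user",
            password=EnvVar("MY_DB_PASSWORD"),  # resolved at run launch time
        )
    },
)
```

Because `EnvVar` is resolved at launch time rather than at definition time, the same code can be deployed to environments that set `MY_DB_PASSWORD` differently.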
@@ -186,7 +186,7 @@ For more information on using environment variables with Dagster, refer to the [ ### Configuring resources at launch time -In some cases, you may want to specify configuration for a resource at launch time, in the Launchpad or in a for a [schedule](/concepts/partitions-schedules-sensors/schedules) or [sensor](/concepts/partitions-schedules-sensors/sensors). For example, you may want a sensor-triggered run to specify a different target table in a database resource for each run. +In some cases, you may want to specify configuration for a resource at launch time, in the Launchpad or in a for a [schedule](/concepts/automation/schedules) or [sensor](/concepts/partitions-schedules-sensors/sensors). For example, you may want a sensor-triggered run to specify a different target table in a database resource for each run. You can use the `configure_at_launch()` method to defer the construction of a configurable resource until launch time: @@ -196,8 +196,7 @@ from dagster import ConfigurableResource, Definitions, asset class DatabaseResource(ConfigurableResource): table: str - def read(self): - ... + def read(self): ... @asset def data_from_database(db_conn: DatabaseResource): @@ -348,12 +347,10 @@ from pydantic import PrivateAttr class DBConnection: ... - def query(self, body: str): - ... + def query(self, body: str): ... @contextmanager -def get_database_connection(username: str, password: str): - ... +def get_database_connection(username: str, password: str): ... class MyClientResource(ConfigurableResource): username: str diff --git a/docs/content/concepts/testing.mdx b/docs/content/concepts/testing.mdx index 314dc41eca28b..98d866242d520 100644 --- a/docs/content/concepts/testing.mdx +++ b/docs/content/concepts/testing.mdx @@ -13,19 +13,8 @@ We believe the underlying fact is that data applications encode much of their bu This page demonstrates how Dagster addresses these challenges: -- It provides convenient ways to write [Unit Tests in Data Applications](#unit-tests-in-data-applications). -- It allows you to [Separate Business Logic from Environments](#separating-business-logic-from-environments) and, therefore, write lightweight tests. - ---- - -## Relevant APIs - -| Name | Description | -| ------------------------------------------------------------------ | ------------------------------------------------------------------------------------------------------------------------------------- | -| | A method to execute a job synchronously, typically for testing job execution or running standalone scripts. | -| | A method to construct an , typically to provide to the invocation of an op for testing. | -| | A method to construct an , typically to provide to the invocation of an asset for testing. | -| | Ephemerally materializes a provided list of software defined assets for testing. | +- It provides convenient ways to write [unit tests in data applications](#unit-tests-in-data-applications) +- It allows you to [separate business logic from environments](#separating-business-logic-from-environments) and, therefore, write lightweight tests --- @@ -59,8 +48,6 @@ def test_job(): assert result.output_for_node("subtract") == -1 ``` -You can find more unit test examples in the [Examples](#examples) section below. - --- ## Separating business logic from environments @@ -94,8 +81,7 @@ from dagster import graph, op, ConfigurableResource, OpExecutionContext class MyApi(ConfigurableResource): - def call(self): - ... + def call(self): ... 
@op @@ -136,22 +122,177 @@ def run_in_prod(): download_job.execute_in_process() ``` -For more information, check out the [Resources](/concepts/resources) sections. +For more information, refer to the [Resources](/concepts/resources) documentation. + +--- + +## Testing asset definitions + + +-decorated functions can be directly invoked, which will invoke the underlying op +computation. + +A basic asset, with no dependencies: + +```python file=/concepts/ops_jobs_graphs/unit_tests.py startafter=start_test_basic_asset endbefore=end_test_basic_asset +from dagster import asset + + +@asset +def basic_asset(): + return 5 + + +# An example unit test for basic_asset. +def test_basic_asset(): + assert basic_asset() == 5 +``` + +An asset with dependencies: + +```python file=/concepts/ops_jobs_graphs/unit_tests.py startafter=start_test_input_asset endbefore=end_test_input_asset +from dagster import asset + + +@asset +def asset_with_inputs(x, y): + return x + y + + +# An example unit test for asset_with_inputs. +def test_asset_with_inputs(): + assert asset_with_inputs(5, 6) == 11 +``` + +### Testing assets with config + +If your asset has a config schema, you can pass a config value to the invocation. The following asset relies on attached config: + +```python file=/concepts/ops_jobs_graphs/unit_tests.py startafter=start_test_config_asset endbefore=end_test_config_asset +from dagster import asset, Config + + +class MyAssetConfig(Config): + my_string: str + + +@asset +def asset_requires_config(config: MyAssetConfig) -> str: + return config.my_string + + +def test_asset_requires_config(): + result = asset_requires_config(config=MyAssetConfig(my_string="foo")) + ... +``` + +### Testing assets with resources + +If your asset requires resources, you can specify them as arguments when invoking the asset directly. + +Consider the following asset, which requires a resource `bar`. + +```python file=/concepts/ops_jobs_graphs/unit_tests.py startafter=start_test_resource_asset endbefore=end_test_resource_asset +from dagster import asset, ConfigurableResource + + +class BarResource(ConfigurableResource): + my_string: str + + +@asset +def asset_requires_bar(bar: BarResource) -> str: + return bar.my_string + + +def test_asset_requires_bar(): + result = asset_requires_bar(bar=BarResource(my_string="bar")) + ... +``` + +#### Testing assets with complex resources + +In order to test assets which rely on complex resources, such as those that build separate clients, a common pattern is to use tools such as the `mock` library to fake your resource and associated client. See the section on [Testing ops with complex resources](#testing-ops-with-complex-resources) for an example. + +### Testing multiple assets together + +You may want to test multiple assets together, to more closely mirror actual materialization. 
This can be done using the method, which loads the materialized results of assets into memory: + +```python file=/concepts/ops_jobs_graphs/unit_tests.py startafter=start_materialize_asset endbefore=end_materialize_asset +from dagster import asset, materialize_to_memory + + +@asset +def data_source(): + return get_data_from_source() + + +@asset +def structured_data(data_source): + return extract_structured_data(data_source) + + +# An example unit test using materialize_to_memory +def test_data_assets(): + result = materialize_to_memory([data_source, structured_data]) + assert result.success + # Materialized objects can be accessed in terms of the underlying op + materialized_data = result.output_for_node("structured_data") + ... +``` + +Mock resources can be provided directly using `materialize_to_memory`: + +```python file=/concepts/ops_jobs_graphs/unit_tests.py startafter=start_materialize_resources endbefore=end_materialize_resources +from dagster import asset, materialize_to_memory, ConfigurableResource +import mock + + +class MyServiceResource(ConfigurableResource): ... + + +@asset +def asset_requires_service(service: MyServiceResource): ... + + +@asset +def other_asset_requires_service(service: MyServiceResource): ... + + +def test_assets_require_service(): + # Mock objects can be provided directly. + result = materialize_to_memory( + [asset_requires_service, other_asset_requires_service], + resources={"service": mock.MagicMock()}, + ) + assert result.success + ... +``` --- -## Examples +## Testing asset checks + +Functions decorated with can be directly invoked. For example: + +```python file=/concepts/assets/asset_checks/test_asset_check.py startafter=start endbefore=end +import pandas as pd -Check out the following test examples: +from dagster import AssetCheckResult, asset_check -- [Ops](#testing-ops) -- [Software-defined assets](#testing-software-defined-assets) -- [Multiple software-defined assets defined together](#testing-multiple-software-defined-assets-together) -- [Job execution with config](#testing-job-execution-with-config) -- [Event stream](#testing-event-stream) -- [Jobs with top-level inputs](#testing-jobs-with-top-level-inputs) -### Testing ops +@asset_check(asset=orders) +def orders_id_has_no_nulls(): + return AssetCheckResult(passed=True) + + +def test_orders_check(): + assert orders_id_has_no_nulls().passed +``` + +--- + +## Testing ops While using the decorator on a function does change its signature, the invocation mirrors closely the underlying decorated function. @@ -185,7 +326,7 @@ def test_inputs_op_with_invocation(): assert my_op_with_inputs(5, 6) == 11 ``` -#### Testing ops with config +### Testing ops with config If your op has a config schema, you can pass a config value to the invocation. The following op relies on attached config: @@ -209,7 +350,7 @@ def test_op_with_config(): assert op_requires_config(MyOpConfig(my_int=5)) == 10 ``` -#### Testing ops with resources +### Testing ops with resources If your op requires resources, you can specify them as arguments when invoking the op. @@ -237,7 +378,7 @@ def test_op_with_resource(): Note that when directly invoking ops, I/O managers specified on inputs and outputs are not used. -#### Testing ops with complex resources +### Testing ops with complex resources If your ops rely on more complex resources, such as those that build separate clients, a common pattern is to use tools such as the `mock` library to fake your resource and associated client. @@ -248,8 +389,7 @@ import mock class MyClient: ... 
- def query(self, body: str): - ... + def query(self, body: str): ... class MyClientResource(ConfigurableResource): username: str @@ -274,7 +414,7 @@ def test_my_op(): assert my_op(mocked_client_resource) == "my_result" ``` -#### Testing ops that rely on context +### Testing ops that rely on context Finally, if your op relies on context, you can build and pass a context. The following op uses the context logger: @@ -292,149 +432,9 @@ def test_op_with_context(): context_op(context) ``` -### Testing software-defined assets - -Similar to ops, -decorated functions can be directly invoked. Doing so invokes the underlying op computation. - -A basic asset, with no dependencies: - -```python file=/concepts/ops_jobs_graphs/unit_tests.py startafter=start_test_basic_asset endbefore=end_test_basic_asset -from dagster import asset - - -@asset -def basic_asset(): - return 5 - - -# An example unit test for basic_asset. -def test_basic_asset(): - assert basic_asset() == 5 -``` - -An asset with dependencies: - -```python file=/concepts/ops_jobs_graphs/unit_tests.py startafter=start_test_input_asset endbefore=end_test_input_asset -from dagster import asset - - -@asset -def asset_with_inputs(x, y): - return x + y - - -# An example unit test for asset_with_inputs. -def test_asset_with_inputs(): - assert asset_with_inputs(5, 6) == 11 -``` - -#### Testing assets with config - -If your asset has a config schema, you can pass a config value to the invocation. The following asset relies on attached config: - -```python file=/concepts/ops_jobs_graphs/unit_tests.py startafter=start_test_config_asset endbefore=end_test_config_asset -from dagster import asset, Config - - -class MyAssetConfig(Config): - my_string: str - - -@asset -def asset_requires_config(config: MyAssetConfig) -> str: - return config.my_string - - -def test_asset_requires_config(): - result = asset_requires_config(config=MyAssetConfig(my_string="foo")) - ... -``` - -#### Testing assets with resources - -If your asset requires resources, you can specify them as arguments when invoking the asset directly. - -Consider the following asset, which requires a resource `bar`. - -```python file=/concepts/ops_jobs_graphs/unit_tests.py startafter=start_test_resource_asset endbefore=end_test_resource_asset -from dagster import asset, ConfigurableResource - - -class BarResource(ConfigurableResource): - my_string: str - - -@asset -def asset_requires_bar(bar: BarResource) -> str: - return bar.my_string - - -def test_asset_requires_bar(): - result = asset_requires_bar(bar=BarResource(my_string="bar")) - ... -``` - -#### Testing assets with complex resources - -In order to test assets which rely on complex resources, such as those that build separate clients, a common pattern is to use tools such as the `mock` library to fake your resource and associated client. See the section on [Testing ops with complex resources](#testing-ops-with-complex-resources) for an example. - -### Testing multiple software-defined assets together - -You may want to test multiple assets together, to more closely mirror actual materialization. 
This can be done using the method, which loads the materialized results of assets into memory: - -```python file=/concepts/ops_jobs_graphs/unit_tests.py startafter=start_materialize_asset endbefore=end_materialize_asset -from dagster import asset, materialize_to_memory - - -@asset -def data_source(): - return get_data_from_source() - - -@asset -def structured_data(data_source): - return extract_structured_data(data_source) - - -# An example unit test using materialize_to_memory -def test_data_assets(): - result = materialize_to_memory([data_source, structured_data]) - assert result.success - # Materialized objects can be accessed in terms of the underlying op - materialized_data = result.output_for_node("structured_data") - ... -``` - -Mock resources can be provided directly using `materialize_to_memory`: - -```python file=/concepts/ops_jobs_graphs/unit_tests.py startafter=start_materialize_resources endbefore=end_materialize_resources -from dagster import asset, materialize_to_memory, ConfigurableResource -import mock - - -class MyServiceResource(ConfigurableResource): - ... - - -@asset -def asset_requires_service(service: MyServiceResource): - ... - - -@asset -def other_asset_requires_service(service: MyServiceResource): - ... +--- - -def test_assets_require_service(): - # Mock objects can be provided directly. - result = materialize_to_memory( - [asset_requires_service, other_asset_requires_service], - resources={"service": mock.MagicMock()}, - ) - assert result.success - ... -``` +## Testing jobs ### Testing job execution with config @@ -461,7 +461,43 @@ def test_job_with_config(): assert result.output_for_node("subtract") == -2 ``` -### Testing event stream +### Testing jobs with top-level inputs + +You can wire inputs from the top-level of a job to the constituent ops. Consider the following op and graph: + +```python file=/concepts/ops_jobs_graphs/jobs.py startafter=start_top_level_input_graph endbefore=end_top_level_input_graph +from dagster import graph, op + + +@op +def op_with_input(x): + return do_something(x) + + +@graph +def wires_input(x): + op_with_input(x) +``` + +Turn the graph into a job by calling , and provide a value to the input `x` using the `input_values` argument: + +```python file=/concepts/ops_jobs_graphs/jobs.py startafter=start_top_level_input_job endbefore=end_top_level_input_job +the_job = wires_input.to_job(input_values={"x": 5}) +``` + +You can also provide input values using or : + +```python file=/concepts/ops_jobs_graphs/jobs.py startafter=start_execute_in_process_input endbefore=end_execute_in_process_input +graph_result = wires_input.execute_in_process(input_values={"x": 5}) + +job_result = the_job.execute_in_process( + input_values={"x": 6} +) # Overrides existing input value +``` + +--- + +## Testing the event stream The event stream is the most generic way that an op communicates what happened during its computation. Ops communicate events for starting, input/output type checking, and user-provided events such as expectations, materializations, and outputs. @@ -505,36 +541,44 @@ def test_event_stream(): assert materialization.label == "persisted_string" ``` -### Testing jobs with top-level inputs - -You can wire inputs from the top-level of a job to the constituent ops. 
Consider the following op and graph: - -```python file=/concepts/ops_jobs_graphs/jobs.py startafter=start_top_level_input_graph endbefore=end_top_level_input_graph -from dagster import graph, op - - -@op -def op_with_input(x): - return do_something(x) - - -@graph -def wires_input(x): - op_with_input(x) -``` +--- -Turn the graph into a job by calling , and provide a value to the input `x` using the `input_values` argument: +## APIs in this guide -```python file=/concepts/ops_jobs_graphs/jobs.py startafter=start_top_level_input_job endbefore=end_top_level_input_job -the_job = wires_input.to_job(input_values={"x": 5}) -``` - -You can also provide input values using or : +| Name | Description | +| ------------------------------------------------------------------ | ------------------------------------------------------------------------------------------------------------------------------------- | +| | A method to execute a job synchronously, typically for testing job execution or running standalone scripts. | +| | A method to construct an , typically to provide to the invocation of an op for testing. | +| | A method to construct an , typically to provide to the invocation of an asset for testing. | +| | Ephemerally materializes a provided list of assets for testing. | -```python file=/concepts/ops_jobs_graphs/jobs.py startafter=start_execute_in_process_input endbefore=end_execute_in_process_input -graph_result = wires_input.execute_in_process(input_values={"x": 5}) +--- -job_result = the_job.execute_in_process( - input_values={"x": 6} -) # Overrides existing input value -``` +## Related + + + + + + + + + diff --git a/docs/content/concepts/types.mdx b/docs/content/concepts/types.mdx index 3794e0bdf6c28..bcad2c69bd369 100644 --- a/docs/content/concepts/types.mdx +++ b/docs/content/concepts/types.mdx @@ -5,7 +5,7 @@ description: The Dagster type system helps you describe what kind of values your # Dagster Types -The Dagster type system helps you describe what kind of values your software-defined assets and ops accept and produce. +The Dagster type system helps you describe what kind of values your asset definitions and ops accept and produce. ## Relevant APIs diff --git a/docs/content/concepts/webserver/graphql-client.mdx b/docs/content/concepts/webserver/graphql-client.mdx index 15244000bf0de..7b3f242d1bf81 100644 --- a/docs/content/concepts/webserver/graphql-client.mdx +++ b/docs/content/concepts/webserver/graphql-client.mdx @@ -55,11 +55,11 @@ from dagster_graphql import DagsterGraphQLClient client = DagsterGraphQLClient("localhost", port_number=3000) ``` -If you are using Dagster Cloud, you can configure your client against the Dagster Cloud API by passing your deployment-specific URL and a User Token to the client as follows: +If you are using Dagster+, you can configure your client against the Dagster+ API by passing your deployment-specific URL and a User Token to the client as follows: ```python file=/concepts/webserver/graphql/client_example.py startafter=start_cloud_usage endbefore=end_cloud_usage url = "yourorg.dagster.cloud/prod" # Your deployment-scoped url -user_token = ( # a User Token generated from the Organization Settings page in Dagster Cloud. +user_token = ( # a User Token generated from the Organization Settings page in Dagster+. 
"your_token_here" ) client = DagsterGraphQLClient(url, headers={"Dagster-Cloud-Api-Token": user_token}) diff --git a/docs/content/concepts/webserver/graphql.mdx b/docs/content/concepts/webserver/graphql.mdx index ac197689edf2b..a5f765e33275a 100644 --- a/docs/content/concepts/webserver/graphql.mdx +++ b/docs/content/concepts/webserver/graphql.mdx @@ -71,39 +71,14 @@ Dagster also provides a Python client to interface with Dagster's GraphQL API fr ### Get a list of Dagster runs - - -To retrieve a list of all runs, use the `runsOrError` query. - -```shell -query RunsQuery { - runsOrError { - __typename - ... on Runs { - results { - runId - jobName - status - runConfigYaml - startTime - endTime - } - } - } -} -``` - ---- - - You may eventually accumulate too many runs to return in one query. The `runsOrError` query takes in optional `cursor` and `limit` arguments for pagination: ```shell -query PaginatedRunsQuery { +query PaginatedRunsQuery($cursor: String) { runsOrError( - cursor: "7fd2e5ef-5591-43db-be15-1ebbbbed8bb5" + cursor: $cursor limit: 10 ) { __typename @@ -136,8 +111,12 @@ The `runsOrError` query also takes in an optional filter argument, of type `Runs For example, the following query will return all failed runs: ```shell -query FilteredRunsQuery { - runsOrError(filter: { statuses: [FAILURE] }) { +query FilteredRunsQuery($cursor: String) { + runsOrError( + filter: { statuses: [FAILURE] } + cursor: $cursor + limit: 10 + ) { __typename ... on Runs { results { diff --git a/docs/content/concepts/webserver/ui-user-settings.mdx b/docs/content/concepts/webserver/ui-user-settings.mdx new file mode 100644 index 0000000000000..9735e0854e171 --- /dev/null +++ b/docs/content/concepts/webserver/ui-user-settings.mdx @@ -0,0 +1,25 @@ +--- +title: "Managing user settings in the Dagster UI | Dagster Docs" +description: "Manage your user settings in the Dagster UI." +--- + +# Managing your user settings & preferences + +The **User settings** page in the [Dagster UI](/concepts/webserver/ui) allows you to define settings like your timezone and theme and enable experimental features. + +--- + +## Accessing your settings & preferences + +To access your settings and preferences: + +- **In Dagster Open Source (OSS)**: Click the **gear icon** in the upper right corner of the UI +- **In Dagster+**: Click the **user menu (your icon) > User Settings** in the upper right corner of the UI + +A window will display where you can define settings. + +--- + +## Experimental feature settings + +Use the toggles next to the features in the **Experimental features** section of your **User settings** to enable and disable new features. We'd love your feedback! diff --git a/docs/content/concepts/webserver/ui.mdx b/docs/content/concepts/webserver/ui.mdx index 27615e609c641..09da6f3b1670d 100644 --- a/docs/content/concepts/webserver/ui.mdx +++ b/docs/content/concepts/webserver/ui.mdx @@ -59,7 +59,9 @@ height={1010} ### Asset catalog -- **Description**: The **Asset catalog** page lists all [assets](/concepts/assets/software-defined-assets) in your Dagster deployment, which can be filtered by asset key and/or [asset group](/concepts/assets/software-defined-assets#grouping-assets). Clicking an asset opens the [**Asset details** page](#asset-details) for that asset. You can also navigate to the [Global asset lineage](#global-asset-lineage) page, reload definitions, and materialize assets. 
+- **Description**: The **Asset catalog** page lists all [assets](/concepts/assets/software-defined-assets) in your Dagster deployment, which can be filtered by asset key, compute kind, [asset group](/concepts/assets/software-defined-assets#grouping-assets), [code location](/concepts/code-locations), and [tags](/concepts/metadata-tags/tags). Clicking an asset opens the [**Asset details** page](#asset-details) for that asset. You can also navigate to the [Global asset lineage](#global-asset-lineage) page, reload definitions, and materialize assets. + + **If using Dagster+ Pro**, you have the option to use a new version of this page. Click the **Asset catalog (Dagster+ Pro)** tab for more information. - **Accessed by:** Clicking **Assets** in the top navigation bar @@ -68,8 +70,58 @@ height={1010} The Asset Catalog page in the Dagster UI + + + + +### Asset catalog (Dagster+ Pro) + +A Dagster+ Pro plan is required to use this feature. + +- **Description**: This version of the **Asset catalog** page includes all the information and functionality of the original page, broken out by compute kind, [asset group](/concepts/assets/software-defined-assets#grouping-assets), [code location](/concepts/code-locations), [tags](/concepts/metadata-tags/tags), and [owners](/concepts/metadata-tags/asset-metadata#asset-owners), etc. On this page, you can: + + - View all [assets](/concepts/assets/software-defined-assets) in your Dagster deployment + - View [details](#asset-details) about a specific asset by clicking on it + - Search assets by asset key, compute kind, asset group, code location, tags, owners, etc. + - Access the [Global asset lineage](#global-asset-lineage) + - Reload definitions + +- **Accessed by:** Clicking **Catalog** in the top navigation + + + + + + + + +### Catalog views (Dagster+) + +- **Description**: **Catalog views** save a set of filters against the **Asset catalog** to show only the assets you want to see. You can share these views for easy access and faster team collaboration. With **Catalog views**, you can: + + - Filter for a scoped set of [assets](/concepts/assets/software-defined-assets) in your Dagster deployment + - Create shared views of assets for easier team collaboration + +- **Accessed by:** + + - Clicking **Catalog** in the top navigation + - **From the [Global asset lineage](#global-asset-lineage)**: Clicking **View global asset lineage**, located near the top right corner of the **Catalog** page + + + + @@ -95,8 +147,8 @@ height={1944} The Global asset lineage page in the Dagster UI @@ -104,13 +156,15 @@ height={1944} ### Asset details -- **Description**: The **Asset details** page contains details about a single asset. On this page, you can: +- **Description**: The **Asset details** page contains details about a single asset. Use the tabs on this page to view detailed information about the asset: - - View the asset's **Event** (materialization) history - - View any **Plots** associated with the asset - - View the asset's **Definition** - - View the asset's **Lineage** in the [**Global asset lineage** page](#global-asset-lineage) - - Materialize the asset + - **Overview** - Information about the asset such as its description, resources, config, type, etc. 
+ - **Partitions** - The asset's partitions, including their materialization status, metadata, and run information + - **Events** - The asset's materialization history + - **Checks** - The [Asset checks](/concepts/assets/asset-checks) defined for the asset + - **Lineage** - The asset's lineage in the [**Global asset lineage** page](#global-asset-lineage) + - **Automation** - The [Declarative Automation conditions](/concepts/automation/declarative-automation) associated with the asset + - **Insights** - **Dagster+ only.** Historical information about the asset, such as failures and credit usage. Refer to the [Dagster+ Insights](/dagster-plus/insights) documentation for more information. - **Accessed by**: Clicking an asset in the [**Asset catalog**](#asset-catalog) @@ -119,8 +173,8 @@ height={1944} The Asset Details page in the Dagster UI @@ -225,7 +279,7 @@ height={826} ### All schedules -- **Description**: The **Schedules** page lists all [schedules](/concepts/partitions-schedules-sensors/schedules) defined in your Dagster deployment, as well as information about upcoming ticks for anticipated scheduled runs. Click a schedule to open the [**Schedule details**](#schedule-details) page. +- **Description**: The **Schedules** page lists all [schedules](/concepts/automation/schedules) defined in your Dagster deployment, as well as information about upcoming ticks for anticipated scheduled runs. Click a schedule to open the [**Schedule details**](#schedule-details) page. - **Accessed by**: Clicking **Overview (top nav) > Schedules tab** @@ -574,24 +628,24 @@ height={1942} - + -### Cloud +### Dagster+ -In addition to the [**Code locations** tab](#code-locations-tab), Dagster Cloud deployments contain a few additional tabs. Click the tabs below for more information. +In addition to the [**Code locations** tab](#code-locations-tab), Dagster+ deployments contain a few additional tabs. Click the tabs below for more information. #### Agents tab -- **Description**: The **Agents** tab contains information about the agents in a Dagster Cloud deployment. Refer to the [Dagster Cloud agent documentation](/dagster-cloud/deployment/agents) for more info. +- **Description**: The **Agents** tab contains information about the agents in a Dagster+ deployment. Refer to the [Dagster+ agent documentation](/dagster-plus/deployment/agents) for more info. - **Accessed by**: On the [**Deployment overview**](#code-locations-tab) page, clicking the **Agents** tab - + + - - - - -### Managing Hybrid Deployment agents - -Hybrid Deployments use an agent that is responsible for executing your code. Learn how to spin up and maintain an agent in your infrastructure. - - - - - - - - - - ---- - -## Managing organization settings - - - - - ---- - -## Managing authentication and users - -Learn how to manage users and permissions in Dagster Cloud. You can secure your account using our out-of-the-box support for Google and GitHub SSO, or take full control with SAML SSO. - - - - - - - - - - ---- - -## Managing deployments - -Learn how to deploy your code to Dagster Cloud, use command line tools, set up CI/CD, and define environment variables. 
- - - - - - - - - -### Environment variables and secrets - - - - - - -### Branch deployments - - - - - - diff --git a/docs/content/dagster-cloud/account.mdx b/docs/content/dagster-cloud/account.mdx deleted file mode 100644 index fe4e1479afd05..0000000000000 --- a/docs/content/dagster-cloud/account.mdx +++ /dev/null @@ -1,16 +0,0 @@ ---- -title: "Managing your Dagster Cloud account | Dagster Docs" - -platform_type: "cloud" ---- - -# Managing Dagster Cloud organization settings - -Learn to manage your Dagster Cloud account. - - - - diff --git a/docs/content/dagster-cloud/account/authentication.mdx b/docs/content/dagster-cloud/account/authentication.mdx deleted file mode 100644 index 4582d59738437..0000000000000 --- a/docs/content/dagster-cloud/account/authentication.mdx +++ /dev/null @@ -1,83 +0,0 @@ ---- -title: "Dagster Cloud authentication and users | Dagster Docs" ---- - -# Dagster Cloud authentication and user management - -In this guide, we'll cover Dagster Cloud's supported authentication methods and some resources for managing users in your organization. - ---- - -## Authentication methods - -### Single Sign-on - -Dagster Cloud supports Single Sign-on (SSO) with the following providers: - -- **Google**. Users must be [added in Dagster Cloud](/dagster-cloud/account/managing-users#adding-users) before they'll be able to log in using their Google accounts. -- GitHub - -### SAML - -To provide administrators with more control, Dagster Cloud also supports SAML authentication with the following Identity Providers (IdP): - - - - - - - - - -**Note**: Users must be assigned to the Dagster app in the IdP to be able to log in to Dagster Cloud. Refer to the setup guide for your IdP for more info. - -### SCIM provisioning - -The [System for Cross-domain Identity Management specification](https://scim.cloud/) (SCIM) is a standard designed to manage user identity information. When enabled in Dagster Cloud alongside SSO, SCIM allows you to efficiently and easily manage users in your IdP and sync their information to Dagster Cloud. - - - - - - ---- - -## User management and role-based access control - -Role-based access control (RBAC) enables you to grant specific permissions to users in your organization, ensuring that Dagster users have access to what they require in Dagster Cloud, and no more. - - - - - - diff --git a/docs/content/dagster-cloud/account/authentication/okta/saml-sso.mdx b/docs/content/dagster-cloud/account/authentication/okta/saml-sso.mdx deleted file mode 100644 index c046e5573cd7a..0000000000000 --- a/docs/content/dagster-cloud/account/authentication/okta/saml-sso.mdx +++ /dev/null @@ -1,140 +0,0 @@ ---- -title: "Setting up Okta SSO for Dagster Cloud | Dagster Docs" - -display_name: "Okta" -feature_name: "saml_sso_okta" -pricing_plan: "enterprise" ---- - -# Setting up Okta SSO for Dagster Cloud - -This guide is applicable to Dagster Cloud. - -In this guide, you'll configure Okta to use single sign-on (SSO) with your Dagster Cloud organization. 
- ---- - -## Prerequisites - -To complete the steps in this guide, you'll need: - -- **An existing Okta account** -- **To install the [`dagster-cloud` CLI](/dagster-cloud/managing-deployments/dagster-cloud-cli)** -- **The following in Dagster Cloud:** - - An Enterprise plan - - [Access to a user token](/dagster-cloud/account/managing-user-agent-tokens#managing-user-tokens) - - [Organization Admin permissions](/dagster-cloud/account/managing-users/managing-user-roles-permissions#user-permissions-reference) in your organization - ---- - -## Step 1: Add the Dagster Cloud app in Okta - -1. Sign in to your Okta Admin Dashboard. - -2. Using the sidebar, click **Applications > Applications**. - -3. On the **Applications** page, click **Browse App Catalog**. - - Okta Browse App Catalog - -4. On the **Browse App Integration Catalog** page, search for `Dagster Cloud`: - - Okta App Integration Catalog - -5. Add and save the application. - ---- - -## Step 2: Configure SSO in Okta - -1. In Okta, open the application and navigate to its **General Settings**. - -2. In the **Subdomain** field, enter your Dagster Cloud organization name. This is used to route the SAML response to the correct Dagster Cloud subdomain. - - In the following example, the organization name is `hooli` and our Dagster Cloud domain is `https://hooli.dagster.cloud`. To configure this correctly, we'd enter `hooli` into the **Subdomain** field: - - Okta Subdomain Configuration - -3. When finished, click **Done**. - ---- - -## Step 3: Upload the SAML metadata to Dagster Cloud - -Next, you'll save and upload the application's SAML metadata to Dagster Cloud. This will enable single sign-on. - -1. In Okta, navigate to the Dagster Cloud application. - -2. Navigate to **Sign On**. - -3. Click **Identity Provider metadata** to initiate a download. This will save the SAML metadata file to your computer. - - Okta Save Identity Provider Metadata - -4. After you've downloaded the SAML metadata file, upload it to Dagster Cloud using the `dagster-cloud` CLI: - - ```shell - dagster-cloud organization settings saml upload-identity-provider-metadata \ - --api-token= \ - --url https://.dagster.cloud - ``` - ---- - -## Step 4: Grant access to users - -Next, you'll assign users to the Dagster Cloud application in Okta. This will allow them to log in using their Okta credentials when the single sign-on flow is initiated. - -1. In the Dagster Cloud application, navigate to **Assignments**. -2. Click **Assign > Assign to People**. -3. For each user you want to have access to Dagster Cloud, click **Assign** then **Save and Go Back**. - ---- - -## Step 5: Test your SSO configuration - -Lastly, you'll test your SSO configuration: - -- [Service provider (SP)-initiated login](#testing-a-service-provider-initiated-login) -- [Identity provider (idP)-initiated login](#testing-an-identity-provider-initiated-login) - -### Testing a service provider-initiated login - -1. Navigate to your Dagster Cloud sign in page at `https://.dagster.cloud` - -2. Click the **Sign in with SSO** button. - -3. Initiate the login flow and address issues that arise, if any. - -### Testing an identity provider-initiated login - -In the Okta **Applications** page, click the **Dagster Cloud** icon: - - - -If successful, you'll be automatically signed into your Dagster Cloud organization. 
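
For illustration, here is the same upload command with example values filled in, reusing the `hooli` example organization from Step 2; the environment variable holding the user token is a placeholder:

```shell
# Example values only: substitute your own organization name and a user token
# generated from the Organization Settings page.
dagster-cloud organization settings saml upload-identity-provider-metadata \
  --api-token=$DAGSTER_CLOUD_USER_TOKEN \
  --url https://hooli.dagster.cloud
```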
diff --git a/docs/content/dagster-cloud/account/authentication/okta/scim-provisioning.mdx b/docs/content/dagster-cloud/account/authentication/okta/scim-provisioning.mdx
deleted file mode 100644
index 6a2e6e472f32e..0000000000000
--- a/docs/content/dagster-cloud/account/authentication/okta/scim-provisioning.mdx
+++ /dev/null
@@ -1,194 +0,0 @@
----
-title: "Setting up Okta SCIM provisioning for Dagster Cloud | Dagster Docs"
-
-display_name: "Okta"
-feature_name: "scim_okta"
-pricing_plan: "enterprise"
----
-
-# Setting up Okta SCIM provisioning for Dagster Cloud
-
-The [System for Cross-domain Identity Management specification](https://scim.cloud/) (SCIM) is a standard designed to manage user identity information. When enabled in Dagster Cloud, SCIM allows you to efficiently and easily manage users in your Identity Provider (IdP) - in this case, Okta - and sync their information to Dagster Cloud.
-
-In this guide, we'll walk you through configuring [Okta SCIM provisioning](https://developer.okta.com/docs/concepts/scim/) for Dagster Cloud.
-
----
-
-## About this feature
-
-
-
-### Supported features
-
-With Dagster Cloud's Okta SCIM provisioning feature, you can:
-
-
-
-Refer to [Okta's SCIM documentation](https://developer.okta.com/docs/concepts/scim/) for more information about Okta's SCIM offering.
-
-
-
-### Limitations
-
-Dagster Cloud currently supports the following attributes for SCIM syncing:
-
-- `user.firstName`
-- `user.lastName`
-- `user.email`, which must match the user's username in Okta
-- `user.displayName`
-
-
-
----
-
-## Prerequisites
-
-To complete the steps in this guide, you'll need:
-
-- **To have set up Okta SSO for Dagster Cloud.** Refer to the [Okta SSO setup guide](/dagster-cloud/account/authentication/okta/saml-sso) for more info.
-- **Permissions in Okta that allow you to configure applications.**
-- **The following in Dagster Cloud:**
-  - An Enterprise plan
-  - [Organization Admin permissions](/dagster-cloud/account/managing-users/managing-user-roles-permissions#user-permissions-reference) in your organization
-
----
-
-## Step 1: Enable SCIM provisioning in Dagster Cloud
-
-1. Sign in to your Dagster Cloud account.
-2. Click the **user menu (your icon) > Organization Settings**.
-3. Click the **Provisioning** tab.
-4. If SCIM provisioning isn't enabled, click the **Enable SCIM provisioning** button to enable it.
-5. Click **Create SCIM token** to create an API token. This token will be used to authenticate requests from Okta to Dagster Cloud.
-
-Keep the API token handy - you'll need it in the next step.
-
----
-
-## Step 2: Enable SCIM provisioning in Okta
-
-1. Sign in to your Okta Admin Dashboard.
-
-2. Using the sidebar, click **Applications > Applications**.
-
-3. Click the Dagster Cloud app. **Note**: If you haven't set up SSO for Okta, [follow this guide](/dagster-cloud/account/authentication/okta/saml-sso) to do so before continuing.
-
-4. Click the **Sign On** tab and complete the following:
-
-   1. Click **Edit**.
-
-   2. In the **Advanced Sign-on Settings** section, enter the name of your organization in the **Organization** field.
-
-   3. In the **Credential Details** section, set the **Application username format** field to **Email**:
-
-      Configured Sign On tab of Dagster Cloud Okta application
-
-   4. Click **Save**.
-
-5. Click the **Provisioning** tab and complete the following:
-
-   1. Click **Configure API Integration**.
-
-   2. Check the **Enable API integration** checkbox that displays.
-
-   3.
In the **API Token** field, paste the Dagster Cloud API token you generated in [Step 1](#step-1-enable-scim-provisioning-in-dagster-cloud): - - Configured Provisioning tab of Dagster Cloud Okta application - - 4. Click **Test API Credentials** to verify that your organization and API token work correctly. - - 5. When finished, click **Save**. - ---- - -## Step 3: Enable user syncing in Okta - -After you confirm that your API credentials work in the Dagster Cloud Okta application, you can enable user syncing: - -1. In the Dagster Cloud Okta app, click the **Provisioning** tab. - -2. In the **Settings** panel, click **To App**. - -3. Click **Edit**. - -4. Next to **Create Users**, check the **Enable** checkbox: - - Highlighted Create users setting and default username setting in Okta - - **Note**: The default username used to create accounts must be set to **Email** or user provisioning may not work correctly. - -5. Optionally, check **Enable** next to **Update User Attributes** and **Deactivate Users** to enable these features. - -6. When finished, click **Save**. - ---- - -## Step 4: Enable group syncing in Okta - - - This step is required only if you want to sync Okta user groups to Dagster - Cloud as{" "} - Teams. - - -When **Push groups** is enabled in Okta, you can sync user groups from Okta to Dagster Cloud as [Teams](/dagster-cloud/account/managing-users/managing-teams). Refer to the [Okta documentation](https://help.okta.com/oie/en-us/Content/Topics/users-groups-profiles/usgp-enable-group-push.htm) for setup instructions. - ---- - -## Next steps - -That's it! Once Okta successfully syncs users to Dagster Cloud, synced users will have a 'synced' icon next to them in the Dagster Cloud users page: - - - -Refer to the [Utilizing SCIM provisioning guide](/dagster-cloud/account/authentication/utilizing-scim-provisioning) for more info about how user and team management works when SCIM provisioning is enabled. - ---- - -## Related - - - - - - - - diff --git a/docs/content/dagster-cloud/account/authentication/setting-up-azure-ad-saml-sso.mdx b/docs/content/dagster-cloud/account/authentication/setting-up-azure-ad-saml-sso.mdx deleted file mode 100644 index 19341a70199d3..0000000000000 --- a/docs/content/dagster-cloud/account/authentication/setting-up-azure-ad-saml-sso.mdx +++ /dev/null @@ -1,139 +0,0 @@ ---- -title: "Setting up Azure Active Directory SSO for Dagster Cloud | Dagster Docs" - -platform_type: "cloud" -display_name: "Azure AD" -feature_name: "saml_sso_azure" -pricing_plan: "enterprise" ---- - -# Setting up Azure Active Directory SSO for Dagster Cloud - -This guide is applicable to Dagster Cloud. - -In this guide, you'll configure Azure Active Directory (AD) to use single sign-on (SSO) with your Dagster Cloud organization. - ---- - -## Prerequisites - -To complete the steps in this guide, you'll need: - -- **An existing Azure AD account** -- **To install the [`dagster-cloud` CLI](/dagster-cloud/managing-deployments/dagster-cloud-cli)** -- **The following in Dagster Cloud:** - - [Access to a user token](/dagster-cloud/account/managing-user-agent-tokens#managing-user-tokens) - - [Organization Admin permissions](/dagster-cloud/account/managing-users/managing-user-roles-permissions#user-permissions-reference) in your organization - ---- - -## Step 1: Add the Dagster Cloud app in Azure AD - -In this step, you'll add the Dagster Cloud app to your list of managed SaaS apps in Azure AD. - -1. Sign in to the Azure portal. -2. 
On the left navigation pane, click the **Azure Active Directory** service. -3. Navigate to **Enterprise Applications** and then **All Applications**. -4. Click **New application**. -5. In the **Add from the gallery** section, type **Dagster Cloud** in the search box. -6. Select **Dagster Cloud** from the results panel and then add the app. Wait a few seconds while the app is added to your tenant. - ---- - -## Step 2: Configure SSO in Azure AD - -In this step, you'll configure and enable SSO for Azure AD in your Azure portal. - -1. On the **Dagster Cloud** application integration page, locate the **Manage** section and select **single sign-on**. - -2. On the **Select a single sign-on method** page, select **SAML**. - -3. On the **Set up single sign-on with SAML** page, click the pencil icon for **Basic SAML Configuration** to edit the settings. - - Settings Dropdown - -4. In the **Basic SAML Configuration** section, fill in the **Identifier** and **Reply URL** fields as follows: - - Copy and paste the following URL, replacing `` with your Dagster Cloud organization name: - - https://.dagster.cloud/auth/saml/consume - -5. Click **Set additional URLs**. - -6. In the **Sign-on URL** field, copy and paste the URL you entered in the **Identifier** and **Reply URL** fields. - -7. Next, you'll configure the SAML assertions. In addition to the default attributes, Dagster Cloud requires the following: - - - `FirstName` - `user.givenname` - - `LastName` - `user.surname` - - `Email` - `user.userprincipalname` - - Add these attribute mappings to the SAML assertion. - -8. On the **Set up single sign-on with SAML** page: - - 1. Locate the **SAML Signing Certificate** section. - - 2. Next to **Federation Metadata XML**, click **Download**: - - Download SAML Certificate - - When prompted, save the SAML metadata file to your computer. - ---- - -## Step 3: Upload the SAML metadata to Dagster Cloud - -After you've downloaded the SAML metadata file, upload it to Dagster Cloud using the `dagster-cloud` CLI: - -```shell -dagster-cloud organization settings saml upload-identity-provider-metadata \ - --api-token= \ - --url https://.dagster.cloud -``` - ---- - -## Step 4: Create a test user - -In this section, you'll create a test user in the Azure portal. - -1. From the left pane in the Azure portal, click **Azure Active Directory**. -2. Click **Users > All users**. -3. Click **New user** at the top of the screen. -4. In **User** properties, fill in the following fields: - - **Name**: Enter `B.Simon`. - - **User name**: Enter `B.Simon@contoso.com`. - - Select the **Show password** check box and write down the value displayed in the **Password** box. -5. Click **Create**. - ---- - -## Step 5: Test your SSO configuration - -Lastly, you'll test your SSO configuration: - -- [Service provider (SP)-initiated login](#testing-a-service-provider-initiated-login) -- [Identity provider (idP)-initiated login](#testing-an-identity-provider-initiated-login) - -### Testing a service provider-initiated login - -1. Navigate to your Dagster Cloud sign in page at `https://.dagster.cloud` - -2. Click the **Sign in with SSO** button. - -3. Initiate the login flow and address issues that arise, if any. - -### Testing an identity provider-initiated login - -Click **Test this application** in the Azure portal. If successful, you'll be automatically signed into your Dagster Cloud organization. 
diff --git a/docs/content/dagster-cloud/account/authentication/setting-up-google-workspace-saml-sso.mdx b/docs/content/dagster-cloud/account/authentication/setting-up-google-workspace-saml-sso.mdx deleted file mode 100644 index 1f952ba7cbe04..0000000000000 --- a/docs/content/dagster-cloud/account/authentication/setting-up-google-workspace-saml-sso.mdx +++ /dev/null @@ -1,183 +0,0 @@ ---- -title: "Setting up Google Workspace SSO for Dagster Cloud | Dagster Docs" - -platform_type: "cloud" -display_name: "Google Workspace" -feature_name: "saml_sso_google" -pricing_plan: "enterprise" ---- - -# Setting up Google Workspace SSO for Dagster Cloud - -This guide is applicable to Dagster Cloud. - -In this guide, you'll configure Google Workspace to use single sign-on (SSO) with your Dagster Cloud organization. - ---- - -## Prerequisites - -To complete the steps in this guide, you'll need: - -- **The following in Google**: - - An existing Google account - - [Workspace Admin permissions](https://support.google.com/a/answer/6365252?hl=en\&ref_topic=4388346) -- **To install the [`dagster-cloud` CLI](/dagster-cloud/managing-deployments/dagster-cloud-cli)** -- **The following in Dagster Cloud:** - - [Access to a user token](/dagster-cloud/account/managing-user-agent-tokens#managing-user-tokens) - - [Organization Admin permissions](/dagster-cloud/account/managing-users/managing-user-roles-permissions#user-permissions-reference) in your organization - ---- - -## Step 1: Add the Dagster Cloud app in Google Workspace - -1. Navigate to your Google Admin Console. - -2. Using the sidebar, navigate to **Apps > Web and mobile apps**: - - Google Workspace Sidebar - -3. On the **Web and mobile apps** page, click **Add App > Add custom SAML app**: - - Add App - - This opens a new page for adding app details. - ---- - -## Step 2: Configure SSO in Google Workspace - -1. On the **App details** page: - - 1. Fill in the **App name** field. - - 2. Fill in the **Description** field. - - The page should look similar to the following: - - Application Details - - 3. Click **Continue**. - -2. On the **Google Identity Provider details** page, click **Continue**. No action is required for this page. - -3. On the **Service provider details** page: - - 1. In the **ACS URL** and **Entity ID** fields: - - Copy and paste the following URL, replacing `<organization_name>` with your Dagster Cloud organization name: - - https://<organization_name>.dagster.cloud/auth/saml/consume - - 2. Check the **Signed Response** box. - - The page should look similar to the image below. In this example, the organization's name is `hooli` and the Dagster Cloud domain is `https://hooli.dagster.cloud`: - - Service Provider Details - - 3. When finished, click **Continue**. - -4. On the **Attributes** page: - - 1. Click **Add mapping** to add and configure the following attributes: - - - **Basic Information > First Name** - `FirstName` - - **Basic Information > Last Name** - `LastName` - - **Basic Information > Email** - `Email` - - The page should look like the following image: - - Attribute Mapping - - 2. Click **Finish**. - ---- - -## Step 3: Upload the SAML metadata to Dagster Cloud - -Next, you'll save and upload the application's SAML metadata to Dagster Cloud. This will enable single sign-on. - -1. In your Google Workspace, open the Dagster Cloud application you added in [Step 2](#step-2-configure-sso-in-google-workspace). - -2. Click **Download metadata**: - - SAML Metadata - -3. In the modal that displays, click **Download metadata** to start the download. 
Save the file to your computer. - -4. After you've downloaded the SAML metadata file, upload it to Dagster Cloud using the `dagster-cloud` CLI: - - ```shell - dagster-cloud organization settings saml upload-identity-provider-metadata \ - --api-token=<user_token> \ - --url https://<organization_name>.dagster.cloud - ``` - ---- - -## Step 4: Grant access to users - -In this step, you'll assign users in your Google Workspace to the Dagster Cloud application. This allows members of the workspace to log in to Dagster Cloud using their credentials when the single sign-on flow is initiated. - -1. In the Google Workspace Dagster Cloud application, click **User access**. -2. Select an organizational unit. -3. Click **ON for everyone**. -4. Click **Save**. - - Assign New Login - ---- - -## Step 5: Test your SSO configuration - -Lastly, you'll test your SSO configuration: - -- [Service provider (SP)-initiated login](#testing-a-service-provider-initiated-login) -- [Identity provider (IdP)-initiated login](#testing-an-identity-provider-initiated-login) - -### Testing a service provider-initiated login - -1. Navigate to your Dagster Cloud sign-in page at `https://<organization_name>.dagster.cloud` - -2. Click the **Sign in with SSO** button. - -3. Initiate the login flow and address issues that arise, if any. - -### Testing an identity provider-initiated login - -In the Google Workspace portal, click on the **Dagster Cloud icon**. If successful, you'll be automatically signed into your Dagster Cloud organization. diff --git a/docs/content/dagster-cloud/account/authentication/setting-up-onelogin-saml-sso.mdx b/docs/content/dagster-cloud/account/authentication/setting-up-onelogin-saml-sso.mdx deleted file mode 100644 index 21a246023871b..0000000000000 --- a/docs/content/dagster-cloud/account/authentication/setting-up-onelogin-saml-sso.mdx +++ /dev/null @@ -1,145 +0,0 @@ ---- -title: "Setting up OneLogin SSO for Dagster Cloud | Dagster Docs" - -platform_type: "cloud" -display_name: "OneLogin" -feature_name: "saml_sso_onelogin" -pricing_plan: "enterprise" ---- - -# Setting up OneLogin SSO for Dagster Cloud - -This guide is applicable to Dagster Cloud. - -In this guide, you'll configure OneLogin to use single sign-on (SSO) with your Dagster Cloud organization. - ---- - -## Prerequisites - -To complete the steps in this guide, you'll need: - -- **The following in OneLogin:** - - An existing OneLogin account - - Admin permissions -- **To install the [`dagster-cloud` CLI](/dagster-cloud/managing-deployments/dagster-cloud-cli)** -- **The following in Dagster Cloud:** - - [Access to a user token](/dagster-cloud/account/managing-user-agent-tokens#managing-user-tokens) - - [Organization Admin permissions](/dagster-cloud/account/managing-users/managing-user-roles-permissions#user-permissions-reference) in your organization - ---- - -## Step 1: Add the Dagster Cloud app in OneLogin - -1. Sign into your OneLogin portal. - -2. Navigate to **Administration > Applications**. - -3. On the **Applications** page, click **Add App**. - -4. On the **Find Applications** page, search for `Dagster Cloud`: - - Find Applications - -5. Add and save the application. - ---- - -## Step 2: Configure SSO in OneLogin - -1. In OneLogin, open the application and navigate to its **Configuration**. - -2. In the **Dagster Cloud organisation name** field, enter your Dagster Cloud organization name. This is used to route the SAML response to the correct Dagster Cloud subdomain. 
- - In the following example, the organization name is `hooli` and our Dagster Cloud domain is `https://hooli.dagster.cloud`. To configure this correctly, we'd enter `hooli` into the **Subdomain** field: - - OneLogin Subdomain Configuration - -3. When finished, click **Done**. - ---- - -## Step 3: Upload the SAML metadata to Dagster Cloud - -Next, you'll save and upload the application's SAML metadata to Dagster Cloud. This will enable single sign-on. - -1. In OneLogin, open the Dagster Cloud application. - -2. Navigate to **More Actions > SAML Metadata**. - -3. When prompted, save the file to your computer. - -4. After you've downloaded the SAML metadata file, upload it to Dagster Cloud using the `dagster-cloud` CLI: - - ```shell - dagster-cloud organization settings saml upload-identity-provider-metadata \ - --api-token=<user_token> \ - --url https://<organization_name>.dagster.cloud - ``` - ---- - -## Step 4: Grant access to users - -Next, you'll assign users to the Dagster Cloud application in OneLogin. This will allow them to log in using their OneLogin credentials when the sign-in flow is initiated. - -1. In OneLogin, navigate to **Users**. - -2. Select a user. - -3. On the user's page, click **Applications**. - -4. Assign the user to Dagster Cloud. In the following image, we've assigned user `Test D'Test` to Dagster Cloud: - - Assign New Login - -5. Click **Continue**. - -6. Click **Save User.** - -7. Repeat steps 2-6 for every user you want to access Dagster Cloud. - ---- - -## Step 5: Test your SSO configuration - -Lastly, you'll test your SSO configuration: - -- [Service provider (SP)-initiated login](#testing-a-service-provider-initiated-login) -- [Identity provider (IdP)-initiated login](#testing-an-identity-provider-initiated-login) - -### Testing a service provider-initiated login - -1. Navigate to your Dagster Cloud sign-in page at `https://<organization_name>.dagster.cloud` - -2. Click the **Sign in with SSO** button. - -3. Initiate the login flow and address issues that arise, if any. - -### Testing an identity provider-initiated login - -In the OneLogin portal, click the Dagster Cloud icon: - - - -If successful, you'll be automatically signed into your Dagster Cloud organization. diff --git a/docs/content/dagster-cloud/account/authentication/setting-up-pingone-saml-sso.mdx b/docs/content/dagster-cloud/account/authentication/setting-up-pingone-saml-sso.mdx deleted file mode 100644 index 567676c6c728d..0000000000000 --- a/docs/content/dagster-cloud/account/authentication/setting-up-pingone-saml-sso.mdx +++ /dev/null @@ -1,192 +0,0 @@ ---- -title: "Setting up PingOne SSO for Dagster Cloud | Dagster Docs" - -display_name: "PingOne" -feature_name: "saml_sso_pingone" -pricing_plan: "enterprise" ---- - -# Setting up PingOne SSO for Dagster Cloud - -This guide is applicable to Dagster Cloud. - -In this guide, you'll configure PingOne to use single sign-on (SSO) with your Dagster Cloud organization. - ---- - -## Prerequisites - -To complete the steps in this guide, you'll need: - -- **The following in PingOne:** - - An existing PingOne account - - Organization admin permissions -- **To install the [`dagster-cloud` CLI](/dagster-cloud/managing-deployments/dagster-cloud-cli)** -- **The following in Dagster Cloud:** - - [Access to a user token](/dagster-cloud/account/managing-user-agent-tokens#managing-user-tokens) - - [Organization Admin permissions](/dagster-cloud/account/managing-users/managing-user-roles-permissions#user-permissions-reference) in your organization - ---- - -## Step 1: Add the Dagster Cloud app in PingOne - -1. 
Sign into your PingOne Console. - -2. Using the sidebar, click **Connections > Applications**. - - PingOne Sidebar - -3. On the **Applications** page, add an application. - -4. In **Select an application type**, click **Web app**. - -5. Click **SAML > Configure**: - - Add App - ---- - -## Step 2: Configure SSO in PingOne - -1. In the **Create App Profile** page: - - 1. Add an application name, description, and icon: - - Application Details - - 2. When finished, click **Save and Continue.** - -2. In the **Configure SAML** page: - - 1. Fill in the following: - - - **ACS URLS** and **Entity ID**: Copy and paste the following URL, replacing `<organization_name>` with your Dagster Cloud organization name: - - https://<organization_name>.dagster.cloud/auth/saml/consume - - - **Assertion Validity Duration**: Type `60`. - - In the following example, the organization's name is `hooli` and the Dagster Cloud domain is `https://hooli.dagster.cloud`: - - Service Provider Details - - 2. When finished, click **Save and Continue.** - -3. In the **Map Attributes** page: - - 1. Configure the following attributes: - - | Application attribute | Outgoing value | - | --------------------- | -------------- | - | Email | Email Address | - | FirstName | Given Name | - | LastName | Family Name | - - The page should look similar to the following: - - Attribute Mapping - - 2. When finished, click **Save and Continue.** - ---- - -## Step 3: Upload the SAML metadata to Dagster Cloud - -Next, you'll save and upload the application's SAML metadata to Dagster Cloud. This will enable single sign-on. - -1. In PingOne, open the Dagster Cloud application. - -2. Click the **Configuration** tab. - -3. In the **Connection Details** section, click **Download Metadata**: - - SAML Metadata - -4. When prompted, save the file to your computer. - -5. After you've downloaded the SAML metadata file, upload it to Dagster Cloud using the `dagster-cloud` CLI: - - ```shell - dagster-cloud organization settings saml upload-identity-provider-metadata \ - --api-token=<user_token> \ - --url https://<organization_name>.dagster.cloud - ``` - ---- - -## Step 4: Grant access to users - -Next, you'll assign users to the Dagster Cloud application in PingOne. This will allow them to log in using their PingOne credentials when the single sign-on flow is initiated. - -1. In the Dagster Cloud application, click the **Access** tab. - -2. Click the **pencil icon** to edit the **Group membership policy**: - - Assign New Login - -3. Edit the policy as needed to grant users access to the application. - ---- - -## Step 5: Test your SSO configuration - -Lastly, you'll test your SSO configuration: - -- [Service provider (SP)-initiated login](#testing-a-service-provider-initiated-login) -- [Identity provider (IdP)-initiated login](#testing-an-identity-provider-initiated-login) - -### Testing a service provider-initiated login - -1. Navigate to your Dagster Cloud sign-in page at `https://<organization_name>.dagster.cloud` - -2. Click the **Sign in with SSO** button. - -3. Initiate the login flow and address issues that arise, if any. - -### Testing an identity provider-initiated login - -In the PingOne application portal, click the **Dagster Cloud** icon: - - - -If successful, you'll be automatically signed in to your Dagster Cloud organization. 
diff --git a/docs/content/dagster-cloud/account/authentication/utilizing-scim-provisioning.mdx b/docs/content/dagster-cloud/account/authentication/utilizing-scim-provisioning.mdx deleted file mode 100644 index e3f9a67aa1c7e..0000000000000 --- a/docs/content/dagster-cloud/account/authentication/utilizing-scim-provisioning.mdx +++ /dev/null @@ -1,96 +0,0 @@ ---- -title: "Utilizing SCIM provisioning in Dagster Cloud | Dagster Docs" - -display_name: "SCIM" -feature_name: "scim" -pricing_plan: "enterprise" ---- - -# Utilizing SCIM provisioning in Dagster Cloud - -The [System for Cross-domain Identity Management specification](https://scim.cloud/) (SCIM) is a standard designed to manage user identity information. With SCIM, you can: - - - ---- - -## Understanding SCIM provisioning - -SCIM provisioning eases the burden of manually provisioning users across your cloud applications. When enabled, you can automatically sync user information from your IdP to Dagster Cloud and back again, ensuring user data is always up-to-date. - -For a detailed look at SCIM provisioning, [check out this blog post](https://www.strongdm.com/blog/scim-provisioning). - -### Managing users - -When SCIM is enabled in Dagster Cloud, a few things about user management will change: - -- **New users must be added in the IdP.** The ability to add new users will be disabled in Dagster Cloud while SCIM is enabled. -- **Only 'unsynced' users can be removed in Dagster Cloud.** 'Synced' users will have an icon indicating they're externally managed by the IdP, while unsynced users will not. For example, the first two users in the following image are synced, while the last isn't: - - Highlighted unsynced user in the Dagster Cloud UI - - You might see unsynced users in Dagster Cloud when: - - - **Users exist in Dagster Cloud, but not in the IdP.** In this case, create matching users in the IdP and then provision them. This will link the IdP users to the Dagster Cloud users. - - **Users are assigned to the Dagster Cloud IdP app before provisioning is enabled.** In this case, you'll need to provision the users in the IdP to link them to the Dagster Cloud users. - -If you choose to disable SCIM provisioning in Dagster Cloud, users and teams will remain as-is at the time SCIM is disabled. - -### Managing teams - -In addition to the above user management changes, there are a few things to keep in mind when managing user groups, otherwise known as Dagster Cloud [teams](/dagster-cloud/account/managing-users/managing-teams). - -User groups in your IdP can be mapped to Dagster Cloud teams, allowing you to centralize the management of user groups and memberships. When SCIM is enabled: - -- **Teams can still be managed in Dagster Cloud.** You can choose to map and sync these teams to the IdP or administer them solely in Dagster Cloud. Synced groups should be managed only in the IdP, or changes made in Dagster Cloud may be overwritten when a sync is triggered from the IdP. -- **If a group exists only in the IdP** and is synced to Dagster Cloud, you'll be prompted to either create a new Dagster Cloud team with the same name or create a link between the IdP group and an existing team in Dagster Cloud. -- **If a group exists only in Dagster Cloud**, the group will display in the IdP as an 'external' group with no members. In this case, you can either create a new group in the IdP and link it to an existing Dagster Cloud team, or choose to manage the team only in Dagster Cloud. 
- ---- - -## Enabling SCIM provisioning - -### Prerequisites - -To use SCIM provisioning, you'll need: - -- A Dagster Cloud Enterprise plan -- [An IdP for which Dagster Cloud supports SSO and SCIM provisioning](#supported-identity-providers) -- Permissions in your IdP that allow you to configure SSO and SCIM provisioning - -### Supported Identity Providers - -Dagster Cloud currently supports SCIM provisioning for the following Identity Providers (IdP): - - - - - - -Use the setup guide for your IdP to get started. - ---- - -## Related - - - - - diff --git a/docs/content/dagster-cloud/account/managing-user-agent-tokens.mdx b/docs/content/dagster-cloud/account/managing-user-agent-tokens.mdx deleted file mode 100644 index 293c007df0006..0000000000000 --- a/docs/content/dagster-cloud/account/managing-user-agent-tokens.mdx +++ /dev/null @@ -1,69 +0,0 @@ ---- -title: Managing user and agent tokens in Dagster Cloud | Dagster Docs ---- - -# Managing user and agent tokens in Dagster Cloud - -This guide is applicable to Dagster Cloud. - -In this guide, we'll walk you through creating and revoking user and agent tokens in Dagster Cloud. - ---- - -## Managing agent tokens - - - To manage agent tokens, you need to be an{" "} - Organization Admin in - Dagster Cloud. - - -Agent tokens are used to authenticate [Hybrid agents](/dagster-cloud/deployment/agents) with the Dagster Cloud Agents API. - -1. Sign in to your Dagster Cloud account. -2. Click the **user menu (your icon) > Organization Settings**. -3. Click the **Tokens** tab. -4. Click **+ Create agent token**. - -After the token is created: - -- **To edit a token's description**, click the **pencil icon**. -- **To view a token**, click **Reveal token**. Clicking on the token value will copy it to the clipboard. -- **To revoke a token**, click **Revoke**. - ---- - -## Managing user tokens - - - To manage user tokens, you need{" "} - - one of the following user roles - {" "} - in Dagster Cloud: -
- An Organization Admin, or
- An Editor or Admin in at least one deployment
    - -1. Sign in to your Dagster Cloud account. -2. Click the **user menu (your icon) > Organization Settings**. -3. Click the **Tokens** tab. -4. Click **+ Create user token**. - -After the token is created: - -- **To edit a token's description**, click the **pencil icon**. -- **To view a token**, click **Reveal token**. Clicking on the token value will copy it to the clipboard. -- **To revoke a token**, click **Revoke**. - -To manage tokens for another user, select the user from the **Manage tokens for** dropdown: - - - -**Note**: **Admin** or **Organization Admin** permissions are required to manage another user's tokens. diff --git a/docs/content/dagster-cloud/account/managing-users.mdx b/docs/content/dagster-cloud/account/managing-users.mdx deleted file mode 100644 index 9ba44aaed9c45..0000000000000 --- a/docs/content/dagster-cloud/account/managing-users.mdx +++ /dev/null @@ -1,131 +0,0 @@ ---- -title: Managing users in Dagster Cloud | Dagster Docs ---- - -# Managing users in Dagster Cloud - -This guide is applicable to Dagster Cloud. - -In this guide, we'll cover how to add and remove users in your Dagster Cloud organization. - -**Note**: If utilizing [SCIM provisioning](/dagster-cloud/account/authentication/utilizing-scim-provisioning), you'll need to manage users through your Identity Provider (IdP) instead of Dagster Cloud. - ---- - -## Adding users - - - Organization Admin or Admin permissions are - required to add users in Dagster Cloud. - - -Before you start, note that: - -- **If SCIM provisioning is enabled,** you'll need to add new users in your IdP. Adding users will be disabled in Dagster Cloud. -- **If using Google for SSO**, users must be added in Dagster Cloud before they can log in. -- **If using an Identity Provider (IdP) like Okta for SSO**, users must be assigned to the Dagster app in the IdP to be able to log in to Dagster Cloud. Refer to the [SSO setup guides](/dagster-cloud/account/authentication#single-sign-on) for setup instructions for each of our supported IdP solutions. - - By default, users will be granted Viewer permissions on each deployment. The default role can be adjusted by modifying the [`sso_default_role` deployment setting](/dagster-cloud/managing-deployments/deployment-settings-reference#sso-default-role). - -1. Sign in to your Dagster Cloud account. -2. Click the **user menu (your icon) > Organization Settings**. -3. Click the **Users** tab. -4. Click **Add new user.** -5. In the **User email** field, enter the user's email address. -6. Click **Add user**. The user will be added to the list of users. - -After the user is created, you can [add the user to teams and assign user roles for each deployment](#managing-user-permissions). - ---- - -## Managing user permissions - - - Organization Admin permissions are required to manage users - in Dagster Cloud. - - -After a user is created, the **Manage user permissions** window will automatically display. You can also access this window by clicking **Edit** next to a user in the users table. - - - - - -### Adding users to teams - -Teams are a Dagster Cloud Enterprise feature. - -Using the **Teams** field, you can add users to one or more teams. This is useful for centralizing permission sets for different types of users. Refer to the [Managing teams](/dagster-cloud/account/managing-users/managing-teams) guide for more info about creating and managing teams. 
- - - - -**Note**: When determining a user's level of access, Dagster Cloud will use the **most permissive** role assigned to the user across all of their team memberships and any individual role grants. Refer to the [Managing user roles and permissions](/dagster-cloud/account/managing-users/managing-user-roles-permissions#applying-role-overrides) guide for more info. - -### Assigning user roles - -In the **Roles** section, you can select the appropriate [user role](/dagster-cloud/account/managing-users/managing-user-roles-permissions) for each deployment. - -1. Next to a deployment, click **Edit user role**. -2. Select the user role for the deployment. This [user role](/dagster-cloud/account/managing-users/managing-user-roles-permissions) will be used as the default for all code locations in the deployment. -3. Click **Save**. -4. **Enterprise only**: To set permissions for individual [code locations](/dagster-cloud/account/managing-users/managing-user-roles-permissions#code-locations) in a deployment: - 1. Click the toggle to the left of the deployment to open a list of code locations. - 2. Next to a code location, click **Edit user role**. - 3. Select the user role for the code location. - 4. Click **Save**. -5. Repeat the previous steps for each deployment. -6. **Optional**: To change the user's permissions for branch deployments: - 1. Next to **All branch deployments**, click **Edit user role**. - 2. Select the user role to use for all branch deployments. - 3. Click **Save**. -7. Click **Done**. - ---- - -## Removing users - - - Organization Admin permissions are required to remove users - in Dagster Cloud. - - -Removing a user removes them from the organization. **Note**: If using a SAML-based SSO solution like Okta, you'll also need to remove the user from the IdP. Removing the user in Dagster Cloud doesn't remove them from the IdP. - -1. Sign in to your Dagster Cloud account. -2. Click the **user menu (your icon) > Organization Settings**. -3. Click the **Users** tab. -4. Locate the user in the user list. -5. Click **Edit**. -6. Click **Remove user**. -7. When prompted, confirm the removal. - ---- - -## Related - - - - - - diff --git a/docs/content/dagster-cloud/account/managing-users/managing-teams.mdx b/docs/content/dagster-cloud/account/managing-users/managing-teams.mdx deleted file mode 100644 index 59e73b8abeaa3..0000000000000 --- a/docs/content/dagster-cloud/account/managing-users/managing-teams.mdx +++ /dev/null @@ -1,125 +0,0 @@ ---- -title: Managing teams in Dagster Cloud | Dagster Docs ---- - -# Managing teams in Dagster Cloud - -This guide is applicable to Dagster Cloud Enterprise. - -As part of our [role-based access control (RBAC) feature](/dagster-cloud/account/managing-users/managing-user-roles-permissions), Dagster Cloud supports the ability to assign users to teams. A team is a group of users with a set of default deployment, code location, and Branch Deployment user roles. - -In this guide, we'll cover how to add, manage, and remove teams in Dagster Cloud. - ---- - -## Prerequisites - -To use this feature, you'll need a **Dagster Cloud Enterprise plan.** - ---- - -## Adding teams - - - - Organization Admin permissions - {" "} - are required to add teams. - - -1. In the Dagster Cloud UI, click the **user menu (your icon) > Organization Settings**. -2. Click the **Teams** tab. -3. Click the **Create a team** button. -4. In the window that displays, enter a name in the **Team name** field. -5. Click **Create team**. 
- -After the team is created, you can [add users and assign user roles to deployments](#managing-team-members-and-roles). - ---- - -## Managing team members and roles - -In the **Organization Settings > Teams** tab: - -1. Locate the team you want to modify in the table of teams. -2. Click the **Edit** button in the team's row. - -From here, you can [manage team members](#managing-team-members) and [the team's roles for deployments](#managing-team-roles). - -### Managing team members - - - - Organization Admin permissions - {" "} - are required to add and remove team members. - - -#### Adding team members - -1. In the **Members** tab, use the search bar to locate a user in your organization. -2. Once located, click the user. -3. Click **Add user to team**. -4. Repeat as needed, clicking **Done** when finished. - -#### Removing team members - -1. In the **Members** tab, locate the user in the list of team members. -2. Click **Remove from team**. -3. Repeat as needed, clicking **Done** when finished. - -### Managing team roles - - - - Organization Admin or Admin permissions - {" "} - are required to manage team roles. Additionally, Admins can only manage teams - for deployments where they are an Admin. - - -1. In the **Roles** tab, click the **Edit team role** button next to the deployment where you want to modify the team's role. -2. In the window that displays, select the team role for the deployment. This [role](/dagster-cloud/account/managing-users/managing-user-roles-permissions) will be used as the default for this team for all code locations in the deployment. -3. Click **Save**. -4. To set permissions for individual [code locations](/dagster-cloud/account/managing-users/managing-user-roles-permissions#code-locations) in a deployment: - 1. Click the toggle to the left of the deployment to open a list of code locations. - 2. Next to a code location, click **Edit team role**. - 3. Select the team role for the code location. - 4. Click **Save**. - ---- - -## Removing teams - - - - Organization Admin permissions - {" "} - are required to remove teams. - - -1. In the Dagster Cloud UI, click the **user menu (your icon) > Organization Settings**. -2. Click the **Teams** tab. -3. Locate the team you want to delete in the table of teams. -4. Click the **Edit** button in the team's row. -5. Click the **Delete team** button. -6. When prompted, confirm the deletion. - ---- - -## Related - - - - - - diff --git a/docs/content/dagster-cloud/deployment.mdx b/docs/content/dagster-cloud/deployment.mdx deleted file mode 100644 index 106778785de2d..0000000000000 --- a/docs/content/dagster-cloud/deployment.mdx +++ /dev/null @@ -1,27 +0,0 @@ ---- -title: "Dagster Cloud deployment types | Dagster Docs" ---- - -# Dagster Cloud deployment types - -Dagster Cloud currently offers two deployment options to meet your needs: Serverless and Hybrid. - ---- - -## Serverless deployments - -A Serverless deployment allows you to run Dagster jobs without spinning up any infrastructure. This fully-managed version of Dagster Cloud is the easiest way to get started with Dagster. - -[Learn more about Serverless deployments](/dagster-cloud/deployment/serverless). - ---- - -## Hybrid deployments - -A Hybrid deployment utilizes a combination of your infrastructure and Dagster-hosted backend services. - -The Dagster backend services - including the web frontend, GraphQL API, metadata database, and daemons (responsible for executing schedules and sensors) - are hosted in Dagster Cloud. 
- -An [agent](/dagster-cloud/deployment/agents) you run in your infrastructure is responsible for executing your Dagster code. - -[Learn more about Hybrid deployments](/dagster-cloud/deployment/hybrid). diff --git a/docs/content/dagster-cloud/deployment/agents.mdx b/docs/content/dagster-cloud/deployment/agents.mdx deleted file mode 100644 index 805638060b1e6..0000000000000 --- a/docs/content/dagster-cloud/deployment/agents.mdx +++ /dev/null @@ -1,159 +0,0 @@ ---- -title: "Dagster Cloud Hybrid agents | Dagster Docs" - -platform_type: "cloud" ---- - -# Dagster Cloud Hybrid agents - -For [Hybrid deployments](/dagster-cloud/deployment/hybrid), Dagster Cloud uses an agent that is responsible for executing your code. The agent streams metadata about code execution over HTTPS to Dagster Cloud’s Agent API. - -Dagster Cloud currently supports agents running on the following backends: -
| Name | How it works |
| --- | --- |
| Amazon Elastic Container Service (ECS) | The Amazon Elastic Container Service (ECS) agent executes Dagster jobs as Amazon Web Services (AWS) ECS tasks. This agent is appropriate for scaled production deployments; ECS is a good choice for teams who have already standardized on ECS or who don't plan to run their own container orchestration infrastructure. |
| Docker | The Docker agent executes Dagster jobs in Docker containers on your computer. |
| Kubernetes | The Kubernetes agent executes Dagster jobs on a Kubernetes cluster. This agent is appropriate for scaled production deployments and is a good choice for teams who have already standardized on Kubernetes. |
| Local | The local agent executes Dagster jobs as processes on your computer. |
    - ---- - -## Amazon ECS agents - - - - - - - - - ---- - -## Docker agents - - - - - - ---- - -## Kubernetes agents - - - - - - ---- - -## Local agents - - - - - ---- - -## Customizing agents - - - - - - diff --git a/docs/content/dagster-cloud/deployment/agents/amazon-ecs.mdx b/docs/content/dagster-cloud/deployment/agents/amazon-ecs.mdx deleted file mode 100644 index b41ea36fa0a1d..0000000000000 --- a/docs/content/dagster-cloud/deployment/agents/amazon-ecs.mdx +++ /dev/null @@ -1,32 +0,0 @@ ---- -title: "Dagster Cloud Amazon ECS agents | Dagster Docs" ---- - -# Dagster Cloud Amazon ECS agents - -Used with a Dagster Cloud [Hybrid deployment](/dagster-cloud/deployment/hybrid), the Amazon Elastic Container Service (ECS) agent executes Dagster jobs as Amazon Web Services (AWS) ECS tasks. - -This agent is appropriate for scaled production deployments; ECS is a good choice for teams who have already standardized on ECS or who don't plan to run their own container orchestration infrastructure. - - - - - - - - diff --git a/docs/content/dagster-cloud/deployment/agents/amazon-ecs/configuration-reference.mdx b/docs/content/dagster-cloud/deployment/agents/amazon-ecs/configuration-reference.mdx deleted file mode 100644 index cbeb1434d8040..0000000000000 --- a/docs/content/dagster-cloud/deployment/agents/amazon-ecs/configuration-reference.mdx +++ /dev/null @@ -1,308 +0,0 @@ ---- -title: Amazon ECS agent configuration reference | Dagster Docs - -platform_type: "cloud" ---- - -# Amazon ECS agent configuration reference - -This guide is applicable to Dagster Cloud. - -This reference describes the various configuration options Dagster Cloud currently supports for [Amazon ECS agents](/dagster-cloud/deployment/agents/amazon-ecs). - ---- - -## Per-location configuration - -When [adding a code location](/dagster-cloud/managing-deployments/code-locations) to Dagster Cloud with an Amazon ECS agent, you can use the `container_context` key on the location configuration to add additional ECS-specific configuration that will be applied to any ECS tasks associated with that code location. - -**Note**: If you're using the Dagster Cloud Github action, the `container_context` key can also be set for each location in your `dagster_cloud.yaml` file. 
- -The following example [`dagster_cloud.yaml`](/dagster-cloud/managing-deployments/dagster-cloud-yaml) file illustrates the available fields: - -```yaml -locations: - - location_name: cloud-examples - image: dagster/dagster-cloud-examples:latest - code_source: - package_name: dagster_cloud_examples - container_context: - ecs: - env_vars: - - DATABASE_NAME=staging - - DATABASE_PASSWORD - secrets: - - name: "MY_API_TOKEN" - valueFrom: "arn:aws:secretsmanager:us-east-1:123456789012:secret:FOO-AbCdEf:token::" - - name: "MY_PASSWORD" - valueFrom: "arn:aws:secretsmanager:us-east-1:123456789012:secret:FOO-AbCdEf:password::" - secrets_tags: - - "my_tag_name" - server_resources: # Resources for code servers launched by the agent for this location - cpu: 256 - memory: 512 - run_resources: # Resources for runs launched by the agent for this location - cpu: 4096 - memory: 16384 - execution_role_arn: arn:aws:iam::123456789012:role/MyECSExecutionRole - task_role_arn: arn:aws:iam::123456789012:role/MyECSTaskRole - mount_points: - - sourceVolume: myEfsVolume - containerPath: "/mount/efs" - readOnly: True - volumes: - - name: myEfsVolume - efsVolumeConfiguration: - fileSystemId: fs-1234 - rootDirectory: /path/to/my/data - server_sidecar_containers: - - name: DatadogAgent - image: public.ecr.aws/datadog/agent:latest - environment: - - name: ECS_FARGATE - value: true - run_sidecar_containers: - - name: DatadogAgent - image: public.ecr.aws/datadog/agent:latest - environment: - - name: ECS_FARGATE - value: true - server_ecs_tags: - - key: MyEcsTagKey - value: MyEcsTagValue - run_ecs_tags: - - key: MyEcsTagKeyWithoutValue - repository_credentials: MyRepositoryCredentialsSecretArn -``` - -### Environment variables and secrets - - - -Refer to the following guides for more info about environment variables: - -- [Dagster Cloud environment variables and secrets](/dagster-cloud/managing-deployments/environment-variables-and-secrets) -- [Using environment variables and secrets in Dagster code](/guides/dagster/using-environment-variables-and-secrets) - ---- - -## Per-job configuration: Resource limits - -You can use job tags to customize the CPU and memory of every run for that job: - -```py -from dagster import job, op - -@op() -def my_op(context): - context.log.info('running') - -@job( - tags = { - "ecs/cpu": "256", - "ecs/memory": "512", - } -) -def my_job(): - my_op() -``` - -[Fargate tasks only support certain combinations of CPU and memory.](https://docs.aws.amazon.com/AmazonECS/latest/developerguide/task-cpu-memory-error.html) - -If the `ecs/cpu` or `ecs/memory` tags are set, they will override any defaults set on the code location or the deployment. - ---- - -## Per-deployment configuration - -This section describes the properties of the `dagster.yaml` configuration file used by Amazon ECS agents. Typically, this file is created by the CloudFormation template that deploys the agent and can be found within the agent task definition's command. - -To change these properties, edit the CloudFormation template and redeploy the CloudFormation stack. 
- -```yaml -instance_class: - module: dagster_cloud - class: DagsterCloudAgentInstance - -dagster_cloud_api: - agent_token: <agent_token> - deployments: - - <deployment_name> - - <optional_second_deployment_name> - branch_deployments: <true|false> - -user_code_launcher: - module: dagster_cloud.workspace.ecs - class: EcsUserCodeLauncher - config: - cluster: <cluster_name> - subnets: - - <subnet_id> - - <optional_second_subnet_id> - security_group_ids: - - <security_group_id> - service_discovery_namespace_id: <service_discovery_namespace_id> - execution_role_arn: <task_execution_role_arn> - task_role_arn: <task_role_arn> - log_group: <log_group_name> - launch_type: <"FARGATE"|"EC2"> - server_process_startup_timeout: <timeout_in_seconds> - server_resources: - cpu: <cpu> - memory: <memory> - server_sidecar_containers: - - name: SidecarName - image: SidecarImage - - run_resources: - cpu: <cpu> - memory: <memory> - run_sidecar_containers: - - name: SidecarName - image: SidecarImage - - mount_points: - - <mount_point> - volumes: - - <volume> - server_ecs_tags: - - key: MyEcsTagKey - value: MyEcsTagValue - run_ecs_tags: - - key: MyEcsTagKeyWithoutValue - repository_credentials: MyRepositoryCredentialsSecretArn - isolated_agents: - enabled: <true|false> -``` - -### dagster_cloud_api properties - - - - An agent token for the agent to use for authentication. - - - The names of full deployments for the agent to serve. - - - Whether the agent should serve all branch deployments. - - - -### user_code_launcher properties - - - - The name of an ECS cluster with a Fargate or EC2 capacity provider. - - - An ECS launch type to use for your launched ECS tasks. The following are currently supported: -
- FARGATE
- EC2 - Note: Using this launch type requires you to have an EC2 capacity provider installed and additional operational overhead to run the agent.
    - - At least one subnet is required. Dagster Cloud tasks require a route to the internet so they can access our API server. How this requirement is satisfied depends on the type of subnet provided: - -
- Public subnets - The ECS agent will assign each task a public IP address. Note that ECS tasks on EC2 launched within public subnets do not have access to the internet, so a public subnet will only work for Fargate tasks.
- Private subnets - The ECS agent assumes you've configured a NAT gateway with an attached elastic IP. Tasks will not be assigned a public IP address.
- - A list of security groups to use for tasks launched by the agent. - - The name of a private DNS namespace. The ECS agent launches each code location as its own ECS service. The agent communicates with these services via AWS CloudMap service discovery. -
- - The ARN of the Amazon ECS task execution IAM role. This role allows ECS to interact with AWS resources on your behalf, such as getting an image from ECR or pushing logs to CloudWatch. Note: This role must include a trust relationship that allows ECS to use it. -
- - The ARN of the Amazon ECS task IAM role. This role allows the containers running in the ECS task to interact with AWS. Note: This role must include a trust relationship that allows ECS to use it. -
- - The name of a CloudWatch log group. - - The amount of time, in seconds, to wait for code to import when launching a new service for a code location. If your code takes an unusually long time to load after your ECS task starts up and results in timeouts in the Deployment tab, you can increase this setting above the default. Note: This setting isn't applicable to the time it takes for a job to execute. -
- Default: 180 (seconds)
    - - How long (in seconds) to wait for ECS to spin up a new service and task for a code server. If your ECS tasks take an unusually long time to start and result in timeouts, you can increase this setting above the default. -
- Default: 300 (seconds)
    - - How long (in seconds) to continue polling if an ECS API endpoint fails during creation of a new code server (because the ECS API is eventually consistent). -
- Default: 30 (seconds)
- - The resources that the agent should allocate to the ECS service for each code location that it creates. If set, must be a dictionary with a cpu and/or memory key. Note: Fargate tasks only support certain combinations of CPU and memory. - - Additional sidecar containers to include along with the Dagster container. If set, must be a list of dictionaries with valid ECS container definitions. - - The resources that the agent should allocate to the ECS task that it creates for each run. If set, must be a dictionary with a cpu and/or memory key. Note: Fargate tasks only support certain combinations of CPU and memory. - - Additional sidecar containers to include along with the Dagster container. If set, must be a list of dictionaries with valid ECS container definitions. - - Mount points to include in the Dagster container. If set, should be a list of dictionaries matching the mountPoints field when specifying a container definition to boto3. - - Additional volumes to include in the task definition. If set, should be a list of dictionaries matching the volumes argument to register_task_definition in boto3. - - Additional ECS tags to include in the service for each code location. If set, must be a list of dictionaries, each with a `key` key and an optional `value` key. - - Additional ECS tags to include in the task for each run. If set, must be a list of dictionaries, each with a `key` key and an optional `value` key. - - Optional ARN of the secret to authenticate into your private container registry. This does not apply if you are leveraging ECR for your images, see https://docs.aws.amazon.com/AmazonECS/latest/userguide/private-auth.html. - - -
    diff --git a/docs/content/dagster-cloud/deployment/agents/amazon-ecs/creating-ecs-agent-existing-vpc.mdx b/docs/content/dagster-cloud/deployment/agents/amazon-ecs/creating-ecs-agent-existing-vpc.mdx deleted file mode 100644 index cb3e9b0d7301b..0000000000000 --- a/docs/content/dagster-cloud/deployment/agents/amazon-ecs/creating-ecs-agent-existing-vpc.mdx +++ /dev/null @@ -1,91 +0,0 @@ ---- -title: Creating an Amazon Elastic Container Service agent in an existing VPC | Dagster Docs ---- - -# Creating an Amazon Elastic Container Service agent in an existing VPC - -This guide is applicable to Dagster Cloud. - -In this guide, you'll set up and deploy an Amazon Elastic Container Service (ECS) agent in an existing VPC using CloudFormation. Amazon ECS agents are used to launch user code in ECS tasks. - -Our CloudFormation template allows you to quickly spin up the ECS agent stack in an existing VPC. It also supports using a new or existing ECS cluster. The template code can be found [here](https://s3.amazonaws.com/dagster.cloud/cloudformation/ecs-agent.yaml). Refer to the [CloudFormation docs](https://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/Welcome.html) for more info about CloudFormation. - -**For info about deploying an ECS agent in a new VPC**, check out the [ECS agents in new VPCs guide](/dagster-cloud/deployment/agents/amazon-ecs/creating-ecs-agent-new-vpc). - ---- - -## Prerequisites - -To complete the steps in this guide, you'll need: - -- **In Dagster Cloud**: - - - **Your organization and deployment names.** - - **Permissions in Dagster Cloud that allow you to manage agent tokens**. Refer to the [User permissions documentation](/dagster-cloud/account/managing-users) for more info. - -- **In Amazon Web Services (AWS)**: - - **An existing VPC with the following:** - - **Subnets with access to the public internet**. Refer to the [AWS Work with VPCs guide](https://docs.aws.amazon.com/vpc/latest/userguide/working-with-vpcs.html) for more info. - - **Enabled `enableDnsHostnames` and `enableDnsSupport` DNS attributes**. Refer to the [AWS DNS attributes documentation](https://docs.aws.amazon.com/vpc/latest/userguide/vpc-dns.html#vpc-dns-support) for more info. - - **Optional**: An existing ECS cluster with a [Fargate or EC2 capacity provider](https://docs.aws.amazon.com/AmazonECS/latest/developerguide/cluster-capacity-providers.html). The CloudFormation template will create a cluster for you if one isn't specified. - ---- - -## Step 1: Generate a Dagster Cloud agent token - - - ---- - -## Step 2: Install the CloudFormation stack in AWS - -Press the "Launch Stack" button below to install the CloudFormation stack in your AWS account. - -[](https://console.aws.amazon.com/cloudformation/home#/stacks/create/review?templateURL=https://s3.amazonaws.com/dagster.cloud/cloudformation/ecs-agent.yaml) - -**Note**: Creating the CloudFormation stack may take a few minutes. Refresh the [AWS console **Stacks** page](https://console.aws.amazon.com/cloudformation/home#/stacks) to check the status. - ---- - -## Step 3: Configure the agent - -After the stack is installed, you'll be prompted to configure it. In the ECS wizard, fill in the following fields: - -- **Dagster Cloud Organization**: Enter the name of your Dagster Cloud organization. -- **Dagster Cloud Deployment**: Enter the name of the Dagster Cloud deployment you want to use. Leave this field empty if the agent will only serve Branch deployments. 
-- **Enable Branch Deployments**: Whether to have this agent serve your ephemeral [Branch deployments](/dagster-cloud/managing-deployments/branch-deployments). Only a single agent should have this setting enabled. -- **Agent Token**: Paste the agent token you generated in [Step 1](#step-1-generate-a-dagster-cloud-agent-token). -- **Deploy VPC**: The existing VPC to deploy the agent into. -- **Deploy VPC Subnet**: A public subnet of the existing VPC to deploy the agent into. -- **Existing ECS Cluster**: Optionally, the name of an existing ECS cluster to deploy the agent in. Leave blank to create a new cluster -- **Task Launch Type**: Optionally, the launch type to use for new tasks created by the agent (FARGATE or EC2). Defaults to FARGATE. - -The page should look similar to the following image. In this example, our organization name is `hooli` and our deployment is `prod`: - - - -After you've finished configuring the stack in AWS, you can view the agent in Dagster Cloud. To do so, navigate to the **Status** page and click the **Agents** tab. You should see the agent running in the **Agent statuses** section: - - - ---- - -## Next steps - -Now that you've got your agent running, what's next? - -- **If you're getting Dagster Cloud set up**, the next step is to [add a code location](/dagster-cloud/managing-deployments/code-locations) using the agent. - -- **If you're ready to load your Dagster code**, refer to the [Adding Code to Dagster Cloud](/dagster-cloud/managing-deployments/code-locations) guide for more info. - -If you need to upgrade your ECS agent's CloudFormation template, refer to the [upgrade guide](/dagster-cloud/deployment/agents/amazon-ecs/upgrading-cloudformation-template) for more info. diff --git a/docs/content/dagster-cloud/deployment/agents/amazon-ecs/creating-ecs-agent-new-vpc.mdx b/docs/content/dagster-cloud/deployment/agents/amazon-ecs/creating-ecs-agent-new-vpc.mdx deleted file mode 100644 index b32107b99e7dc..0000000000000 --- a/docs/content/dagster-cloud/deployment/agents/amazon-ecs/creating-ecs-agent-new-vpc.mdx +++ /dev/null @@ -1,96 +0,0 @@ ---- -title: Creating an Amazon Elastic Container Service agent in a new VPC | Dagster Docs ---- - -# Creating an Amazon Elastic Container Service agent in a new VPC - -This guide is applicable to Dagster Cloud. - -In this guide, you'll set up and deploy an Amazon Elastic Container Service (ECS) agent in a new VPC using CloudFormation. Amazon ECS agents are used to launch user code in ECS tasks. - -Our CloudFormation template allows you to quickly spin up the ECS agent stack. This template sets up an ECS agent from scratch, creating a new VPC and ECS cluster for the agent to run in. The template code can be found [here](https://s3.amazonaws.com/dagster.cloud/cloudformation/ecs-agent-vpc.yaml). Refer to the [CloudFormation docs](https://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/Welcome.html) for more info about CloudFormation. - -**For info about deploying an ECS agent in an existing VPC**, check out the [ECS agents in existing VPCs guide](/dagster-cloud/deployment/agents/amazon-ecs/creating-ecs-agent-existing-vpc). - ---- - -## Prerequisites - -To complete the steps in this guide, you'll need: - -- **In Dagster Cloud**: - - - **Your organization and deployment names.** - - **Permissions in Dagster Cloud that allow you to manage agent tokens**. Refer to the [User permissions documentation](/dagster-cloud/account/managing-users) for more info. 
- -- **In Amazon Web Services (AWS), you'll need an account**: - - - **Under its VPC quota limit in the region where you're spinning up the agent.** By default, AWS allows **five VPCs per region**. If you're already at your limit, refer to the [AWS VPC quotas documentation](https://docs.aws.amazon.com/vpc/latest/userguide/amazon-vpc-limits.html) for info on requesting a quota increase. - - - **With an ECS service-linked IAM role**. This role is required to complete the setup in ECS. AWS will automatically create the role in your account the first time you create an ECS cluster in the console. However, the IAM role isn't automatically created when ECS clusters are created via CloudFormation. - - If your account doesn't have this IAM role, running the CloudFormation template may fail. - - If you haven't created an ECS cluster before, complete one of the following before proceeding: - - - Create one using the [first run wizard](https://console.aws.amazon.com/ecs/home#/firstRun), or - - Create the IAM role using the [AWS CLI](https://docs.aws.amazon.com/AmazonECS/latest/developerguide/using-service-linked-roles.html#create-service-linked-role) - ---- - -## Step 1: Generate a Dagster Cloud agent token - - - ---- - -## Step 2: Install the CloudFormation stack in AWS - -Press the "Launch Stack" button below to install the CloudFormation stack in your AWS account. - -[](https://console.aws.amazon.com/cloudformation/home#/stacks/create/review?templateURL=https://s3.amazonaws.com/dagster.cloud/cloudformation/ecs-agent-vpc.yaml) - -**Note**: Creating the CloudFormation stack may take a few minutes. Refresh the [AWS console **Stacks** page](https://console.aws.amazon.com/cloudformation/home#/stacks) to check the status. - -If the installation fails, verify that your AWS account [meets the requirements listed above](#prerequisites). - ---- - -## Step 3: Configure the agent - -After the stack is installed, you'll be prompted to configure it. In the ECS wizard, fill in the following fields: - -- **Dagster Cloud Organization**: Enter the name of your Dagster Cloud organization. -- **Dagster Cloud Deployment**: Enter the name of the Dagster Cloud deployment you want to use. Leave this field empty if the agent will only serve Branch deployments. -- **Enable Branch Deployments**: Whether to have this agent serve your ephemeral [Branch deployments](/dagster-cloud/managing-deployments/branch-deployments). Only a single agent should have this setting enabled. -- **Agent Token**: Paste the agent token you generated in [Step 1](#step-1-generate-a-dagster-cloud-agent-token). - -The page should look similar to the following image. In this example, our organization name is `hooli` and our deployment is `prod`: - - - -After you've finished configuring the stack in AWS, you can view the agent in Dagster Cloud. To do so, navigate to the **Status** page and click the **Agents** tab. You should see the agent running in the **Agent statuses** section: - - - ---- - -## Next steps - -Now that you've got your agent running, what's next? - -- **If you're getting Dagster Cloud set up**, the next step is to [add a code location](/dagster-cloud/managing-deployments/code-locations) using the agent. - -- **If you're ready to load your Dagster code**, refer to the [Adding Code to Dagster Cloud](/dagster-cloud/managing-deployments/code-locations) guide for more info. 
- -If you need to upgrade your ECS agent's CloudFormation template, refer to the [upgrade guide](/dagster-cloud/deployment/agents/amazon-ecs/upgrading-cloudformation-template) for more info. diff --git a/docs/content/dagster-cloud/deployment/agents/customizing-configuration.mdx b/docs/content/dagster-cloud/deployment/agents/customizing-configuration.mdx deleted file mode 100644 index 60b54472aac9d..0000000000000 --- a/docs/content/dagster-cloud/deployment/agents/customizing-configuration.mdx +++ /dev/null @@ -1,96 +0,0 @@ ---- -title: Customizing agent configuration | Dagster Docs -description: Configure your agent. - -platform_type: "cloud" ---- - -# Customizing agent configuration - -This guide is applicable to Dagster Cloud. - -The Dagster Cloud Agent is a special variant of the Dagster instance used in [Dagster Open Source](/deployment/dagster-instance) and is configured through the same `dagster.yaml` file. You can customize your agent with these settings. - -**Note:** For [Kubernetes agents](/dagster-cloud/deployment/agents/kubernetes/configuring-running-kubernetes-agent) deployed with the Dagster Cloud Helm chart, you'll need to refer to the Helm chart's config map for customizing the agent. - ---- - -## Enabling user code server TTL - -User code servers support a configurable time-to-live (TTL). The agent will spin down any user code servers that haven’t served requests recently and will spin them back up the next time they’re needed. Configuring TTL can save compute cost because user code servers will spend less time sitting idle. - -To configure TTL: - -```yaml -# dagster.yaml -instance_class: - module: dagster_cloud.instance - class: DagsterCloudAgentInstance - -dagster_cloud_api: - agent_token: - env: DAGSTER_CLOUD_AGENT_TOKEN - deployment: prod - -user_code_launcher: - module: dagster_cloud.workspace.docker - class: DockerUserCodeLauncher - config: - server_ttl: - enabled: true - ttl_seconds: 7200 #2 hours -``` - ---- - -## Streaming compute logs - -You can set up streaming compute logs by configuring the log upload interval (in seconds). 
- -```yaml -# dagster.yaml -instance_class: - module: dagster_cloud.instance - class: DagsterCloudAgentInstance - -dagster_cloud_api: - agent_token: - env: DAGSTER_CLOUD_AGENT_TOKEN - deployment: prod - -user_code_launcher: - module: dagster_cloud.workspace.docker - class: DockerUserCodeLauncher - -compute_logs: - module: dagster_cloud - class: CloudComputeLogManager - config: - upload_interval: 60 -``` - ---- - -## Disabling compute logs - -You can disable forwarding compute logs to Dagster Cloud by configuring the `NoOpComputeLogManager` setting: - -```yaml -# dagster.yaml -instance_class: - module: dagster_cloud.instance - class: DagsterCloudAgentInstance - -dagster_cloud_api: - agent_token: - env: DAGSTER_CLOUD_AGENT_TOKEN - deployment: prod - -user_code_launcher: - module: dagster_cloud.workspace.docker - class: DockerUserCodeLauncher - -compute_logs: - module: dagster.core.storage.noop_compute_log_manager - class: NoOpComputeLogManager -``` diff --git a/docs/content/dagster-cloud/deployment/agents/docker.mdx b/docs/content/dagster-cloud/deployment/agents/docker.mdx deleted file mode 100644 index 2162b72d8d7dd..0000000000000 --- a/docs/content/dagster-cloud/deployment/agents/docker.mdx +++ /dev/null @@ -1,20 +0,0 @@ ---- -title: Dagster Cloud Docker agents | Dagster Docs - -platform_type: "cloud" ---- - -# Dagster Cloud Docker agents - -Used with a Dagster Cloud [Hybrid deployment](/dagster-cloud/deployment/hybrid), the Docker agent executes Dagster jobs in Docker containers on your computer. - - - - - diff --git a/docs/content/dagster-cloud/deployment/agents/docker/configuration-reference.mdx b/docs/content/dagster-cloud/deployment/agents/docker/configuration-reference.mdx deleted file mode 100644 index 0f57c6eb6a2c4..0000000000000 --- a/docs/content/dagster-cloud/deployment/agents/docker/configuration-reference.mdx +++ /dev/null @@ -1,22 +0,0 @@ ---- -title: Docker agent configuration reference | Dagster Docs - -platform_type: "cloud" ---- - -# Docker agent configuration reference - -This guide is applicable to Dagster Cloud. - -This reference describes the various configuration options Dagster Cloud currently supports for [Docker agents](/dagster-cloud/deployment/agents/docker/configuring-running-docker-agent). - ---- - -## Environment variables and secrets - - - -Refer to the following guides for more info about environment variables: - -- [Dagster Cloud environment variables and secrets](/dagster-cloud/managing-deployments/environment-variables-and-secrets) -- [Using environment variables and secrets in Dagster code](/guides/dagster/using-environment-variables-and-secrets) diff --git a/docs/content/dagster-cloud/deployment/agents/kubernetes.mdx b/docs/content/dagster-cloud/deployment/agents/kubernetes.mdx deleted file mode 100644 index 2948b169bc0fd..0000000000000 --- a/docs/content/dagster-cloud/deployment/agents/kubernetes.mdx +++ /dev/null @@ -1,20 +0,0 @@ ---- -title: Dagster Cloud Kubernetes agents | Dagster Docs - -platform_type: "cloud" ---- - -# Dagster Cloud Kubernetes agents - -Used with a Dagster Cloud [Hybrid deployment](/dagster-cloud/deployment/hybrid), the Kubernetes agent executes Dagster jobs on a Kubernetes cluster. This agent is appropriate for scaled production deployments and is a good choice for teams who have already standardized on Kubernetes. 
- - - - - diff --git a/docs/content/dagster-cloud/deployment/agents/kubernetes/configuration-reference.mdx b/docs/content/dagster-cloud/deployment/agents/kubernetes/configuration-reference.mdx deleted file mode 100644 index c1efb0a84e134..0000000000000 --- a/docs/content/dagster-cloud/deployment/agents/kubernetes/configuration-reference.mdx +++ /dev/null @@ -1,191 +0,0 @@ ---- -title: Kubernetes agent configuration reference | Dagster Docs - -platform_type: "cloud" ---- - -# Kubernetes agent configuration reference - -This guide is applicable to Dagster Cloud. - -This reference describes the various configuration options Dagster Cloud currently supports for [Kubernetes agents](/dagster-cloud/deployment/agents/kubernetes/configuring-running-kubernetes-agent). - ---- - -## Viewing the Helm chart - -To see the different customizations that can be applied to the Kubernetes agent, you can view the chart's default values: - -```shell -helm repo add dagster-cloud https://dagster-io.github.io/helm-user-cloud -helm repo update -helm show values dagster-cloud/dagster-cloud-agent -``` - -You can also view the chart values on [ArtifactHub](https://artifacthub.io/packages/helm/dagster-cloud/dagster-cloud-agent?modal=values). - ---- - -## Per-deployment configuration - -The [`workspace`](https://artifacthub.io/packages/helm/dagster-cloud/dagster-cloud-agent?modal=values) value of the Helm chart provides the ability to add configuration for all jobs that are spun up by the agent, across all repositories. To add secrets or mounted volumes to all Kubernetes Pods, you can specify your desired configuration under this value. - -Additionally, the [`imagePullSecrets`](https://artifacthub.io/packages/helm/dagster-cloud/dagster-cloud-agent?modal=values) value allows you to specify a list of secrets that should be included when pulling the images for your containers. - ---- - -## Per-location configuration - -When [adding a code location](/dagster-cloud/managing-deployments/code-locations) to Dagster Cloud with a Kubernetes agent, you can use the `container_context` key on the location configuration to add additional Kubernetes-specific configuration. If you're using the Dagster Cloud Github action, the `container_context` key can also be set for each location in your `dagster_cloud.yaml` file, using the same format. 
- -The following example [`dagster_cloud.yaml`](/dagster-cloud/managing-deployments/dagster-cloud-yaml) file illustrates the available fields: - -```yaml -# dagster_cloud.yaml - -locations: - - location_name: cloud-examples - image: dagster/dagster-cloud-examples:latest - code_source: - package_name: dagster_cloud_examples - container_context: - k8s: - env_config_maps: - - my_config_map - env_secrets: - - my_secret - env_vars: - - FOO_ENV_VAR=foo_value - - BAR_ENV_VAR - image_pull_policy: Always - image_pull_secrets: - - name: my_image_pull_secret - labels: - my_label_key: my_label_value - namespace: my_k8s_namespace - service_account_name: my_service_account_name - volume_mounts: - - mount_path: /opt/dagster/test_mount_path/volume_mounted_file.yaml - name: test-volume - sub_path: volume_mounted_file.yaml - volumes: - - name: test-volume - config_map: - name: test-volume-configmap - server_k8s_config: # Raw kubernetes config for code servers launched by the agent - pod_spec_config: - node_selector: - disktype: standard - container_config: - resources: - limits: - cpu: 100m - memory: 128Mi - run_k8s_config: # Raw kubernetes config for runs launched by the agent - pod_spec_config: - node_selector: - disktype: ssd - container_config: - resources: - limits: - cpu: 500m - memory: 1024Mi -``` - -### Environment variables and secrets - -Using the `container_context.k8s.env_vars` and `container_context.k8s.env_secrets` properties, you can specify environment variables and secrets for a specific code location. For example: - -```yaml -# dagster_cloud.yaml - -location: - - location_name: cloud-examples - image: dagster/dagster-cloud-examples:latest - code_source: - package_name: dagster_cloud_examples - container_context: - k8s: - env_vars: - - database_name - - database_username=hooli_testing - env_secrets: - - database_password -``` - - - - A list of environment variable names to inject into the job, formatted as{" "} - KEY or KEY=VALUE. If only KEY is - specified, the value will be pulled from the current process. - - - A list of secret names, from which environment variables for a job are drawn - using envFrom. Refer to the{" "} - - Kubernetes documentation - {" "} - for more info. - - - -Refer to the following guides for more info about environment variables: - -- [Dagster Cloud environment variables and secrets](/dagster-cloud/managing-deployments/environment-variables-and-secrets) -- [Using environment variables and secrets in Dagster code](/guides/dagster/using-environment-variables-and-secrets) - ---- - -## Op isolation - -By default, each Dagster job will run in its own Kubernetes pod, with each op running in its own subprocess within the pod. - -You can also configure your Dagster job with the [`k8s_job_executor`](https://docs.dagster.io/\_apidocs/libraries/dagster-k8s#dagster_k8s.k8s_job_executor) to run each op in its own Kubernetes pod. For example: - -```python -from dagster import job -from dagster_k8s import k8s_job_executor - -@job(executor_def=k8s_job_executor) -def k8s_job(): - ... -``` - ---- - -## Per-job and per-op configuration - -To add configuration to specific Dagster jobs, ops, or assets, use the `dagster-k8s/config` tag. For example, to specify that a job should have certain resource limits when it runs. Refer to [Customizing your Kubernetes deployment for Dagster Open Source](/deployment/guides/kubernetes/customizing-your-deployment#per-job-kubernetes-configuration) for more info. 
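For instance, here is a minimal sketch of the `dagster-k8s/config` tag applied to a job. The job name and resource values are hypothetical; the nested keys mirror the `container_config`/`resources` shape used in the raw Kubernetes config shown in the `dagster_cloud.yaml` example above.

```python
from dagster import job


# A minimal sketch of per-job Kubernetes configuration via the
# dagster-k8s/config tag. The job name and resource values here are
# hypothetical; Dagster serializes this dict into the tag for the agent.
@job(
    tags={
        "dagster-k8s/config": {
            "container_config": {
                "resources": {
                    "limits": {"cpu": "500m", "memory": "1024Mi"},
                },
            },
        },
    }
)
def my_resource_limited_job():
    ...
```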
- ---- - -## Running as a non-root user - -Starting in 0.14.0, the provided `docker.io/dagster/dagster-cloud-agent` image offers a non-root user with id `1001`. To run the agent with this user, you can specify the [`dagsterCloudAgent`](https://artifacthub.io/packages/helm/dagster-cloud/dagster-cloud-agent?modal=values) value in the Helm chart to be: - -```yaml -dagsterCloudAgent: - podSecurityContext: - runAsUser: 1001 -``` - -We plan to make this user the default in a future release. - ---- - -## Grant AWS permissions - -You can provide your Dagster pods with [permissions to assume an AWS IAM role](https://docs.aws.amazon.com/eks/latest/userguide/iam-roles-for-service-accounts.html) using a [Service Account](https://kubernetes.io/docs/tasks/configure-pod-container/configure-service-account/). For example, you might do this to [configure an S3 IO Manager](/deployment/guides/aws#using-s3-for-io-management). - -1. [Create an IAM OIDC provider for your EKS cluster](https://docs.aws.amazon.com/eks/latest/userguide/enable-iam-roles-for-service-accounts.html) -2. [Create an IAM role and attach IAM policies](https://docs.aws.amazon.com/eks/latest/userguide/associate-service-account-role.html) -3. Update the [Helm chart](#viewing-the-helm-chart) to associate the IAM role with a service account: - - ```yaml - serviceAccount: - create: true - annotations: - eks.amazonaws.com/role-arn: "arn:aws:iam::1234567890:role/my_service_account_role" - ``` - -This will allow your agent and the pods it creates to assume the `my_service_account_role` IAM role. diff --git a/docs/content/dagster-cloud/deployment/agents/kubernetes/configuring-running-kubernetes-agent.mdx b/docs/content/dagster-cloud/deployment/agents/kubernetes/configuring-running-kubernetes-agent.mdx deleted file mode 100644 index a606b8b52c171..0000000000000 --- a/docs/content/dagster-cloud/deployment/agents/kubernetes/configuring-running-kubernetes-agent.mdx +++ /dev/null @@ -1,109 +0,0 @@ ---- -title: Configuring and running a Kubernetes agent | Dagster Docs - -platform_type: "cloud" ---- - -# Configuring and running a Kubernetes agent - -This guide is applicable to Dagster Cloud. - -In this guide, you'll configure and run a Kubernetes agent. Kubernetes agents are used to launch your code in Kubernetes Jobs and Services. - ---- - -## Prerequisites - -To complete the steps in this guide, you'll need: - -- **Permissions in Dagster Cloud that allow you to manage agent tokens**. Refer to the [User permissions documentation](/dagster-cloud/account/managing-users) for more info. - -- **A Kubernetes cluster into which you can deploy the agent.** This can be a self-hosted Kubernetes cluster or a managed offering like [Amazon EKS](https://aws.amazon.com/eks/), [Azure AKS](https://azure.microsoft.com/en-us/services/kubernetes-service/#overview), or [Google GKE](https://cloud.google.com/kubernetes-engine). - -- **Access to a container registry to which you can push images and from which pods in the Kubernetes cluster can pull images.** This can be: - - - A self-hosted registry, - - A public registry such as [DockerHub](https://hub.docker.com/), or - - A managed offering such as [Amazon ECR](https://aws.amazon.com/ecr/), [Azure ACR](https://azure.microsoft.com/en-us/services/container-registry/#overview), or [Google GCR](https://cloud.google.com/container-registry) - -- **To have Helm installed.** Refer to the [Helm installation documentation](https://helm.sh/docs/intro/install/) for more info.
- ---- - -## Step 1: Generate a Dagster Cloud agent token - - - ---- - -## Step 2: Configure the agent - -In this step, you'll create a Kubernetes namespace for your Dagster Cloud resources. You'll also add the agent token to the Kubernetes cluster. - -1. Create a Kubernetes namespace for your Dagster Cloud resources: - - ```shell - kubectl create namespace - ``` - - For example, if the namespace is `dagster-cloud`: - - ```shell - kubectl create namespace dagster-cloud - ``` - -2. Add the agent token you created in [Step 1](#step-1-generate-a-dagster-cloud-agent-token) as a secret in the Kubernetes cluster: - - ```shell - kubectl create secret generic dagster-cloud-agent-token \ - --from-literal=DAGSTER_CLOUD_AGENT_TOKEN= \ - --namespace dagster-cloud - ``` - ---- - -## Step 3: Start the agent - -In this step, you'll spin up the agent with Helm. - -1. Add the [agent chart repository](https://dagster-io.github.io/helm-user-cloud): - - ```shell - helm repo add dagster-cloud https://dagster-io.github.io/helm-user-cloud - helm repo update - ``` - -2. Install the agent chart, specifying: - - - The namespace into which to install the chart - - The Dagster Cloud deployment the agent will be responsible for. **Note**: When your Dagster Cloud organization is first created, the default deployment is `prod`: - - ```shell - helm upgrade \ - --install user-cloud dagster-cloud/dagster-cloud-agent \ - --namespace dagster-cloud \ - --set dagsterCloud.deployment=prod - ``` - -After the Helm chart is installed into the Kubernetes cluster, you can view the agent in Dagster Cloud. - -In Dagster Cloud, navigate to the **Status** page and click the **Agents** tab. You should see the agent running in the **Agent statuses** section: - - - ---- - -## Next steps - -Now that you've got your agent running, what's next? - -- **If you're getting Dagster Cloud set up**, the next step is to [add a code location](/dagster-cloud/managing-deployments/code-locations) using the agent. - -- **If you're ready to load your Dagster code**, refer to the [Adding Code to Dagster Cloud](/dagster-cloud/managing-deployments/code-locations) guide for more info. - -You can also further [configure the jobs the agent runs](/dagster-cloud/deployment/agents/kubernetes/configuration-reference), including adding secrets, mounting volumes, and setting resource limits. diff --git a/docs/content/dagster-cloud/deployment/agents/local.mdx b/docs/content/dagster-cloud/deployment/agents/local.mdx deleted file mode 100644 index 5d37f6e70c95d..0000000000000 --- a/docs/content/dagster-cloud/deployment/agents/local.mdx +++ /dev/null @@ -1,102 +0,0 @@ ---- -title: Configuring and running a local agent | Dagster Docs - -platform_type: "cloud" ---- - -# Configuring and running a local agent - -This guide is applicable to Dagster Cloud. - -In this guide, you'll configure and run a local agent. Local agents are used to launch user code in operating system subprocesses. - -**Considering running the agent in production?** Running the local agent in production can be a good choice if: - -- Load is relatively light (guaranteed to fit on a single node) -- Jobs aren’t computationally intensive and don’t use much memory -- A Kubernetes cluster is prohibitively difficult to set up -- You can restart the agent when you update your code - -Keep in mind that the local agent’s ability to run jobs is limited by the capacity of the single node on which it’s running. 
- -If you’re running the local agent in production, make sure you’ve set up a supervisor to automatically restart the agent process if it crashes. You’ll also want a system in place to alert you if the VM or container dies, or to automatically restart it. - ---- - -## Prerequisites - -To complete the steps in this guide, you'll need: - -- **Permissions in Dagster Cloud that allow you to manage agent tokens**. Refer to the [User permissions documentation](/dagster-cloud/account/managing-users) for more info. -- **To install the [`dagster-cloud` CLI](/dagster-cloud/managing-deployments/dagster-cloud-cli)**. This should be in the same environment where the agent will run. - - **Note**: Your Dagster application code and its Python and system requirements must also be installed in this environment. We recommend using Python [virtual environments](https://packaging.python.org/guides/installing-using-pip-and-virtual-environments/#creating-a-virtual-environment) to accomplish this. - ---- - -## Step 1: Generate a Dagster Cloud agent token - - - ---- - -## Step 2: Configure the agent - -1. Create a directory on disk to act as your Dagster home. We'll use `~/dagster_home` in our examples, but this directory can be located wherever you want. - -2. In the directory you created, create a `dagster.yaml` file like the following: - - ```yaml - # ~/dagster_home/dagster.yaml - - instance_class: - module: dagster_cloud.instance - class: DagsterCloudAgentInstance - - dagster_cloud_api: - agent_token: - deployment: prod - - user_code_launcher: - module: dagster_cloud.workspace.user_code_launcher - class: ProcessUserCodeLauncher - ``` - -3. In the file, fill in the following: - - - `agent_token` - Add the agent token you created in [Step 1](#step-1-generate-a-dagster-cloud-agent-token). This specifies that the agent will launch work in local processes, reading the API URL and authentication token from environment variables. - - - `deployment` - Enter the deployment associated with this instance of the agent. - - In the above example, we specified `prod` as the deployment. This is present when Dagster Cloud organizations are first created. - -4. Save the file. - ---- - -## Step 3: Run the agent - -Next, run the process agent by pointing at the home directory you created: - -```shell -dagster-cloud agent run ~/dagster_home/ -``` - -To view the agent in Dagster Cloud, navigate to the **Status** page and click the **Agents** tab. You should see the agent running in the **Agent statuses** section: - - - ---- - -## Next steps - -Now that you've got your agent running, what's next? - -- **If you're getting Dagster Cloud set up**, the next step is to [add a code location](/dagster-cloud/managing-deployments/code-locations) using the agent. - -- **If you're ready to load your Dagster code**, refer to the [Adding Code to Dagster Cloud](/dagster-cloud/managing-deployments/code-locations) guide for more info. diff --git a/docs/content/dagster-cloud/deployment/hybrid.mdx b/docs/content/dagster-cloud/deployment/hybrid.mdx deleted file mode 100644 index 4c3301e5a98c4..0000000000000 --- a/docs/content/dagster-cloud/deployment/hybrid.mdx +++ /dev/null @@ -1,86 +0,0 @@ ---- -title: Hybrid deployments in Dagster Cloud | Dagster Docs ---- - -# Hybrid deployments in Dagster Cloud - -This guide is applicable to Dagster Cloud. - -In this guide, we'll cover how a Hybrid deployment works in Dagster Cloud, including its system architecture, agent, and security. 
- ---- - -## Hybrid architecture overview - -A **hybrid deployment** utilizes a combination of your infrastructure and Dagster-hosted backend services. - -The Dagster backend services - including the web frontend, GraphQL API, metadata database, and daemons (responsible for executing schedules and sensors) - are hosted in Dagster Cloud. You are responsible for running an [agent](/dagster-cloud/deployment/agents) in your environment. - - - -Work is enqueued for your agent when: - -- Users interact with the web front end, -- The GraphQL API is queried, or -- Schedules and sensors tick - -The agent polls the agent API to see if any work needs to be done and launches user code as appropriate to fulfill requests. User code then streams metadata back to the agent API (GraphQL over HTTPS) to make it available in Dagster Cloud. - -All user code runs within your environment, in isolation from Dagster system code. - ---- - -## The agent - -Because the agent communicates with the Dagster Cloud control plane over the agent API, it’s possible to support agents that operate in arbitrary compute environments. - -This means that over time, Dagster Cloud’s support for different user deployment environments will expand and custom agents can take advantage of bespoke compute environments such as HPC. - -Refer to the [Agents documentation](/dagster-cloud/deployment/agents) for more info, including the agents that are currently supported. - ---- - -## Security - -This section describes how Dagster Cloud interacts with user code. To summarize: - -- No ingress is required from Dagster Cloud to user environments -- Dagster Cloud doesn't have access to user code. Metadata about the code is fetched over constrained APIs. - -These highlights are described in more detail below: - -- [Interactions and queries](#interactions-and-queries) -- [Runs](#runs) -- [Ingress](#ingress) - -### Interactions and queries - -When Dagster Cloud needs to interact with user code - for instance, to display the structure of a job in the Dagster Cloud user interface, to run the body of a sensor definition, or to launch a run for a job - it enqueues a message for the Dagster Cloud Agent. The Dagster Cloud Agent picks up this message and then launches or queries user code running on the appropriate compute substrate. - -Depending on the agent implementation, user code may run in isolated OS processes, in Docker containers, in ECS Tasks, in Kubernetes Jobs and Services, or in a custom isolation strategy. - -Queries to user code run over a well-defined grpc interface. Dagster Cloud uses this interface to: - -- Retrieve the names, config schemas, descriptions, tags, and structures of jobs, ops, repositories, partitions, schedules, and sensors defined in your code -- Evaluate schedule and sensor ticks and determine whether a run should be launched - -When the agent queries user code, it writes the response back to Dagster Cloud over a well-defined GraphQL interface. - -### Runs - -Runs are launched by calling the `dagster api` CLI command in a separate process/container as appropriate to the agent type. Run termination is handled by interrupting the user code process/container as appropriate for the compute substrate. - -When runs are launched, the user code process/container streams structured metadata (containing everything that is viewable in the integrated logs viewer in the Dagster Cloud UI) back to Dagster Cloud over a well-defined GraphQL interface. Structured metadata is stored in Amazon RDS, encrypted at rest. 
- -At present, the run worker also uploads the compute logs (raw `stdout` and `stderr` from runs) to Dagster Cloud. - -### Ingress - -No ingress is required from Dagster Cloud to user environments. All dataflow and network requests are unidirectional from user environments to Dagster Cloud. - -**Note:** To ensure that user code remains completely isolated in the user environment, Dagster Cloud does not currently support previews of Dagstermill notebooks. Supporting these previews securely is a roadmap feature. diff --git a/docs/content/dagster-cloud/deployment/serverless.mdx b/docs/content/dagster-cloud/deployment/serverless.mdx deleted file mode 100644 index df2e9975e37e3..0000000000000 --- a/docs/content/dagster-cloud/deployment/serverless.mdx +++ /dev/null @@ -1,379 +0,0 @@ ---- -title: "Serverless deployment in Dagster Cloud | Dagster Docs" ---- - -# Serverless deployment in Dagster Cloud - -This guide is applicable to Dagster Cloud. - -Dagster Cloud Serverless is a fully managed version of Dagster Cloud, and is the easiest way to get started with Dagster. With Serverless, you can run your Dagster jobs without spinning up any infrastructure. - ---- - -## When to choose Serverless - -Serverless works best with workloads that primarily orchestrate other services or perform light computation. Most workloads fit into this category, especially those that orchestrate third-party SaaS products like cloud data warehouses and ETL tools. - -If any of the following are applicable, you should select [Hybrid deployment](/dagster-cloud/deployment/hybrid): - -- You require substantial computational resources. For example, training a large machine learning (ML) model in-process. -- Your dataset is too large to fit in memory. For example, training a large machine learning (ML) model in-process on a terabyte of data. -- You need to distribute computation across many nodes for a single run. Dagster Cloud runs currently execute on a single node with 4 CPUs. -- You don't want to add Dagster Labs as a data processor. - ---- - -## Limitations - -Serverless is subject to the following limitations: - -- Maximum of 100 GB of bandwidth per day -- Maximum of 4500 step-minutes per day -- Runs receive 4 vCPU cores, 16 GB of RAM, and 128 GB of ephemeral disk -- Sensors receive 0.25 vCPU cores and 1 GB of RAM -- All Serverless jobs run in the United States - -Enterprise customers may request a quota increase by [contacting Sales](mailto:sales@dagsterlabs.com). - ---- - -## Getting started with Serverless - -- [With GitHub](#with-github) -- [With Gitlab](#with-gitlab) -- [Other Git providers or local development](#other-bitbucket-or-local-development) -- [Adding secrets](#adding-secrets) - -### With GitHub - -If you are a GitHub user, our GitHub integration is the fastest way to get started. It uses a GitHub app and GitHub Actions to set up a repo containing skeleton code and configuration consistent with Dagster Cloud's best practices with a single click. - -When you create a new Dagster Cloud organization, you'll be prompted to choose Serverless or Hybrid deployment. Once activated, our GitHub integration will scaffold a new git repo for you with Serverless and Branch Deployments already configured. Pushing to the `main` branch will deploy to your `prod` Serverless deployment. Pull requests will spin up ephemeral [branch deployments](/dagster-cloud/managing-deployments/branch-deployments) using the Serverless agent.
- -### With Gitlab - -If you are a Gitlab user, our Gitlab integration is the fastest way to get started. It uses a Gitlab app to set up a repo containing skeleton code and CI/CD configuration consistent with Dagster Cloud's best practices with a single click. - -When you create a new Dagster Cloud organization, you'll be prompted to choose Serverless or Hybrid deployment. Once activated, our Gitlab integration will scaffold a new git repo for you with Serverless and Branch Deployments already configured. Pushing to the `main` branch will deploy to your `prod` Serverless deployment. Pull requests will spin up ephemeral [branch deployments](/dagster-cloud/managing-deployments/branch-deployments) using the Serverless agent. - -### Other (BitBucket or local development) - -If you don't want to use our GitHub/Gitlab integrations, we offer a powerful CLI that you can use in another CI environment or on your local laptop. - -First, [create a new project](https://docs.dagster.io/getting-started/create-new-project#create-a-new-project) with the Dagster open-source CLI. - -```shell -pip install dagster -dagster project from-example \ - --name my-dagster-project \ - --example assets_modern_data_stack -``` - - - Once scaffolded, add dagster-cloud as a dependency in your{" "} - setup.py file. - - -Next, install the [dagster-cloud CLI](/dagster-cloud/managing-deployments/dagster-cloud-cli) and log in to your org. **Note**: The CLI requires a recent version of Python 3 and Docker. - -```shell -pip install dagster-cloud -dagster-cloud configure -``` - -You can also configure the `dagster-cloud` tool noninteractively; see [the CLI docs](/dagster-cloud/managing-deployments/dagster-cloud-cli#environment-variables-and-cli-options) for more information. - -Add `dagster-cloud` as a dependency to `my-dagster-project/setup.py`: - -```python -install_requires=[ - "dagster", - "dagster-cloud", # add this line - "dagster-airbyte", -``` - -Finally, deploy your project with Dagster Cloud Serverless: - -```shell -dagster-cloud serverless deploy-python-executable ./my-dagster-project \ - --location-name example \ - --package-name assets_modern_data_stack -``` - -**Note:** Windows users should use the `deploy` command instead of `deploy-python-executable`. - -### Adding secrets - -Often you'll need to securely access secrets from your jobs. Dagster Cloud supports several methods for adding secrets - refer to the [Dagster Cloud environment variables and secrets documentation](/dagster-cloud/managing-deployments/environment-variables-and-secrets) for more info. - -### Adding dependencies - -Any dependencies specified in either `requirements.txt` or `setup.py` will be installed for you automatically by the Dagster Cloud Serverless infrastructure. - ---- - -## Customizing the runtime environment - -Dagster Cloud Serverless packages your code as [PEX](https://pex.readthedocs.io) files and deploys them on Docker images. Using PEX files significantly reduces the time to deploy since it does not require building a new Docker image and provisioning a new container for every code change. Many apps will work fine with the default Dagster Cloud Serverless setup. However, some apps may need to make changes to the runtime environment, either to include data files, use a different base image, different Python version, or install some native dependencies. You can customize the runtime environment using various methods described below. 
- -### Including data files - -To add data files to your deployment, use the [Data Files Support](https://setuptools.pypa.io/en/latest/userguide/datafiles.html) built into Python's `setup.py`. This requires adding a `package_data` or `include_package_data` keyword in the call to `setup()` in `setup.py`. For example, given this directory structure: - - - setup.py - - my_dagster_project/ - - __init__.py - - repository.py - - data/ - - file1.txt - - file2.csv - -If you want to include the `data` folder, modify your `setup.py` to add the `package_data` line: - -```python -# setup.py -from setuptools import find_packages, setup - -if __name__ == "__main__": - setup( - name="my_dagster_project", - packages=find_packages(exclude=["my_dagster_project_tests"]), - # Add the following line. Here "data/*" is relative to the my_dagster_project subdirectory. - package_data={"my_dagster_project": ["data/*"]}, - install_requires=[ - "dagster", - ... - ], - ) -``` - -### Using a different Python version - -The default version of Python for Serverless deployments is Python 3.8. Versions 3.9, 3.10, and 3.11 are also supported. You can specify the version you want by updating your GitHub workflow or using the `--python-version` command line argument: - -- **With GitHub**: Change the `python_version` parameter for the `build_deploy_python_executable` job in your `.github/workflows` files. For example: - - ```yaml - - name: Build and deploy Python executable - if: env.ENABLE_FAST_DEPLOYS == 'true' - uses: dagster-io/dagster-cloud-action/actions/build_deploy_python_executable@pex-v0.1 - with: - dagster_cloud_file: "$GITHUB_WORKSPACE/project-repo/dagster_cloud.yaml" - build_output_dir: "$GITHUB_WORKSPACE/build" - python_version: "3.9" # Change this value to the desired Python version - env: - GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - ``` - -- **With the CLI**: Add the `--python-version` CLI argument to the deploy command to specify the desired Python version: - - ```shell - dagster-cloud serverless deploy-python-executable --location-name=my_location --python-version=3.9 - ``` - -### Using a different base image or using native dependencies - -Dagster Cloud runs your code on a Docker image that we build as follows: - -1. The standard Python "slim" [Docker image](https://hub.docker.com/\_/python), such as `python:3.8-slim`, is used as the base. -2. The `dagster-cloud[serverless]` module is installed in the image. - -As far as possible, add all dependencies by including the corresponding native Python bindings in your `setup.py`. When that is not possible, you can build and upload a custom base image that will be used to run your Python code. - -To build and upload the image, use the command line: - -1. Build your Docker image using `docker build` or your usual Docker toolchain. Ensure the `dagster-cloud[serverless]` dependency is included. You can do this by adding the following to your `Dockerfile`: - - ```shell - RUN pip install "dagster-cloud[serverless]" - ``` - -2. Upload your Docker image to Dagster Cloud using the `upload-base-image` command. Note that this command prints out the tag used in Dagster Cloud to identify your image: - - ```shell - $ dagster-cloud serverless upload-base-image local-image:tag - - ... - To use the uploaded image run: dagster-cloud deploy-python-executable ... --base-image-tag=sha256_518ad2f92b078c63c60e89f0310f13f19d3a1c7ea9e1976d67d59fcb7040d0d6 - ``` - -3.
To use a Docker image you have published to Dagster Cloud, use the `--base-image-tag` tag printed out by the above command. - - - **With GitHub**: Set the `SERVERLESS_BASE_IMAGE_TAG` environment variable in your GitHub Actions configuration (usually at `.github/workflows/deploy.yml`): - - ```yaml - env: - DAGSTER_CLOUD_URL: ... - DAGSTER_CLOUD_API_TOKEN: ... - SERVERLESS_BASE_IMAGE_TAG: "sha256_518ad2f92b078c63c60e89f0310f13f19d3a1c7ea9e1976d67d59fcb7040d0d6" - ``` - - - **With the CLI**: Add the `--base-image-tag` CLI argument to the deploy command: - - ```shell - dagster-cloud serverless deploy-python-executable \ - --location-name example \ - --package-name assets_modern_data_stack \ - --base-image-tag sha256_518ad2f92b078c63c60e89f0310f13f19d3a1c7ea9e1976d67d59fcb7040d0d6 - ``` - -### Disabling PEX-based deploys - -Prior to using PEX files, Dagster Cloud deployed code using Docker images. This feature is still available. To deploy using a Docker image instead of PEX: - -- **With GitHub**: Delete the `ENABLE_FAST_DEPLOYS: 'true'` line in your GitHub Actions configuration (usually at `.github/workflows/deploy.yml`): - - ```yaml - env: - DAGSTER_CLOUD_URL: ... - DAGSTER_CLOUD_API_TOKEN: ... - # ENABLE_FAST_DEPLOYS: 'true' # disabled - ``` - -- **With the CLI**: Use the `deploy` command instead of the `deploy-python-executable` command: - - ```shell - dagster-cloud serverless deploy \ - --location-name example \ - --package-name assets_modern_data_stack - ``` - -The Docker image deployed can be customized using either lifecycle hooks or customizing the base image. - - - - -This method is the easiest to set up, and does not require setting up any additional infrastructure. - -In the root of your repo, you can provide two optional shell scripts: `dagster_cloud_pre_install.sh` and `dagster_cloud_post_install.sh`. These will run before and after Python dependencies are installed. They are useful for installing any non-Python dependencies or otherwise configuring your environment. - - - - -This method is the most flexible, but requires setting up a pipeline outside of Dagster to build a custom base image. - -The default base image is `debian:bullseye-slim`, but it can be changed. - -- **With GitHub**: Provide a `base_image` input parameter to the **Build and deploy** step in your GitHub Actions configuration (usually at `.github/workflows/deploy.yml`): - - ```yaml - - name: Build and deploy to Dagster Cloud serverless - uses: dagster-io/dagster-cloud-action/actions/serverless_prod_deploy@v0.1 - with: - dagster_cloud_api_token: ${{ secrets.DAGSTER_CLOUD_API_TOKEN }} - location: ${{ toJson(matrix.location) }} - # Use a custom base image - base_image: "my_base_image:latest" - organization_id: ${{ secrets.ORGANIZATION_ID }} - env: - GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - ``` - -- **With the CLI**: Add the `--base-image` CLI argument to the deploy command to specify the registry path to the desired base image: - - ```shell - dagster-cloud serverless deploy --location-name=my_location --base-image=my_base_image:latest - ``` - - - - ---- - -## Transitioning to Hybrid - -If your organization begins to hit the limitations of Serverless, you should transition to a Hybrid deployment. Hybrid deployments allow you to run an [agent in your own infrastructure](/dagster-cloud/deployment/agents) and give you substantially more flexibility and control over the Dagster environment. - -To switch to Hybrid, navigate to **Status > Agents** in your Dagster Cloud account. 
On this page, you can disable the Serverless agent and view instructions for enabling Hybrid. - ---- - -## Security and data protection - -Unlike Hybrid, Serverless deployments on Dagster Cloud require direct access to your data, secrets, and source code. - -- Dagster Cloud Serverless does not provide persistent storage. Ephemeral storage is deleted when a run concludes. -- Secrets and source code are built into the image directly. Images are stored in a per-customer container registry with restricted access. -- User code is securely sandboxed using modern container sandboxing techniques. -- All production access is governed by industry-standard best practices which are regularly audited. - ---- - -## Whitelisting Dagster's IP addresses - -Serverless code will make requests from one of the following IP addresses. You may need to whitelist them for services your code interacts with. - -```plain -34.216.9.66 -35.162.181.243 -35.83.14.215 -44.230.239.14 -44.240.64.133 -52.34.41.163 -52.36.97.173 -52.37.188.218 -52.38.102.213 -52.39.253.102 -52.40.171.60 -52.89.191.177 -54.201.195.80 -54.68.25.27 -54.71.18.84 -``` - -**Note**: Additional IP addresses may be added over time. This list was last updated on **January 31, 2024.** - ---- - -## Run isolation - -Dagster Cloud Serverless offers two settings for run isolation: isolated and non-isolated. Non-isolated runs are for iterating quickly and trade off isolation for speed. Isolated runs are for production and compute-heavy assets and jobs. - -### Isolated runs (default) - -Isolated runs each take place in their own container with their own compute resources: 4 vCPU cores and 16 GB of RAM. - -These runs may take up to 3 minutes to start while these resources are provisioned. - -When launching runs manually, select `Isolate run environment` in the Launchpad to launch an isolated run. Scheduled, sensor, and backfill runs are always isolated. - - - -_Note: if non-isolated runs aren't enabled (see the section below), the toggle won't appear and all runs will be isolated._ - -### Non-isolated runs - -Non-isolated runs can be enabled or disabled in deployment settings with: - -```yaml -non_isolated_runs: - enabled: True -``` - -Non-isolated runs provide a faster start time by using a standing, shared container for each code location. - -They have fewer compute resources: 0.25 vCPU cores and 1 GB of RAM. These resources are shared with other processes for a code location, like sensors. As a result, it's recommended to use isolated runs for compute-intensive jobs and asset materializations. - -While launching runs from the Launchpad, uncheck `Isolate run environment`. When materializing an asset, shift-click `Materialize all` and uncheck it in the modal. - - - -By default, only one non-isolated run will execute at once. While a run is in progress, the Launchpad will swap to only launching isolated runs. - -This limit can be configured in [deployment settings](/dagster-cloud/managing-deployments/deployment-settings-reference#non-isolated-runs). Use caution: the limit is in place to help avoid crashes due to out-of-memory (OOM) errors.
- -```yaml -non_isolated_runs: - enabled: True - max_concurrent_non_isolated_runs: 1 -``` diff --git a/docs/content/dagster-cloud/getting-started.mdx b/docs/content/dagster-cloud/getting-started.mdx deleted file mode 100644 index 115e2295a5e6b..0000000000000 --- a/docs/content/dagster-cloud/getting-started.mdx +++ /dev/null @@ -1,480 +0,0 @@ ---- -title: Getting started with Dagster Cloud | Dagster Cloud -description: "Get up and running with Dagster Cloud." - -platform_type: "cloud" ---- - -# Getting started with Dagster Cloud - -Welcome to Dagster Cloud! - -In this guide, we'll give you everything you need to get up and running with Dagster Cloud, including: - -- Creating your Dagster Cloud account -- Selecting a deployment type -- Deploying your code -- Setting up CI/CD -- Setting up environment variables and secrets - -Let's get started! - ---- - -## Step 1: Create a Dagster Cloud account and organization - -If you don't already have a Dagster Cloud account, [sign up for one](https://dagster.cloud/signup) before continuing. - -You'll also be asked to create a Dagster Cloud organization. When your organization is created, a single deployment, named `prod`, will also be created. Refer to the [Managing deployments guide](/dagster-cloud/managing-deployments/managing-deployments) for more information about deployments in Dagster Cloud. - ---- - -## Step 2: Select a deployment type - -In this step, you'll select the type of deployment you want to use: **Serverless** or **Hybrid**. - - - - - - - - - - - - - - - - - - - - - -
-| Type | How it works | May be a good fit if... |
-| --- | --- | --- |
-| **Serverless** | In a Serverless deployment, Dagster manages the infrastructure needed to run Dagster jobs. This means you don't need to spin up or manage any infrastructure. Refer to the [Serverless deployment documentation](/dagster-cloud/deployment/serverless) for more info. | • You want to try Dagster Cloud without a lot of time investment <br/> • You don't want to focus on infrastructure management <br/> • Your Dagster jobs aren't computationally expensive or memory intensive |
-| **Hybrid** | In a Hybrid deployment, your Dagster code is executed in your infrastructure. The agent runs in your infrastructure, executes your code, and streams metadata about code execution and assets and jobs (for visualization) over HTTPS back to Dagster Cloud. Refer to the [Hybrid deployment documentation](/dagster-cloud/deployment/hybrid) for more info. Dagster Cloud supports a variety of agents, each with its own prerequisites for setup. Refer to the [Agent documentation](/dagster-cloud/deployment/agents) for more info. | |
    - -Select the deployment type you want to use and [proceed to the next step](#step-3-deploy-your-code). **Note**: If you find your deployment type isn't right for you, it can be changed at any time. - ---- - -## Step 3: Deploy your code - -Now that you've selected a deployment type, the next step is to tell Dagster Cloud the location of your Dagster code. In addition to this, you'll set up Continuous Integration (CI) and Continuous Deployment (CD). Once completed, [Branch Deployments](/dagster-cloud/managing-deployments/branch-deployments) and continuous code location deployments will be enabled for your Dagster project. - -The steps for accomplishing this vary depending on the **deployment type** you selected in the previous step: - - - - -Click the tab for your deployment type - [**Serverless**](#serverless) or [**Hybrid**](#hybrid) - to view what's next. - - - - -### Serverless - -For **Serverless deployments**, there are two ways to deploy your code to Dagster Cloud: - -- [**Start from a template**](#use-a-template) - Use one of our quickstart templates to get up and running. All templates come with CI/CD already configured and will be cloned to a new GitHub repository. - -- [**Import an existing project**](#import-an-existing-project) - Import an existing GitHub repository using our GitHub integration or the [dagster-cloud CLI](/dagster-cloud/managing-deployments/dagster-cloud-cli). **Note**: If using the GitHub integration, Dagster Cloud will automatically set up CI/CD for you. - -#### Use a template - - - - -##### GitHub - -1. Click **Select** to select a template. -2. Sign in to your GitHub account, if prompted. -3. In Dagster Cloud, define the following: - - **Git scope** - Select the organization or account to create the new repository in. - - **Repository Name** - Enter a name for the new repository. - - Check the **Make git repository private** box to make the repository private. -4. When finished, click **Clone and deploy**. - -When finished, [continue to Step 5](#step-5-set-up-environment-variables-and-secrets). - ---- - - - - -##### GitLab - -1. Click **Select** to select a template. -2. Sign in to your Gitlab account, if prompted. -3. In Dagster Cloud, define the following: - - **Namespace** - Select the group or account to create the new project in. - - **Project** - Enter a name for the new project. - - Check the **Make git project private** box to make the project private. -4. When finished, click **Clone and deploy**. - -When finished, [continue to Step 5](#step-5-set-up-environment-variables-and-secrets). - ---- - - - - -#### Import an existing project - -If you have existing Dagster code, you can use Dagster's GitHub / Gitlab app or the dagster-cloud CLI. - - - - -##### GitHub - -Using the GitHub integration to import an existing GitHub repository also sets up CI/CD for you. - -Before you get started, note that the repository must have a [`dagster_cloud.yaml` file](/dagster-cloud/managing-deployments/dagster-cloud-yaml) in order for Dagster Cloud to deploy it. This file defines the [code locations](/concepts/code-locations) in your Dagster code. 
- -If this file doesn't currently exist, create it in the root of your repository with the following code: - -```yaml -# dagster_cloud.yaml - -locations: - - location_name: my_location_name # the name of the code location - code_source: - package_name: hackernews # the name of the python package associated with the code location -``` - -After you've committed the file to the repository, come back to Dagster Cloud to complete the import process: - -1. Click the **Import an existing project** tab. -2. Sign in to your GitHub account, if prompted. -3. In Dagster Cloud, define the following: - - **Git scope** - Select the organization or account that the repository is in. - - **Repository** - Select the repository. -4. Click **Deploy**. - -When finished, [continue to Step 5](#step-5-set-up-environment-variables-and-secrets). - - - - -##### GitLab - -Using the Gitlab integration to import an existing Gitlab project also sets up CI/CD for you. - -Before you get started, note that the project must have a [`dagster_cloud.yaml` file](/dagster-cloud/managing-deployments/dagster-cloud-yaml) in order for Dagster Cloud to deploy it. This file defines the [code locations](/concepts/code-locations) in your Dagster code. - -If this file doesn't currently exist, create it in the root of your repository with the following code: - -```yaml -# dagster_cloud.yaml - -locations: - - location_name: my_location_name # the name of the code location - code_source: - package_name: hackernews # the name of the python package associated with the code location -``` - -After you've committed the file to the project, come back to Dagster Cloud to complete the import process: - -1. Click the **Import an existing project** tab. -2. Sign in to your Gitlab account, if prompted. -3. In Dagster Cloud, define the following: - - **Namespace** - Select the group or account that the project is in. - - **Project** - Select the project. -4. Click **Deploy**. - -When finished, [continue to Step 5](#step-5-set-up-environment-variables-and-secrets). - - - - -##### dagster-cloud CLI - - - Heads up! Using the dagster-cloud CLI requires a recent - version of Python 3 and Docker. - - -To complete this step using the CLI, you can use your own Dagster code or the [Dagster starter kit](https://github.com/dagster-io/quickstart-etl). The starter kit is a template with everything you need to get started using Serverless in Dagster Cloud, including CI/CD configuration and the required [`dagster_cloud.yaml` file](/dagster-cloud/managing-deployments/dagster-cloud-yaml). - -- **If using the template**, [clone the repository](https://github.com/dagster-io/quickstart-etl) to your local environment. - -- **If not using the template**, add `dagster-cloud` as a dependency in `setup.py`. \[Click here for an example]\(. This is already done for you if using the starter kit. - -After you've finished setting up your local project, move on to deploying using the CLI: - -1. To install the dagster-cloud CLI, run: - - ```shell - pip install dagster-cloud - ``` - -2. Next, you'll need to authenticate to Dagster Cloud. Run the following command and follow the prompts to log in: - - ```shell - dagster-cloud config setup - ``` - -3. 
After you've successfully authenticated, run the following to deploy your code to Dagster Cloud: - - ```shell - dagster-cloud serverless deploy \ - --location-name "" \ # name of the code location - --package-name "" \ # name of the Python package associated with the code location - ``` - -When finished, [continue to Step 5](#step-5-set-up-environment-variables-and-secrets). - - - - - - -### Hybrid - -To set up Hybrid deployment and deploy your code, you'll need to: - -1. Set up an agent -2. Configure CI/CD for your project. We'll walk you through this in [Step 4](#step-4-configure-cicd-for-your-project). - -For most Hybrid deployments - with the exception of those using a local agent - you'll need to create a Docker image containing your Dagster code and then add a code location to Dagster Cloud that references the image. - -1. The dialog that displays will contain a pre-generated [agent token](/dagster-cloud/account/managing-user-agent-tokens) and details about the agents currently supported by Dagster Cloud. - - **Note**: Keep this token handy - you'll need it to set up CI/CD in the next step. - -2. Follow the steps for setting up and deploying your agent: - - - [Amazon Elastic Container Service (ECS)](/dagster-cloud/deployment/agents/amazon-ecs/creating-ecs-agent-new-vpc) - - [Docker](/dagster-cloud/deployment/agents/docker) - - [Kubernetes](/dagster-cloud/deployment/agents/kubernetes/configuring-running-kubernetes-agent) - - [Local](/dagster-cloud/deployment/agents/local) - -3. The dialog will indicate when Dagster Cloud receives an agent heartbeat. Click **Continue**. - -When finished, [continue to the next step](#step-4-configure-cicd-for-your-project). - - - - ---- - -## Step 4: Configure CI/CD for your project - - - Skip to the next step if using Serverless. This step is only - required for Hybrid deployments. - - -To finish setting up your Hybrid deployment, you'll configure CI/CD for your Dagster project. How this is accomplished depends on your CI/CD provider: - -- **GitHub** - If using GitHub, you can use our GitHub Action workflow to set up CI/CD for your project. -- **Another CI/CD provider** - If you're not using GitHub, you can configure CI/CD using the dagster-cloud CLI. - - - - -### GitHub Actions - -To set up continuous integration using GitHub Actions, you can use your own Dagster code or the [Dagster Cloud Hybrid Quickstart](https://github.com/dagster-io/dagster-cloud-hybrid-quickstart). The quickstart is a template with everything you need to get started using Hybrid deployment in Dagster Cloud. - -- **If using the template**, [clone the repository](https://github.com/dagster-io/dagster-cloud-hybrid-quickstart). - -- **If not using the template**, copy the GitHub workflow files (`.github/workflows`) from the [Hybrid quickstart repository](https://github.com/dagster-io/dagster-cloud-hybrid-quickstart/tree/main/.github/workflows) and add them to your repository. This is already done for you if using the quickstart. - -**Configure the GitHub workflow YAML file as described below**. The GitHub workflow deploys your code to Dagster Cloud using these steps: - -1. Initialize - Check out your code and validate `dagster_cloud.yaml`. - -2. Docker image push - Build a Docker image from your code and upload it to your container registry. - -3. Deploy to Dagster Cloud - Update code locations in Dagster Cloud to use the new Docker image. - -**To configure the workflow**, follow these steps: - -1. In the repository, set the `DAGSTER_CLOUD_API_TOKEN` GitHub action secret.
This is the Dagster Cloud agent token from the previous section. Refer to the [agent tokens documentation](/dagster-cloud/account/managing-user-agent-tokens#managing-agent-tokens) for more info. - - Refer to the [GitHub docs](https://docs.github.com/en/actions/security-guides/encrypted-secrets#creating-encrypted-secrets-for-a-repository) for more info about GitHub Action secrets. - -2. In your [`dagster-cloud-deploy.yml`](https://github.com/dagster-io/dagster-cloud-hybrid-quickstart/blob/main/.github/workflows/dagster-cloud-deploy.yml), set the `DAGSTER_CLOUD_ORGANIZATION` environment variable to your Dagster Cloud organization name. - -3. In your `dagster-cloud-deploy.yml`, uncomment the step that is relevant to your Docker container registry. For example, if using DockerHub, you'd uncomment the `DockerHub` step in these files. Ensure you have set up the relevant secrets for building and uploading your Docker images. - -After making the above changes and committing the workflow file, the CI process should be triggered to deploy your GitHub repository to Dagster Cloud. During the deployment, the agent will attempt to load your code and update the metadata in Dagster Cloud. Once finished, you should see the GitHub Action complete successfully and also be able to see the code location under the **Deployment** tab in Dagster Cloud. - -When finished, [continue to the next step](#step-5-set-up-environment-variables-and-secrets). - - - - -### Other CI/CD provider - -For continuous integration using a CI/CD provider other than GitHub, your system should use the `dagster-cloud ci` subcommand to deploy code locations to Dagster Cloud. - -Ensure that you have created a `dagster_cloud.yaml` file as described in [the quickstart](https://github.com/dagster-io/dagster-cloud-hybrid-quickstart/). For detailed documentation about this file, see the [`dagster_cloud.yaml` reference](/dagster-cloud/managing-deployments/dagster-cloud-yaml). The following steps are typically implemented in the CI/CD workflow: - -1. Set the build environment variables. Note that all variables are required: - - - `DAGSTER_CLOUD_ORGANIZATION` - The name of your organization in Dagster Cloud. - - `DAGSTER_CLOUD_API_TOKEN` - A Dagster Cloud API token. **Note**: This is a sensitive value and should be stored as a CI/CD secret, if possible. - - `DAGSTER_BUILD_STATEDIR` - A path to a blank or non-existent temporary directory on the build machine. This directory is used to store local state during the build. - -2. Run the configuration check: - - ```shell - dagster-cloud ci check --project-dir=. - ``` - - This is an optional step but useful to validate the contents of your `dagster_cloud.yaml` and connection to Dagster Cloud. - -3. Initialize the build session: - - ```shell - dagster-cloud ci init --project-dir=. - ``` - - This reads the `dagster_cloud.yaml` configuration and initializes the `DAGSTER_BUILD_STATEDIR`. - -4. Build and upload Docker images for your code locations. - - The Docker image should contain a Python environment with `dagster`, `dagster-cloud`, and your code. For reference, see the [example Dockerfile](https://github.com/dagster-io/dagster-cloud-hybrid-quickstart/blob/main/Dockerfile) in our quickstart repository. The example uses `pip install .` to install the code including the dependencies specified in [`setup.py`](https://github.com/dagster-io/dagster-cloud-hybrid-quickstart/blob/main/setup.py). - - It is a good idea to use a unique image tag for each Docker build.
You can build one image per code location or a shared image for multiple code locations. As an example image tag, you can use the git commit SHA: - - ```shell - export IMAGE_TAG=`git log --format=format:%H -n 1` - ``` - - Use this tag to build and upload your Docker image, for example: - - ```shell - docker build . -t ghcr.io/org/dagster-cloud-image:$IMAGE_TAG - docker push ghcr.io/org/dagster-cloud-image:$IMAGE_TAG - ``` - - The upload step is specific to your Docker container registry and will require authentication. The only requirement is that the registry you upload to must match the registry specified in `dagster_cloud.yaml`. - -5. Update the build session with the Docker image tag. For each code location you want to deploy, run the following command passing the `IMAGE_TAG` used in the previous step: - - ```shell - dagster-cloud ci set-build-output --location-name=code-location-a --image-tag=IMAGE_TAG - ``` - - This command does not deploy the code location but just updates the local state in `DAGSTER_BUILD_STATEDIR`. - -6. Deploy to Dagster Cloud: - - ```shell - dagster-cloud ci deploy - ``` - - This command updates the code locations in Dagster Cloud. Once this finishes successfully, you should be able to see the code locations under the **Deployments** tab in Dagster Cloud. - -**Note**: Creating Branch Deployments using the CLI requires some additional steps. Refer to the [Branch Deployments with the dagster-cloud CLI guide](/dagster-cloud/managing-deployments/branch-deployments/using-branch-deployments) for more info. - -When finished, [continue to the next step](#step-5-set-up-environment-variables-and-secrets). - - - - ---- - -## Step 5: Set up environment variables and secrets - -Congrats! At this point, your Dagster Cloud deployment should be up and running. To ensure the external services you use in your Dagster project work correctly, start setting up your [environment variables](/dagster-cloud/managing-deployments/environment-variables-and-secrets). Using environment variables, you can securely pass in sensitive info like passwords, API tokens, etc. - ---- - -## Next steps - -From here, you can: - -- [Invite your team](/dagster-cloud/account/managing-users) -- [Configure authentication for your account](/dagster-cloud/account/authentication) -- [Set up monitoring and alerting](/dagster-cloud/managing-deployments/setting-up-alerts) -- [Learn more setting up CI using Branch Deployments](/dagster-cloud/managing-deployments/branch-deployments) diff --git a/docs/content/dagster-cloud/insights.mdx b/docs/content/dagster-cloud/insights.mdx deleted file mode 100644 index 653dbe11fc04f..0000000000000 --- a/docs/content/dagster-cloud/insights.mdx +++ /dev/null @@ -1,166 +0,0 @@ ---- -title: "Dagster Cloud Insights | Dagster Docs" -description: "Visibility into historical usage and cost metrics." - -platform_type: "cloud" ---- - -# Dagster Cloud Insights (Experimental) - - - This feature is considered experimental. - - -Using Dagster Cloud Insights, you can gain visibility into historical usage and cost metrics such as Dagster Cloud run duration, credit usage, and failures. You can also [integrate other external metrics](#integrating-external-metrics) into the Insights UI and [export metrics from Dagster Cloud](#exporting-metrics). - ---- - -## About Insights - -Launched in October 2023, Insights is currently an **experimental feature.** This means we’re still collecting feedback on how Insights works and could make changes based on that feedback. 
- -**Note**: These changes may include breaking changes to some aspects of the feature, though we will attempt to avoid it. - -### Getting access - -Currently, Insights can only be enabled by the Dagster team. To request access, you can: - -- Reach out to us directly in your company's Slack channel -- Request access in the Dagster Slack #dagster-insights channel - -**Note**: While visualizing and exploring [Dagster-generated metrics](#available-metrics) within the UI will be available to all cloud users, [integrating external metrics](#integrating-external-metrics) into Insights is an **Enterprise-only feature**. - -### Future plans - -As part of the experimental release, out-of-the-box utilities for capturing external metadata are limited to Snowflake. Refer to the [Integrating external metrics section](#integrating-external-metrics) for more info. - -Additional data sources and capabilities for capturing arbitrary metadata, as well as native tooling for enabling these capabilities on pipelines using ops and jobs, will be released in the coming weeks. - ---- - -## Accessing Insights in the Dagster UI - -To access this feature, click **Insights** in the top navigation bar in the UI: - - - - - -The left navigation panel on this page contains a list of [available metrics](#available-metrics). For each metric, the daily, weekly, or monthly aggregated values are displayed in the graph. - -These metrics are updated on a daily basis. - -### Available metrics - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
| Metric | Description |
| --- | --- |
| Dagster credits | The Dagster credit cost associated with computing this object. Dagster credits are charged for every step that is run, and for every asset that is materialized. For more information, refer to the pricing FAQ. |
| Compute duration | The time spent computing steps. For jobs that run steps in parallel, the compute duration may be longer than the wall clock time it takes for the run to complete. |
| Materializations | The number of asset materializations associated with computing this object. |
| Observations | The number of asset observations associated with computing this object. |
| Step failures | The number of times steps failed when computing this object. **Note**: Steps that retry and succeed are not included in this metric. |
| Step retries | The number of times steps were retried when computing this object. |
| Asset check warnings | The number of asset checks that produced warnings. |
| Asset check errors | The number of asset checks that produced errors. |
| Retry compute | The time spent computing steps, including time spent retrying failed steps. For jobs that run steps in parallel, the compute duration may be longer than the wall clock time it takes for the run to complete. |
    - ---- - -## Uses - -### Integrating external metrics - -External metrics, such as Snowflake credits, can be integrated into Dagster Insights. Insights currently supports integrating Snowflake metrics for: - -- **Usage generated by queries made to Snowflake resources.** Refer to the [Integrating direct Snowflake usage with Dagster Cloud Insights guide](/dagster-cloud/insights/integrating-snowflake) for more info. -- **Snowflake usage generated by dbt.** Refer to the [dbt + Snowflake + Dagster Cloud Insights guide](/dagster-cloud/insights/integrating-snowflake-and-dbt) for more info. - -### Exporting metrics - -Metrics in Dagster Cloud Insights can be exported using a GraphQL API endpoint. Refer to the [Exporting Insights metrics from Dagster Cloud guide](/dagster-cloud/insights/exporting-insights-metrics) for details. diff --git a/docs/content/dagster-cloud/insights/integrating-external-metrics.mdx b/docs/content/dagster-cloud/insights/integrating-external-metrics.mdx deleted file mode 100644 index fec679295130a..0000000000000 --- a/docs/content/dagster-cloud/insights/integrating-external-metrics.mdx +++ /dev/null @@ -1,33 +0,0 @@ ---- -title: "Integrating external metrics into Dagster Cloud Insights | Dagster Docs" -description: "Integrating external metrics with Dagster Insights." - -platform_type: "cloud" ---- - -# Integrating external metrics into Dagster Cloud Insights (Experimental) - - - This feature is considered experimental. - - -External metrics, such as Snowflake credits, can be integrated into the Dagster Insights UI. The [`dagster-cloud`](https://pypi.org/project/dagster-cloud/) package contains utilities for capturing and submitting external metrics about data operations to Dagster Cloud via an API. - ---- - -## Limitations - -Before you start, note that: - -- This is a Dagster Cloud Enterprise feature -- Up to two million individual data points may be added to Insights, per month -- External metrics data will only be retained for 90 days - ---- - -## Supported integrations - -Currently, Insights supports integrating Snowflake metrics for: - -- **Usage generated by queries made to Snowflake resources.** Refer to the [Integrating direct Snowflake usage with Dagster Cloud Insights guide](/dagster-cloud/insights/integrating-snowflake) for more info. -- **Snowflake usage generated by dbt.** Refer to the [dbt + Snowflake + Dagster Cloud Insights guide](/dagster-cloud/insights/integrating-snowflake-and-dbt) for more info. diff --git a/docs/content/dagster-cloud/insights/integrating-snowflake-and-dbt.mdx b/docs/content/dagster-cloud/insights/integrating-snowflake-and-dbt.mdx deleted file mode 100644 index ca019aee90116..0000000000000 --- a/docs/content/dagster-cloud/insights/integrating-snowflake-and-dbt.mdx +++ /dev/null @@ -1,150 +0,0 @@ ---- -title: "Integrating Snowflake + dbt with Dagster Cloud Insights | Dagster Docs" -description: "Integrating external metrics with Dagster Insights." - -platform_type: "cloud" ---- - -# Integrating Snowflake + dbt with Dagster Cloud Insights (Experimental) - - - This feature is considered experimental. - - -External metrics, such as Snowflake credits, can be integrated into the Dagster Insights UI. The [`dagster-cloud`](https://pypi.org/project/dagster-cloud/) package contains utilities for capturing and submitting external metrics about data operations to Dagster Cloud via an API. - -If you use dbt to materialize tables in Snowflake, use this guide to integrate Snowflake metrics into the Insights UI. 
For instructions on integrating direct Snowflake queries, see [Integrating Direct Snowflake Usage with Dagster Cloud Insights](/dagster-cloud/insights/integrating-snowflake).

---

## Prerequisites

To complete the steps in this guide, you'll need:

- A Dagster Cloud account on the Enterprise plan
- Access to the [Dagster Cloud Insights feature](/dagster-cloud/insights)
- Snowflake credentials that have access to the `snowflake.account_usage.query_history` table. For more information on granting access to this table, see the [Snowflake documentation](https://docs.snowflake.com/en/sql-reference/account-usage#enabling-the-snowflake-database-usage-for-other-roles).
- The following libraries installed:

  ```shell
  pip install dagster dagster-cloud dagster-dbt dagster-snowflake
  ```

  **Note**: If you already have `dagster-cloud` installed, **make sure you're using version 1.5.1 or newer**.

---

## Step 1: Instrument your Dagster code

If you define your dbt models as assets, instrument the Dagster function with `dbt_with_snowflake_insights`:

```python
from dagster import AssetExecutionContext
from dagster_dbt import DbtCliResource, dbt_assets

from dagster_cloud.dagster_insights import dbt_with_snowflake_insights


@dbt_assets(...)
def my_asset(context: AssetExecutionContext, dbt: DbtCliResource):
    # Typically you have a `yield from dbt.cli(...)`.
    # Wrap the original call with `dbt_with_snowflake_insights` as below.
    dbt_cli_invocation = dbt.cli(["build"], context=context)
    yield from dbt_with_snowflake_insights(context, dbt_cli_invocation)
```

This passes through all underlying events and emits an `AssetObservation` for each asset materialization. The observation contains the dbt invocation ID and unique ID that are recorded in the Dagster event log.

If you launch dbt from an op instead, instrument the op function the same way:

```python
from dagster import OpExecutionContext, job, op
from dagster_dbt import DbtCliResource

from dagster_cloud.dagster_insights import dbt_with_snowflake_insights


@op(out={})
def my_dbt_op(context: OpExecutionContext, dbt: DbtCliResource):
    # Typically you have a `yield from dbt.cli(...)`.
    # Wrap the original call with `dbt_with_snowflake_insights` as below.
    # `dbt_manifest_path` points at your dbt project's manifest.json.
    dbt_cli_invocation = dbt.cli(
        ["build"], context=context, manifest=dbt_manifest_path
    )
    yield from dbt_with_snowflake_insights(context, dbt_cli_invocation)


@job
def my_dbt_job():
    ...
    my_dbt_op()
    ...
```

This passes through all underlying events and emits an `AssetObservation` for each asset materialization. The observation contains the dbt invocation ID and unique ID that are recorded in the Dagster event log.

---

## Step 2: Update dbt_project.yml

Next, add the following to your dbt project's `dbt_project.yml`:

```yaml
query-comment:
  comment: "snowflake_dagster_dbt_v1_opaque_id[[[{{ node.unique_id }}:{{ invocation_id }}]]]"
  append: true
```

This adds a comment, containing the dbt invocation ID and unique ID, to every query recorded in Snowflake's `query_history` table.

**Note**: Make sure to include `append: true`, as Snowflake strips leading comments.

---

## Step 3: Create a metrics ingestion pipeline in Dagster

The last step is to create a Dagster pipeline that joins asset observation events with the Snowflake query history and calls the Dagster Cloud ingestion API. Snowflake usage information is available at a delay, so this pipeline will run on a schedule to ingest Snowflake usage information from the previous hour.

To do this, you'll need a Snowflake resource (`SnowflakeResource`) that can query the `snowflake.account_usage.query_history` table.
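The opaque ID comment from Step 2 is what makes this join possible: it tags each dbt-issued row in `query_history` with the invocation ID and unique ID recorded in the corresponding asset observation. As a rough, hypothetical illustration of the kind of lookup involved (not the library's actual query):

```sql
-- Hypothetical sketch: locate dbt-issued queries by their opaque ID comment
SELECT query_id, query_text, total_elapsed_time
FROM snowflake.account_usage.query_history
WHERE query_text LIKE '%snowflake_dagster_dbt_v1_opaque_id%';
```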
You can set up the ingestion pipeline like the following:

```python
from dagster import Definitions, EnvVar
from dagster_snowflake import SnowflakeResource

from dagster_cloud.dagster_insights import (
    create_snowflake_insights_asset_and_schedule,
)

snowflake_insights_definitions = create_snowflake_insights_asset_and_schedule(
    start_date="2023-10-5-00:00",
    snowflake_resource_key="snowflake_insights",
)

defs = Definitions(
    assets=[..., *snowflake_insights_definitions.assets],
    schedules=[..., snowflake_insights_definitions.schedule],
    resources={
        # ...your other resources, plus:
        "snowflake_insights": SnowflakeResource(
            account=EnvVar("SNOWFLAKE_PURINA_ACCOUNT"),
            user=EnvVar("SNOWFLAKE_PURINA_USER"),
            password=EnvVar("SNOWFLAKE_PURINA_PASSWORD"),
        ),
    },
)
```

In this example, the `snowflake_resource_key` is a `SnowflakeResource` that has access to the `query_history` table.

Once the pipeline runs, Snowflake credits will be visible in the **Insights** tab in the Dagster UI.

diff --git a/docs/content/dagster-cloud/insights/integrating-snowflake.mdx b/docs/content/dagster-cloud/insights/integrating-snowflake.mdx
deleted file mode 100644
index 498367fd390ef..0000000000000
--- a/docs/content/dagster-cloud/insights/integrating-snowflake.mdx
+++ /dev/null
@@ -1,101 +0,0 @@
---
title: "Integrating Direct Snowflake Usage with Dagster Cloud Insights | Dagster Docs"
description: "Integrating external metrics with Dagster Insights."
platform_type: "cloud"
---

# Integrating Direct Snowflake Usage with Dagster Cloud Insights (Experimental)

**Note**: This feature is considered experimental.

External metrics, such as Snowflake credits, can be integrated into the Dagster Insights UI. The [`dagster-cloud`](https://pypi.org/project/dagster-cloud/) package contains utilities for capturing and submitting external metrics about data operations to Dagster Cloud via an API.

If you use the [Snowflake Resource](/_apidocs/libraries/dagster-snowflake) to query Snowflake, use this guide to integrate Snowflake metrics into the Insights UI. For instructions on integrating usage of dbt models which run in Snowflake, see [Integrating Snowflake + dbt with Dagster Cloud Insights](/dagster-cloud/insights/integrating-snowflake-and-dbt).

---

## Prerequisites

To complete the steps in this guide, you'll need:

- A Dagster Cloud account on the Enterprise plan
- Access to the [Dagster Cloud Insights feature](/dagster-cloud/insights)
- Snowflake credentials that have access to the `snowflake.account_usage.query_history` table. For more information on granting access to this table, see the [Snowflake documentation](https://docs.snowflake.com/en/sql-reference/account-usage#enabling-the-snowflake-database-usage-for-other-roles).
- The following libraries installed:

  ```shell
  pip install dagster dagster-cloud dagster-snowflake
  ```

  **Note**: If you already have `dagster-cloud` installed, **make sure you're using version 1.5.8 or newer**.

---

## Step 1: Replace your Snowflake resources

The first step is to replace any existing Snowflake resources with `InsightsSnowflakeResource`. This resource is a drop-in replacement for the standard `SnowflakeResource`, but it also emits Snowflake usage metrics to the Dagster Cloud Insights API.
```python
from dagster import Definitions, EnvVar
from dagster_cloud.dagster_insights import InsightsSnowflakeResource

defs = Definitions(
    resources={
        "snowflake": InsightsSnowflakeResource(
            account=EnvVar("SNOWFLAKE_PURINA_ACCOUNT"),
            user=EnvVar("SNOWFLAKE_PURINA_USER"),
            password=EnvVar("SNOWFLAKE_PURINA_PASSWORD"),
        ),
    }
)
```

---

## Step 2: Create a metrics ingestion pipeline in Dagster

The second step is to create a Dagster pipeline that joins asset observation events with the Snowflake query history and calls the Dagster Cloud ingestion API. Snowflake usage information is available at a delay, so this pipeline will run on a schedule to ingest Snowflake usage information from the previous hour.

To do this, you'll need a Snowflake resource (`InsightsSnowflakeResource`) that can query the `snowflake.account_usage.query_history` table. You can set up the ingestion pipeline like the following:

```python
from dagster import Definitions, EnvVar

from dagster_cloud.dagster_insights import (
    InsightsSnowflakeResource,
    create_snowflake_insights_asset_and_schedule,
)

snowflake_insights_definitions = create_snowflake_insights_asset_and_schedule(
    start_date="2023-10-5-00:00",
    snowflake_resource_key="snowflake_insights",
)

defs = Definitions(
    assets=[..., *snowflake_insights_definitions.assets],
    schedules=[..., snowflake_insights_definitions.schedule],
    resources={
        # ...your other resources, plus:
        "snowflake_insights": InsightsSnowflakeResource(
            account=EnvVar("SNOWFLAKE_PURINA_ACCOUNT"),
            user=EnvVar("SNOWFLAKE_PURINA_USER"),
            password=EnvVar("SNOWFLAKE_PURINA_PASSWORD"),
        ),
    },
)
```

In this example, the `snowflake_resource_key` is an `InsightsSnowflakeResource` that has access to the `query_history` table.

Once the pipeline runs, Snowflake credits will be visible in the **Insights** tab in the Dagster UI.

diff --git a/docs/content/dagster-cloud/managing-deployments.mdx b/docs/content/dagster-cloud/managing-deployments.mdx
deleted file mode 100644
index 5e4f3072b782c..0000000000000
--- a/docs/content/dagster-cloud/managing-deployments.mdx
+++ /dev/null
@@ -1,64 +0,0 @@
---
title: "Managing Dagster Cloud deployments | Dagster Docs"
---

# Managing Dagster Cloud deployments

Learn how to deploy your code to Dagster Cloud, use command line tools, set up CI/CD, and define environment variables.

---

## Environment variables and secrets

- [Dagster Cloud environment variables and secrets](/dagster-cloud/managing-deployments/environment-variables-and-secrets)

---

## Branch deployments

- [Branch Deployments in Dagster Cloud](/dagster-cloud/managing-deployments/branch-deployments)

diff --git a/docs/content/dagster-cloud/managing-deployments/branch-deployments.mdx b/docs/content/dagster-cloud/managing-deployments/branch-deployments.mdx
deleted file mode 100644
index eb51b97e35d41..0000000000000
--- a/docs/content/dagster-cloud/managing-deployments/branch-deployments.mdx
+++ /dev/null
@@ -1,236 +0,0 @@
---
title: Branch Deployments in Dagster Cloud | Dagster Docs
description: Develop and test in the cloud.
---

# Branch Deployments in Dagster Cloud

Dagster Cloud provides out-of-the-box support for Continuous Integration (CI) with **Branch Deployments**.

Branch Deployments automatically create staging environments of your Dagster code, right in Dagster Cloud. For every push to a branch in your git repository, Dagster Cloud will create a unique deployment, allowing you to preview the changes in the branch in real time.
- ---- - -## Understanding Branch Deployments - -- [Overview](#overview) -- [Benefits](#benefits) -- [Requirements](#requirements) -- [Supported platforms](#supported-platforms) -- [Limitations](#limitations) -- [Output handling](#output-handling) - -### Overview - -Think of a branch deployment as a branch of your data platform, one where you can preview changes without impacting production or overwriting a testing environment. - - - -Let's take a closer look: - -1. In your git repository, a new branch is created off of `main`. In the example above, this branch is named `feature-1`. - -2. Dagster Cloud is notified of the push and creates a branch deployment named `feature-1`. The branch deployment functions just like your `production` deployment of Dagster Cloud, but contains the Dagster code changes from the `feature-1` branch. - - In this example, the `feature-1` branch deployment 'talks' to a `cloned schema` in a database. This is completely separate from the `prod schema` associated with the `production` deployment. - -3. For every push to the `feature-1` branch, the `feature-1` branch deployment in Dagster Cloud is rebuilt and redeployed. - -### Benefits - -Now that you know how Branch Deployments work, **why should you use them**? - -- **Improved collaboration.** Branch Deployments make it easy for everyone on your team to stay in the loop on the latest Dagster changes. -- **Reduced development cycle.** Quickly test and iterate on your changes without impacting production or overwriting a testing environment. - -### Requirements - -To use Branch Deployments, you'll need a [Dagster Cloud account](https://dagster.cloud/). - -### Supported platforms - -Branch Deployments can be used with any git or CI provider. However, setup is easiest with the [Dagster GitHub app](/dagster-cloud/managing-deployments/branch-deployments/using-branch-deployments-with-github) or [Dagster Gitlab app](/dagster-cloud/managing-deployments/branch-deployments/using-branch-deployments-with-gitlab) as parts of the process are automated. Refer to the [Setting up Branch Deployments section](#setting-up-branch-deployments) for more info. - -### Limitations - -The following aren't currently supported for Branch Deployments: - -- Use in Dagster Open Source -- Testing [backfills](/concepts/partitions-schedules-sensors/backfills) - -### Output handling - -Output created from a branch deployment - such as a database, table, etc. - won't be automatically removed from storage once a branch is merged or closed. Refer to the [Best practices section](#best-practices) for info on how to handle this. - ---- - -## Managing Branch Deployments - -- [Setting up Branch Deployments](#setting-up-branch-deployments) -- [Accessing a branch deployment](#accessing-a-branch-deployment) - -### Setting up Branch Deployments - -There are currently two ways to set up Branch Deployments for Dagster Cloud. In the table below: - -- **Platform** - The name of the git/CI platform, which is also a link to a setup guide -- **How it works** - Summary of how Branch Deployments work with the platform -- **May be a good fit if...** - A high-level summary of when the platform may be a good fit - - - - - - - - - - - - - - - - - - - - - - - - - - -
| Platform | How it works | May be a good fit if... |
| --- | --- | --- |
| [GitHub](/dagster-cloud/managing-deployments/branch-deployments/using-branch-deployments-with-github) | GitHub Actions | You use GitHub for version control; you want Dagster to fully automate Branch Deployments |
| [Gitlab](/dagster-cloud/managing-deployments/branch-deployments/using-branch-deployments-with-gitlab) | Gitlab CI/CD | You use Gitlab for version control; you want Dagster to fully automate Branch Deployments |
| [Other git/CI platform](/dagster-cloud/managing-deployments/branch-deployments/using-branch-deployments) | [`dagster-cloud` CLI](/dagster-cloud/managing-deployments/dagster-cloud-cli) | You don't use GitHub or Gitlab for version control; you use an alternate CI platform; you want full control over Branch Deployment configuration (see the sketch below) |
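For the third option, the moving parts are the same ones the `dagster-cloud ci` commands shown earlier in these docs cover: build and push an image, record it for each code location, then deploy. A rough sketch of a custom CI job, with the registry and location names as placeholders:

```shell
# Sketch of a custom CI job for a branch deployment (names are placeholders)
export IMAGE_TAG=`git log --format=format:%H -n 1`
docker build . -t ghcr.io/org/dagster-cloud-image:$IMAGE_TAG
docker push ghcr.io/org/dagster-cloud-image:$IMAGE_TAG

# Record the image for each code location, then deploy
dagster-cloud ci set-build-output --location-name=code-location-a --image-tag=$IMAGE_TAG
dagster-cloud ci deploy
```

Refer to the linked CLI guide for the full flow, including the branch-deployment-specific steps it adds.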
    - -### Accessing a branch deployment - -Once configured, branch deployments can be accessed: - - - - -Every pull request in the repository contains a **View in Cloud** link: - - - -Clicking the link will open a branch deployment - or a preview of the changes - in Dagster Cloud. - - - - - - To access a Branch Deployment in Dagster Cloud, you need permissions that - grant you{" "} - - access to Branch Deployments - {" "} - and the code location associated with the Branch Deployment. - - -You can also access branch deployments directly in Dagster Cloud from the **deployment switcher**: - - - - - - ---- - -## Best practices - -To ensure the best experience when using Branch Deployments, we recommend: - -- **Configuring jobs based on environment**. Dagster automatically sets [environment variables](/dagster-cloud/managing-deployments/environment-variables-and-secrets#built-in-environment-variables) containing deployment metadata, allowing you to parameterize jobs based on the executing environment. Use these variables in your jobs to configure things like connection credentials, databases, and so on. This practice will allow you to use Branch Deployments without impacting production data. - -- **Creating jobs to automate output cleanup.** As Branch Deployments don't automatically remove [the output they create](#output-handling), you may want to create an additional Dagster job to perform the cleanup. - -**Want some help with implementation?** Check out the [Testing against production with Dagster Cloud Branch Deployments guide](/guides/dagster/branch_deployments) for a step-by-step look at implementing these best practices in your data pipelines. diff --git a/docs/content/dagster-cloud/managing-deployments/branch-deployments/using-branch-deployments-with-github.mdx b/docs/content/dagster-cloud/managing-deployments/branch-deployments/using-branch-deployments-with-github.mdx deleted file mode 100644 index 43b9cdcf96e34..0000000000000 --- a/docs/content/dagster-cloud/managing-deployments/branch-deployments/using-branch-deployments-with-github.mdx +++ /dev/null @@ -1,230 +0,0 @@ ---- -title: Using Branch Deployments (CI) with GitHub Actions | Dagster Cloud -description: Develop and test in the cloud. ---- - -# Using Branch Deployments (CI) with GitHub Actions - -This guide is applicable to Dagster Cloud. - -In this guide, we'll walk you through setting up Continuous Integration (CI) using [Branch Deployments](/dagster-cloud/managing-deployments/branch-deployments) with GitHub Actions. - -Using this approach to branch deployments may be a good fit if: - -- You use GitHub for version control -- You want Dagster to fully automate Branch Deployments - -**If you don't use GitHub for version control or want full control over your setup**, check out the [Branch deployments with the `dagster-cloud CLI` guide](/dagster-cloud/managing-deployments/branch-deployments/using-branch-deployments). - ---- - -## Prerequisites - -To complete the steps in this guide, you'll need: - -- **Organization Admin** permissions in Dagster Cloud -- **The ability to run a new agent in your infrastructure**. This isn't required if you're using [Serverless deployment](/dagster-cloud/deployment/serverless). -- **The ability to configure GitHub Actions for your repository**. This isn't required if you used the Dagster Cloud GitHub app to connect your repository as a [code location](/dagster-cloud/managing-deployments/code-locations). 
- ---- - -## Step 1: Generate a Dagster Cloud agent token - - - ---- - -## Step 2: Create and configure an agent - - - - If using{" "} - Serverless deployment - - , this step isn't required. - - - - ---- - -## Step 3: Add GitHub workflow files to your repository - - - If you used the GitHub app to add the repository as a code location, this step - isn't required.{" "} - Skip to the next step. - - -In this step, you'll add the required GitHub workflow files to your GitHub repository. The files can be found in our [Hybrid quickstart repository](https://github.com/dagster-io/dagster-cloud-hybrid-quickstart) or [Serverless quickstart repository](https://github.com/dagster-io/dagster-cloud-serverless-quickstart), depending on the agent you are using. - -Copy the following files to your repository (the linked files are shown in the Hybrid repo, there are equivalents in the Serverless repo). - -- [`dagster_cloud.yaml`](https://github.com/dagster-io/dagster-cloud-hybrid-quickstart/blob/main/dagster_cloud.yaml) -- [`.github/workflows/branch_deployments.yml`](https://github.com/dagster-io/dagster-cloud-hybrid-quickstart/blob/main/.github/workflows/branch_deployments.yml) -- [`.github/workflows/deploy.yml`](https://github.com/dagster-io/dagster-cloud-hybrid-quickstart/blob/main/.github/workflows/deploy.yml) - -In the next step, you'll modify these files to work with your Dagster Cloud setup. - ---- - -## Step 4: Configure the GitHub repository - -In this section, you'll: - -1. [Add the agent registry to `dagster_cloud.yaml`](#step-41-add-the-agent-registry-to-dagster_cloudyaml) -2. [Configure GitHub Action secrets](#step-42-configure-github-action-secrets) -3. [Update GitHub Workflow files](#step-43-update-github-workflow-files) -4. [Verify Action runs](#step-44-verify-action-runs) - -### Step 4.1: Add the agent registry to dagster_cloud.yaml - - - If you're using Serverless deployment, this step isn't required.{" "} - Skip to the next step. - - -In the [`dagster_cloud.yaml` file](/dagster-cloud/managing-deployments/dagster-cloud-yaml), replace `build.registry` with the registry used by the [agent you created in Step 2](#step-2-create-and-configure-an-agent). - -For example: - -```yaml -# dagster_cloud.yaml - -locations: - - location_name: example_location - code_source: - python_file: repo.py - build: - directory: ./example_location - registry: 764506304434.dkr.ecr.us-east-1.amazonaws.com/branch-deployment-agent -``` - -### Step 4.2: Configure GitHub Action secrets - - - If you used the GitHub app to add the repository as a code location, this step{" "} - isn't required. -
**Tip**: Want to use secrets in your Dagster code? Check out the [Dagster Cloud environment variables and secrets guide](/dagster-cloud/managing-deployments/environment-variables-and-secrets) for more info.
    - - - -**For Hybrid deployments,** repeat steps 3-6 for each of the secrets required for your registry type: - - - -Select the tab for your registry type to view instructions. - - - -- `DAGSTER_CLOUD_API_TOKEN` - The Dagster Cloud agent token you created in [Step 1](#step-1-generate-a-dagster-cloud-agent-token) -- `ORGANIZATION_ID` - Your Dagster Cloud organization ID -- `AWS_ACCESS_KEY` - The **Access key ID** of the AWS IAM user you created in [Step 2](#step-2-create-and-configure-an-agent) -- `AWS_SECRET_ACCESS_KEY` - The **Secret access key** of the AWS IAM user you created in [Step 2](#step-2-create-and-configure-an-agent) -- `AWS_REGION` - The AWS region where your ECR registry is located - -The **Actions secrets** page should look like the following: - - - - - - -- `DAGSTER_CLOUD_API_TOKEN` - The Dagster Cloud agent token you created in [Step 1](#step-1-generate-a-dagster-cloud-agent-token) -- `ORGANIZATION_ID` - Your Dagster Cloud organization ID -- `DOCKERHUB_USERNAME` - Your DockerHub username -- `DOCKERHUB_TOKEN` - A DockerHub [access token](https://docs.docker.com/docker-hub/access-tokens/#create-an-access-token) - -The **Actions secrets** page should look like the following: - - - - - - -### Step 4.3: Update GitHub Workflow files - - - If you're using Serverless deployment, this step isn't required.{" "} - Skip to the next step. - - -In this step, you'll update the GitHub Workflow files in the repository to set up Docker registry access. - -In the `.github/workflows/deploy.yml` and `.github/workflows/branch_deployments.yml` files, uncomment the `step` associated with your registry. For example, for an Amazon ECR registry, you'd uncomment the following portion of the workflow files: - -```yaml -steps: - - name: Configure AWS credentials - uses: aws-actions/configure-aws-credentials@v1 - with: - aws-access-key-id: ${{ secrets.AWS_ACCESS_KEY_ID }} - aws-secret-access-key: ${{ secrets.AWS_SECRET_ACCESS_KEY }} - aws-region: ${{ secrets.AWS_REGION }} -``` - -Save and commit the files to the repository. - -### Step 4.4: Verify Action runs - -The last step is to verify that the Action runs successfully. - -1. In the repository, click the **Actions** tab. -2. In the list of workflows, locate the latest branch deployment run. For example: - - A successful run for a Branch Deployment Action - -If the run finished successfully, that's it! - ---- - -## Step 5: Access the branch deployment - -Now that Branch Deployments are configured, you can check out the preview in Dagster Cloud. There are two ways to do this: - -- [From a pull request](#from-a-pull-request) -- [Directly in Dagster Cloud](#directly-in-dagster-cloud) - -### From a pull request - -Every pull request in the repository contains a **View in Cloud** link: - - - -Clicking the link will open a branch deployment - or a preview of the changes - in Dagster Cloud. 
- -### Directly in Dagster Cloud - -You can also access branch deployments directly in Dagster Cloud from the **deployment switcher**: - - diff --git a/docs/content/dagster-cloud/managing-deployments/branch-deployments/using-branch-deployments-with-gitlab.mdx b/docs/content/dagster-cloud/managing-deployments/branch-deployments/using-branch-deployments-with-gitlab.mdx deleted file mode 100644 index cb03afe763452..0000000000000 --- a/docs/content/dagster-cloud/managing-deployments/branch-deployments/using-branch-deployments-with-gitlab.mdx +++ /dev/null @@ -1,204 +0,0 @@ ---- -title: Using Branch Deployments with Gitlab CI/CD | Dagster Cloud -description: Develop and test in the cloud. ---- - -# Using Branch Deployments with Gitlab CI/CD - -This guide is applicable to Dagster Cloud. - -In this guide, we'll walk you through setting up [Branch Deployments](/dagster-cloud/managing-deployments/branch-deployments) with Gitlab CI/CD. - -Using this approach to branch deployments may be a good fit if: - -- You use Gitlab for version control -- You want Dagster to fully automate Branch Deployments - -**If you don't use Gitlab for version control or want full control over your setup**, check out the [Branch deployments with the `dagster-cloud CLI` guide](/dagster-cloud/managing-deployments/branch-deployments/using-branch-deployments). - ---- - -## Prerequisites - -To complete the steps in this guide, you'll need: - -- **Organization Admin** permissions in Dagster Cloud -- **The ability to run a new agent in your infrastructure**. This isn't required if you're using [Serverless deployment](/dagster-cloud/deployment/serverless). -- **The ability to configure Gitlab CI/CD for your project**. This isn't required if you used the Dagster Cloud Gitlab app to connect your project as a [code location](/dagster-cloud/managing-deployments/code-locations). - ---- - -## Step 1: Generate a Dagster Cloud agent token - - - ---- - -## Step 2: Create and configure an agent - - - - If using{" "} - Serverless deployment - - , this step isn't required. - - - - ---- - -## Step 3: Add Gitlab CI/CD script to your project - - - If you used the Gitlab app to add the project as a code location, this step - isn't required.{" "} - Skip to the next step. - - -In this step, you'll add the required Gitlab CI config file to your Gitlab project. The file can be found in our [CI/CD workflow repository](https://github.com/dagster-io/dagster-cloud-action) or [Serverless quickstart repository](https://github.com/dagster-io/dagster-cloud-serverless-quickstart), depending on the agent you are using. - -Copy the following files to your project (the linked files are shown for Hybrid repos, there are equivalents for Serverless repos). - -- [`dagster_cloud.yaml`](https://github.com/dagster-io/dagster-cloud-hybrid-quickstart/blob/main/dagster_cloud.yaml) -- [`.gitlab-ci.yml`](https://github.com/dagster-io/dagster-cloud-action/blob/main/gitlab/hybrid-ci.yml) - -In the next step, you'll modify these files to work with your Dagster Cloud setup. - ---- - -## Step 4: Configure the Gitlab project - -In this section, you'll: - -1. [Add the agent registry to `dagster_cloud.yaml`](#step-41-add-the-agent-registry-to-dagster_cloudyaml) -2. [Configure Gitlab CI/CD variables](#step-42-configure-gitlab-cicd-variables) -3. [Update Gitlab CI/CD script](#step-43-update-gitlab-cicd-script) -4. 
[Verify CI/CD pipeline runs](#step-44-verify-pipeline-runs) - -### Step 4.1: Add the agent registry to dagster_cloud.yaml - - - If you're using Serverless deployment, this step isn't required.{" "} - Skip to the next step. - - -In the [`dagster_cloud.yaml` file](/dagster-cloud/managing-deployments/dagster-cloud-yaml), replace `build.registry` with the registry used by the [agent you created in Step 2](#step-2-create-and-configure-an-agent). - -For example: - -```yaml -# dagster_cloud.yaml - -locations: - - location_name: example_location - code_source: - python_file: repo.py - build: - directory: ./example_location - registry: 764506304434.dkr.ecr.us-east-1.amazonaws.com/branch-deployment-agent -``` - -### Step 4.2: Configure Gitlab CI/CD variables - - - If you used the Gitlab app to add the project as a code location, this step - isn't required. -
**Tip**: Want to use secrets in your Dagster code? Check out the [Dagster Cloud environment variables and secrets guide](/dagster-cloud/managing-deployments/environment-variables-and-secrets) for more info.
    - - - -**For Hybrid deployments,** repeat steps 3-6 for each of the secrets required for your registry type: - - - - - -- `DAGSTER_CLOUD_API_TOKEN` - The Dagster Cloud agent token you created in [Step 1](#step-1-generate-a-dagster-cloud-agent-token) -- `DAGSTER_CLOUD_URL` - Your Dagster Cloud base URL (e.g. `https://my_org.dagster.cloud`) - - - - - -- `DAGSTER_CLOUD_API_TOKEN` - The Dagster Cloud agent token you created in [Step 1](#step-1-generate-a-dagster-cloud-agent-token) -- `DAGSTER_CLOUD_URL` - Your Dagster Cloud base URL (e.g. `https://my_org.dagster.cloud`) -- `DOCKERHUB_USERNAME` - Your DockerHub username -- `DOCKERHUB_TOKEN` - A DockerHub [access token](https://docs.docker.com/docker-hub/access-tokens/#create-an-access-token) - - - - - -- `DAGSTER_CLOUD_API_TOKEN` - The Dagster Cloud agent token you created in [Step 1](#step-1-generate-a-dagster-cloud-agent-token) -- `DAGSTER_CLOUD_URL` - Your Dagster Cloud base URL (e.g. `https://my_org.dagster.cloud`) -- `AWS_ACCESS_KEY` - The **Access key ID** of the AWS IAM user you created in [Step 2](#step-2-create-and-configure-an-agent) -- `AWS_SECRET_ACCESS_KEY` - The **Secret access key** of the AWS IAM user you created in [Step 2](#step-2-create-and-configure-an-agent) -- `AWS_REGION` - The AWS region where your ECR registry is located - - - - - -- `DAGSTER_CLOUD_API_TOKEN` - The Dagster Cloud agent token you created in [Step 1](#step-1-generate-a-dagster-cloud-agent-token) -- `DAGSTER_CLOUD_URL` - Your Dagster Cloud base URL (e.g. `https://my_org.dagster.cloud`) -- `GCR_JSON_KEY` - Your GCR JSON credentials - - - - - -### Step 4.3: Update Gitlab CI/CD script - - - If you're using Serverless deployment, this step isn't required.{" "} - Skip to the next step. - - -In this step, you'll update the Gitlab CI/CD config to set up Docker registry access. - -In the `.gitlab-ci.yml` file, uncomment the `step` associated with your registry. For example, for the Gitlab container registry, you'd uncomment the following portion of the `.gitlab-ci.yml` file: - -```yaml -build-image: - ... - before_script: - # For Gitlab Container Registry - - echo $CI_JOB_TOKEN | docker login --username $CI_REGISTRY_USER --password-stdin $REGISTRY_URL -``` - -Save and commit the files to the project. - -### Step 4.4: Verify pipeline runs - -The last step is to verify that the Gitlab pipeline runs successfully. - -1. On the project page, click the **CI/CD** tab. -2. In the list of pipelines, locate the latest branch deployment run. For example: - - A successful run for a Branch Deployment Action - -If the run finished successfully, that's it! - ---- - -## Step 5: Access the branch deployment - -Now that Branch Deployments are configured, you can check out the preview in Dagster Cloud, by accessing the branch deployment from the **deployment switcher**: - - diff --git a/docs/content/dagster-cloud/managing-deployments/code-locations.mdx b/docs/content/dagster-cloud/managing-deployments/code-locations.mdx deleted file mode 100644 index 50f1b4a522da7..0000000000000 --- a/docs/content/dagster-cloud/managing-deployments/code-locations.mdx +++ /dev/null @@ -1,270 +0,0 @@ ---- -title: "Managing code locations in Dagster Cloud | Dagster Docs" ---- - -# Managing code locations in Dagster Cloud - -This guide is applicable to Dagster Cloud. - -In this guide, we'll cover the requirements for Dagster code, how to add code in Dagster Cloud, and how to add code using the `dagster-cloud` CLI. 
- -A [`dagster_cloud.yaml` file](/dagster-cloud/managing-deployments/dagster-cloud-yaml) is recommended to configure code locations for Dagster Cloud. - ---- - -## Understanding code locations - - - Learn by example? Check out the{" "} - - example repo - - , which is set up to run in Dagster Cloud. - - -A code location specifies a single Python package or file that defines your Dagster code. When you add a code location in Dagster Code, you're instructing the agent where to find your code. - -When you add or update a code location, the agent uses the location configuration to load your code and upload metadata about your jobs to Dagster Cloud. Each full deployment - for example, `prod` - can include code from one or more code locations. - -Note that, unlike Dagster Open Source, Dagster Cloud doesn't require a `workspace.yaml` file. Instead, you use the Dagster Cloud API to configure your workspace. You can still create a `workspace.yaml` file if you want to load your code in an open-source Dagster webserver instance, but doing so won't affect how your code is loaded in Dagster Cloud. - ---- - -## Dagster Cloud code requirements - -To work with Dagster Cloud, your Dagster code: - -- **Must be loaded from a single entry point, either a Python file or package.** This entry point can load repositories from other files or packages. - -- **Must run in an environment where the `dagster` and [`dagster-cloud`](/dagster-cloud/managing-deployments/dagster-cloud-cli) 0.13.2 or later Python packages are installed.** - -- **If using [Hybrid Deployment](/dagster-cloud/deployment/hybrid)**: - - - **And you're using an Amazon Elastic Container Service (ECS), Kubernetes, or Docker agent**, your code must be packaged into a Docker image and pushed to a registry your agent can access. Dagster Cloud doesn't need access to your image - your agent only needs to be able to pull it. - - Additionally, the Dockerfile for your image doesn't need to specify an entry point or command. These will be supplied by the agent when it runs your code using your supplied image. - - - **And you're using a local agent**, your code must be in a Python environment that can be accessed on the same machine as your agent. - -Additionally, note that: - -- Your code doesn't need to use the same version of Dagster as your agent -- Different code locations can use different versions of Dagster - ---- - -## Managing code locations in Dagster Cloud - - - Editor, Admin, or{" "} - Organization Admin permissions are required to manage code - locations in Dagster Cloud. -
    -
    - If you're an Editor or Admin, you can only manage - code locations in deployments where you're an Editor or - Admin - . -
- [Adding code locations](#adding-code-locations)
- [Modifying code locations](#modifying-code-locations)
- [Redeploying code locations](#redeploying-code-locations)
- [Deleting code locations](#deleting-code-locations)

### Adding code locations

1. Sign in to your Dagster Cloud account.

2. Click **Deployment**.

3. Click **+ Add code location**. This will open a YAML editor with a schema describing the acceptable fields.

4. In the editor, define the code location's configuration:

   - **code_source** - Set this key to either `python_file:` or `package_name:` to specify where to find your code.
   - **executable_path** - **Optional**. Define a specific Python executable if your code should run in a certain Python environment. If left undefined, the code will run using the default `dagster` command-line entry point.
   - **image** - **Required if not using a local agent.** Specifies a Docker image for use with containerized agents.
   - **working_directory** - Specifies the directory to use to resolve relative Python imports while loading your code.
   - **attribute** - Specifies that only a specific Dagster repository should be loaded.
   - **container_context** - **Optional**. For agent versions 0.14.9 and later. Customizes the code location for a specific execution environment. Refer to the Agent documentation for info on available configuration options for each agent type, including declaring [environment variables and secrets](/dagster-cloud/managing-deployments/environment-variables-and-secrets).

   For example, the following config specifies that a code location should include a secret named `my_secret` and run in a k8s namespace (`my_namespace`) whenever the Kubernetes agent creates a pod for the location:

   ```yaml
   location_name: cloud-examples
   image: dagster/dagster-cloud-examples:latest
   code_source:
     package_name: dagster_cloud_examples
   container_context:
     k8s:
       namespace: my_namespace
       env_secrets:
         - my_secret
   ```

5. When finished, click **Add code location**.

The agent will attempt to load your code and send its metadata to Dagster Cloud. **Note**: This may take some time.

Once your code has loaded, the location will show a green **Loaded** status and jobs will appear in Dagster Cloud. If the agent is unable to load your code, the location will show an error with more information.

### Modifying code locations

To modify a code location, click the **dropdown menu** to the right of the location. In the menu, click **Modify**.

After a code location is updated, the agent will perform a rolling update of your code and jobs will update in Dagster Cloud. **Note**: Updating code won't interrupt any currently launched runs.

### Redeploying code locations

To reload your code and upload job metadata to Dagster Cloud without modifying the code location, click the **Redeploy** button. For example, if the agent was unable to pull your image due to a permissions issue that's since been addressed, clicking **Redeploy** will tell the agent to try again.

### Deleting code locations

To delete a code location, click the **dropdown menu** to the right of the location. In the menu, click **Remove**.
- ---- - -## Managing code locations using the dagster-cloud CLI - -You can also use the `dagster-cloud workspace` CLI commands to: - -- [Add and update code locations](#adding-and-updating-code-locations) -- [Delete code locations](#deleting-code-locations-1) -- [Sync the workspace and remote](#syncing-the-workspace) - -These commands perform the same underlying operations as editing your code locations in the **Deployment** tab in Dagster Cloud. Refer to the [dagster-cloud CLI guide](/dagster-cloud/managing-deployments/dagster-cloud-cli) for more info and installation instructions. - -### Adding and updating code locations - -You can add or update a code location with the `add-location` command. For example, to add our public example image, you can run: - -```shell -# Set up YAML file for example location -cat > example_location.yaml < -``` - -### Syncing the workspace - -You can also keep the YAML configuration for your entire workspace in a [`dagster_cloud.yaml`](/dagster-cloud/managing-deployments/dagster-cloud-yaml) file and use the `dagster-cloud sync` command to reconcile the workspace config in Dagster Cloud with that local file. - -For example, if you have the following `dagster_cloud.yaml` file: - -```yaml caption=dagster_cloud.yaml -locations: - - location_name: machine-learning - image: myregistry/dagster-machine-learning:mytag - code_source: - package_name: dagster_cloud_machine_learning - executable_path: /my/folder/python_executable - attribute: my_repo - - location_name: data-eng - image: myregistry/dagster-data-eng:myothertag - code_source: - python_file: repo.py - working_directory: /my/folder/working_dir/ -``` - -Reconcile the above file with Dagster Cloud's remote workspace by running: - -```shell -dagster-cloud workspace sync -w dagster_cloud.yaml -``` diff --git a/docs/content/dagster-cloud/managing-deployments/dagster-cloud-cli.mdx b/docs/content/dagster-cloud/managing-deployments/dagster-cloud-cli.mdx deleted file mode 100644 index 233ac639b7a87..0000000000000 --- a/docs/content/dagster-cloud/managing-deployments/dagster-cloud-cli.mdx +++ /dev/null @@ -1,165 +0,0 @@ ---- -title: "Using the dagster-cloud CLI | Dagster Docs" ---- - -# Using the dagster-cloud CLI - -This guide is applicable to Dagster Cloud. - -The `dagster-cloud` CLI is a command-line toolkit designed to work with Dagster Cloud. - -In this guide, we'll cover how to install and configure the `dagster-cloud` CLI, get help, and use some helpful environment variables and CLI options. - ---- - -## Installing the CLI - -The Dagster Cloud Agent library is available in PyPi. To install, run: - -```shell -pip install dagster-cloud -``` - -Refer to the [configuration section](#configuring-the-cli) for next steps. - -### Completions - -Optionally, you can install command-line completions to make using the `dagster-cloud` CLI easier. - -To have the CLI install these completions to your shell, run: - -```shell -dagster-cloud --install-completion -``` - -To print out the completion for copying or manual installation: - -```shell -dagster-cloud --show-completion -``` - ---- - -## Configuring the CLI - -The recommended way to set up your CLI's config for long-term use is through the configuration file, located by default at `~/.dagster_cloud_cli/config`. - -### Setting up the configuration file - -Set up the config file: - -```shell -dagster-cloud config setup -``` - -Select your authentication method. **Note**: Browser authentication is the easiest method to configure. - -
**Browser authentication**

The easiest way to set up is to authenticate through the browser.

```shell
$ dagster-cloud config setup
? How would you like to authenticate the CLI? (Use arrow keys)
 » Authenticate in browser
   Authenticate using token
Authorized for organization `hooli`

? Default deployment: prod
```

When prompted, you can specify a default deployment. If specified, a deployment won't be required in subsequent `dagster-cloud` commands. The default deployment for a new Dagster Cloud organization is `prod`.

**Token authentication**

Alternatively, you may authenticate using a user token. Refer to the [Managing user and agent tokens guide](/dagster-cloud/account/managing-user-agent-tokens) for more info.

```shell
$ dagster-cloud config setup
? How would you like to authenticate the CLI? (Use arrow keys)
   Authenticate in browser
 » Authenticate using token

? Dagster Cloud organization: hooli
? Dagster Cloud user token: *************************************
? Default deployment: prod
```

When prompted, specify the following:

- **Organization** - Your organization name as it appears in your Dagster Cloud URL. For example, if your Dagster Cloud instance is `https://hooli.dagster.cloud/`, this would be `hooli`.
- **User token** - The user token.
- **Default deployment** - **Optional**. A default deployment. If specified, a deployment won't be required in subsequent `dagster-cloud` commands. The default deployment for a new Dagster Cloud organization is `prod`.
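Either path writes the same configuration file. A sketch of what `~/.dagster_cloud_cli/config` contains afterward (the fields mirror the `config view` output shown below):

```yaml
# Sketch: ~/.dagster_cloud_cli/config after setup
default_deployment: prod
organization: hooli
user_token: <your user token>
```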
    - -### Viewing and modifying the configuration file - -To view the contents of the CLI configuration file, run: - -```shell -$ dagster-cloud config view - -default_deployment: prod -organization: hooli -user_token: '*******************************8214fe' -``` - -Specify the `--show-token` flag to show the full user token. - -To modify the existing config, re-run: - -```shell -dagster-cloud config setup -``` - ---- - -## Toggling between deployments - -To quickly toggle between deployments, run: - -```shell -dagster-cloud config set-deployment -``` - ---- - -## Getting help - -To view help options in the CLI: - -```shell -dagster-cloud --help -``` - ---- - -## Reference - -- [Custom configuration file path](#custom-configuration-file-path) -- [Environment variables and CLI options](#environment-variables-and-cli-options) - -### Custom configuration file path - -Point the CLI at an alternate config location by specifying the `DAGSTER_CLOUD_CLI_CONFIG` environment variable. - -### Environment variables and CLI options - -Environment variables and CLI options can be used in place of or to override the CLI configuration file. - -The priority of these items is as follows: - -- **CLI options** - highest -- **Environment variables** -- **CLI configuration** - lowest - -| Setting | Environment variable | CLI flag | CLI config value | -| ------------ | ---------------------------- | ---------------------- | -------------------- | -| Organization | `DAGSTER_CLOUD_ORGANIZATION` | `--organization`, `-o` | `organization` | -| Deployment | `DAGSTER_CLOUD_DEPLOYMENT` | `--deployment`, `-d` | `default_deployment` | -| User Token | `DAGSTER_CLOUD_API_TOKEN` | `--user-token`, `-u` | `user_token` | diff --git a/docs/content/dagster-cloud/managing-deployments/deployment-settings-reference.mdx b/docs/content/dagster-cloud/managing-deployments/deployment-settings-reference.mdx deleted file mode 100644 index bf97de687e0e2..0000000000000 --- a/docs/content/dagster-cloud/managing-deployments/deployment-settings-reference.mdx +++ /dev/null @@ -1,238 +0,0 @@ ---- -title: "Dagster Cloud deployment settings reference" -description: "Detailed info about configurable settings for Dagster Cloud deployments." ---- - -# Dagster Cloud deployment settings reference - -This guide is applicable to Dagster Cloud. - -This reference describes the settings that can be configured for full deployments in [Dagster Cloud](/dagster-cloud). - -Refer to the [Managing deployments in Dagster Cloud guide](/dagster-cloud/managing-deployments/managing-deployments#configuring-deployment-settings) for info about configuring settings in the Dagster Cloud interface or using the dagster-cloud CLI. - ---- - -## Settings schema - -Settings are formatted using YAML. For example: - -```yaml -run_queue: - max_concurrent_runs: 10 - tag_concurrency_limits: - - key: "database" - value: "redshift" - limit: 5 - -run_monitoring: - start_timeout_seconds: 1200 - cancel_timeout_seconds: 1200 - -run_retries: - max_retries: 0 - -sso_default_role: EDITOR - -auto_materialize: - run_tags: {} -``` - ---- - -## Settings - -For each deployment, you can configure settings for: - -- [Run queue](#run-queue-run_queue) -- [Run monitoring](#run-monitoring-run_monitoring) -- [Run retries](#run-retries-run_retries) -- [SSO default role](#sso-default-role) -- [Non-isolated runs](#non-isolated-runs) -- [Auto-materialize](#auto-materialize) - -### Run queue (run_queue) - -The `run_queue` settings allow you to specify how many runs can execute concurrently in the deployment. 
- -```yaml -run_queue: - max_concurrent_runs: 10 - tag_concurrency_limits: - - key: "database" - value: "redshift" - limit: 5 -``` - - - - The maximum number of runs that are allowed to be in progress at once. Set - to 0 to stop any runs from launching. Negative values aren't - permitted. - - - - A list of limits applied to runs with particular tags. -
- **Default:** `[]`
- Each list item may have the following properties:
  - `key`
  - `value`
    - If defined, the limit is applied only to that key-value pair.
    - If not defined, the limit is applied across all values of the key.
    - If set to a dict with `applyLimitPerUniqueValue: true`, the limit is applied to the number of unique values for the key.
  - `limit`
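For example, a sketch that limits concurrency per unique value of the `database` tag, using the `applyLimitPerUniqueValue` form described above:

```yaml
run_queue:
  max_concurrent_runs: 10
  tag_concurrency_limits:
    - key: "database"
      value:
        applyLimitPerUniqueValue: true
      limit: 2
```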
### Run monitoring (run_monitoring)

The `run_monitoring` settings allow you to define how long Dagster Cloud should wait for runs to start before marking them as failed, or to terminate before marking them as canceled.

```yaml
run_monitoring:
  start_timeout_seconds: 1200
  cancel_timeout_seconds: 1200
```

- **start_timeout_seconds** - The number of seconds that Dagster Cloud will wait after a run is launched for the process or container to start executing. After the timeout, the run will fail. This prevents runs from hanging in `STARTING` indefinitely when the process or container doesn't start.
  - **Default:** `1200` (20 minutes)
    - -### Run retries (run_retries) - -The `run_retries` settings allow you to define how Dagster Cloud handles retrying failed runs in the deployment. - -```yaml -run_retries: - max_retries: 0 -``` - - - - The maximum number of times Dagster Cloud should attempt to retry a failed - run. Dagster Cloud will use the default if this setting is undefined. -
- **Default:** `0`
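Individual jobs can usually opt into a different ceiling via run tags; a sketch assuming the standard `dagster/max_retries` tag (verify the tag name against your Dagster version):

```python
from dagster import job, op


@op
def refresh_tables():
    ...


# Sketch: override the deployment-wide retry ceiling for one job via a run tag
@job(tags={"dagster/max_retries": "3"})
def nightly_refresh():
    refresh_tables()
```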
    - -### SSO default role - -The `sso_default_role` setting lets you configure the default role on the deployment which is granted to new users that log in via SSO. For more information on available roles, see the [Dagster Cloud permissions reference](/dagster-cloud/account/managing-users/managing-user-roles-permissions#user-permissions-reference). - -```yaml -sso_default_role: EDITOR -``` - - - - If SAML SSO is enabled, this is the default role that will be assigned to - Dagster Cloud users for this deployment. If SAML SSO is not enabled, this - setting is ignored. -
- **Default:** `VIEWER`
    - -### Non-isolated runs - -Configure [non-isolated runs](/dagster-cloud/deployment/serverless) on your deployment. - -```yaml -non_isolated_runs: - enabled: True - max_concurrent_non_isolated_runs: 1 -``` - - - - If enabled, the `Isolate run environment` checkbox will appear in the - Launchpad. -
- **Default:** `true`

**max_concurrent_non_isolated_runs** - A limit for how many non-isolated runs to launch at once. Once this limit is reached, the checkbox will be greyed out and all runs will be isolated. This helps to avoid running out of RAM on the code location server.

- **Default:** `1`
    - -### Auto-materialize - -Configure [auto-materializing assets](/concepts/assets/asset-auto-execution) for your deployment. - -```yaml -auto_materialize: - run_tags: - foo: bar -``` - - - - Tags to apply to all runs created for an auto-materialize policy. -
- **Default:** None
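To apply a settings file like the ones above from the command line, the sketch below assumes the `deployment settings set-from-file` subcommand of the `dagster-cloud` CLI described in the managing-deployments guide referenced at the top of this page:

```shell
# Sketch: upload deployment settings from a local YAML file
dagster-cloud deployment settings set-from-file settings.yaml
```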
    diff --git a/docs/content/dagster-cloud/managing-deployments/environment-variables-and-secrets.mdx b/docs/content/dagster-cloud/managing-deployments/environment-variables-and-secrets.mdx deleted file mode 100644 index 53de8bfb7ee4f..0000000000000 --- a/docs/content/dagster-cloud/managing-deployments/environment-variables-and-secrets.mdx +++ /dev/null @@ -1,530 +0,0 @@ ---- -title: "Dagster Cloud environment variables and secrets | Dagster Docs" ---- - -# Dagster Cloud environment variables and secrets - -This guide is applicable to Dagster Cloud. - - - ---- - -## Understanding environment variables and secrets - -- [Overview](#overview) -- [Storage and encryption](#storage-and-encryption) -- [Scope](#scope) -- [Reserved variables](#reserved-variables) - -### Overview - -There are two ways to declare and manage variables in Dagster Cloud: - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
|  | Dagster Cloud UI | Agent configuration |
| --- | --- | --- |
| **Deployment type support** | Serverless, Hybrid | Hybrid |
| **How it works** | Environment variables are managed in the Dagster Cloud UI. Values are pulled from storage and decrypted when your code is executed. | Environment variables are defined in the agent's configuration. Variables set at the code location level will pass through Dagster Cloud, while those set at the deployment level bypass Dagster Cloud entirely. Refer to the [Setting environment variables for Dagster Cloud agents guide](/dagster-cloud/managing-deployments/setting-environment-variables-dagster-cloud-agents) for more info. |
| **Requirements** | Dagster code uses version 1.0.17 or later; if using Hybrid, the agent uses Dagster version 1.0.17 or later; Editor, Admin, or Organization Admin permissions in Dagster Cloud (Editors and Admins can only set environment variables for deployments where they're an Editor or Admin) | Ability to modify your `dagster.yaml` and [`dagster_cloud.yaml`](/dagster-cloud/managing-deployments/dagster-cloud-yaml) files |
| **Limitations** | Maximum of 1,000 variables per full deployment; variables must be less than or equal to 4KB in size; variable names must be 512 characters or less, must start with a letter or underscore, must contain only letters, numbers, and underscores, and may not be the same as [built-in (system) variables](#built-in-environment-variables) | Variable names must start with a letter or underscore and contain only letters, numbers, and underscores |
| **Storage and encryption** | Uses Amazon Key Management Services (KMS) and envelope encryption. Refer to the [Storage and encryption section](#storage-and-encryption) for more info. | Dependent on agent type (ex: Kubernetes) |
| **Scope** | Scoped by deployment (full and branch) and optionally, code location | Scoped by code location. Variables can be set for a full deployment (all code locations) or on a per-code location basis. |
    - -### Storage and encryption - - - This section is applicable only if using the Dagster Cloud UI to manage - environment variables. - - -To securely store environment variables defined using the Dagster Cloud UI, Dagster Cloud uses [Amazon Key Management Services (KMS)](https://docs.aws.amazon.com/kms/index.html) and [envelope encryption](https://docs.aws.amazon.com/kms/latest/developerguide/concepts.html#enveloping). Envelope encryption is a multi-layered approach to key encryption. Plaintext data is encrypted using a data key, and then the data under the data key is encrypted under another key. - -Here's a look at how it works in Dagster Cloud: - -
-  _Dagster Cloud encryption key hierarchy diagram_
    - -In Dagster Cloud, each customer account is assigned a unique key, which then encrypts the data associated with that account. All customer keys are encrypted using a non-exportable AWS KMS master key. - -This approach isolates each account's data and reduces the risk of exposure by limiting the amount of data a single key can access. - -### Scope - - - This section is applicable only if using the Dagster Cloud UI to manage - environment variables. - - -Environment variables can be scoped to specific deployments and/or code locations. When creating or modifying an environment variable, you'll be prompted to select the deployment(s) to scope the variable to: - -- **Full deployment** - Variables with this scope will be available to selected code locations in the full deployment - -- **Branch deployments** - Variables with this scope will be available to selected code locations in Branch Deployments. - - **Note**: While viewing a Branch Deployment in Dagster Cloud, variables will be read-only. Environment variables must be managed in the Branch Deployment's parent full deployment, which will usually be `prod`. - -- **Local** - Variables with this scope will be included when [downloading variables to a local `.env` file](#exporting-variables-to-a-env-file) - -By default, new environment variables default to all deployments and all code locations. - -#### Same variable, different scopes and values - -You can create multiple instances of the same environment variable with different values for different scopes. Each instance of the variable can then be scoped to a deployment and code location (or locations). This approach can be useful for parameterizing behavior by environment without needing to modify your application code. - -For example, let's say we want to use different database passwords in production and testing (Branch Deployments). In our code, we use the `SNOWFLAKE_PASSWORD` environment variable to pass in the database password. To use different passwords between production and Branch Deployments, we can create two instances of `SNOWFLAKE_PASSWORD`. One instance is scoped to the `prod` deployment and the other only to Branch Deployments: - - - -In this example, the value of `SNOWFLAKE_PASSWORD` would be `production_password` in the `prod` deployment and `testing_password` in a Branch Deployment. - -### Reserved variables - - - This section is applicable only if using the Dagster Cloud UI to manage - environment variables. - - -[Built-in (system) Dagster Cloud environment variables](#built-in-environment-variables) are reserved and therefore unavailable for use. [An error will surface in Dagster Cloud](#troubleshooting) if a built-in variable name is used. - ---- - -## Managing environment variables - -The simplest way to manage environment variables is to use Dagster Cloud's built-in manager which allows you to create and manage environment variables right in the UI. - -### Using the Dagster Cloud UI - - - To manage environment variables using the Dagster Cloud UI, you need: -
      -
-  - **Organization Admin**, **Admin**, or **Editor** permissions. If you're a Dagster Cloud **Editor** or **Admin**, you can only set environment variables for full deployments where you're an Editor or Admin.
-  - Dagster code on version 1.0.17 or later. If using Hybrid, your agent must also use 1.0.17 or later.
    - -- [Creating new variables](#creating-new-variables) -- [Editing, viewing, and deleting variables](#editing-viewing-and-deleting-variables) -- [Exporting variables to a `.env` file](#exporting-variables-to-a-env-file) - -#### Creating new variables - -1. Sign in to your Dagster Cloud account. - -2. Click **Deployment > Environment variables**. - -3. Click **+ Add Environment Variable** to add a new environment variable. - -4. In the window that displays, fill in the following: - - - **Name** - Enter a name for the environment variable. This is how the variable will be referenced in your code. - - **Value** - Enter a value for the environment variable. - - In **Deployment Scope**, select the deployment(s) where the variable should be accessible: - - **Full deployment** - The variable will be available to selected code locations in the full deployment - - **Branch deployments** - The variable will be available to selected code locations in Branch Deployments - - **Local** - If selected, the variable will be included when [exporting environment variables to a local `.env` file](#exporting-variables-to-a-env-file) - - In **Code Location Scope**, select the code location(s) where the variable should be accessible. At least one code location is required. Refer to the [Scope](#scope) section for more info. - - For example: - - Create new environment variable dialog window in Dagster Cloud - -5. When finished, click **Save**. Dagster Cloud will automatically re-deploy the workspace to apply the changes. - -#### Editing, viewing, and deleting variables - -After the environment variable is created: - -- **To edit an environment variable**, click the **Edit** button. -- **To view an environment variable's value**, click the **eye icon** in the variable's **Value** column. Click the icon again to conceal the value. -- **To delete an environment variable**, click the **Trash icon** and confirm the deletion when prompted. - -#### Exporting variables to a .env file - -All variables with the `local` deployment scope can be exported to an `.env` file and used locally. To create the file: - -1. In the **Environment variables** tab, click the menu next to **+ Add environment variable**: - - Highlighted Download local variables file in Environment Variables tab of Dagster Cloud - -2. Click **Download local environment variables** - -3. Save the file. **Note**: If saved to the same folder where you run `dagster-webserver`, Dagster will [automatically load the variables in the `.env` file](/guides/dagster/using-environment-variables-and-secrets#declaring-environment-variables) when the webserver is started. - -### Using agent configuration (Hybrid) - -Only supported for Hybrid deployments. - -For Dagster Cloud Hybrid deployments, making environment variables accessible is accomplished by adding them to your agent's configuration. How this is accomplished depends on the agent type. - -Refer to the [Setting environment variables for Dagster Cloud agents guide](/dagster-cloud/managing-deployments/setting-environment-variables-dagster-cloud-agents) for more info. - ---- - -## Accessing environment variables in Dagster code - -Ready to start using environment variables in your Dagster code? Refer to the [Using environment variables and secrets in Dagster code guide](/guides/dagster/using-environment-variables-and-secrets) for more info and examples. 
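As a concrete illustration of the pattern this section describes, here's a minimal sketch of reading a UI-managed variable from Dagster code. The `SNOWFLAKE_PASSWORD` name mirrors the scoping example earlier in this guide; the resource and asset names are hypothetical:

```python
import os

from dagster import ConfigurableResource, Definitions, EnvVar, asset


class SnowflakeConfig(ConfigurableResource):
    password: str


@asset
def users(snowflake: SnowflakeConfig) -> None:
    # The plaintext value is only resolved at runtime, in the environment
    # where Dagster Cloud has injected the variable.
    assert snowflake.password == os.environ["SNOWFLAKE_PASSWORD"]


defs = Definitions(
    assets=[users],
    resources={
        # EnvVar defers reading SNOWFLAKE_PASSWORD until the resource is
        # initialized, instead of capturing the value at import time.
        "snowflake": SnowflakeConfig(password=EnvVar("SNOWFLAKE_PASSWORD")),
    },
)
```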
- ---- - -## Built-in environment variables - -[Dagster Cloud](/dagster-cloud) provides a set of built-in, automatically populated environment variables, such as the name of a deployment or details about a branch deployment commit, that can be used to modify behavior based on environment. - -### All deployment variables - -The following variables are available in every deployment of your Dagster Cloud instance, including full (e.g., `prod`) and branch deployments. - - - - The name of the Dagster Cloud deployment. For example, prod. - - - If 1, the deployment is a{" "} - - branch deployment - - . Refer to the - Branch Deployment variables section - for a list of variables available in branch deployments. - - - -### Branch Deployment variables - -The following environment variables are currently available only in a [branch deployment](/dagster-cloud/managing-deployments/branch-deployments). - -For every commit made to a branch, the following metadata is available: - - - - The SHA of the commit. - - - The time the commit occurred. - - - The email of the git user who authored the commit. - - - The name of the git user who authored the commit. - - - The message associated with the commit. - - - The name of the branch associated with the commit. - - - The name of the repository associated with the commit. - - - The ID of the pull request associated with the commit. - - - The status of the pull request at the time of the commit. Possible values - are OPEN, CLOSED, and MERGED. - - - ---- - -## Troubleshooting - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
-| Error | Description and resolution |
-| --- | --- |
-| `[variable] is a reserved environment variable name.` | Dagster reserves the names of [built-in (system) variables](#built-in-environment-variables). To resolve, change the variable's name to a name that isn't currently [reserved](#reserved-variables) and meets the other naming requirements. |
-| `Environment variables must be no greater than 4KB in size.` | To resolve, reduce the size of the environment variable's value to less than the maximum of 4KB. |
-| `Environment variable names must be no longer than 512 characters.` | To resolve, reduce the number of characters in the variable's name. |
-| `Invalid environment variable name [variable]` | The name of the environment variable doesn't meet one or several of Dagster's naming requirements. To resolve, change the variable's name to start with a letter or underscore and contain only letters, numbers, and underscores. |
-| `Deployment [deployment_name] has reached the maximum of 1,000 environment variables.` | The maximum number of environment variables for the full deployment has been reached. New variables cannot be added. Remove any unneeded variables to reduce the total below the maximum, then add new variables. |
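The limits behind these error messages can also be checked before a variable is created. The following is an illustrative sketch — not a Dagster API — that mirrors the documented naming and size rules; the reserved-name set assumes the built-in variables described above:

```python
import re

# Assumption: the built-in (system) variable names documented above.
RESERVED_NAMES = {
    "DAGSTER_CLOUD_DEPLOYMENT_NAME",
    "DAGSTER_CLOUD_IS_BRANCH_DEPLOYMENT",
}

NAME_PATTERN = re.compile(r"[A-Za-z_][A-Za-z0-9_]*")


def validate_env_var(name: str, value: str) -> list:
    """Return the documented errors a candidate variable would trigger."""
    errors = []
    if name in RESERVED_NAMES:
        errors.append(f"{name} is a reserved environment variable name.")
    if len(name) > 512:
        errors.append("Environment variable names must be no longer than 512 characters.")
    if not NAME_PATTERN.fullmatch(name):
        errors.append(f"Invalid environment variable name {name}")
    if len(value.encode("utf-8")) > 4096:
        errors.append("Environment variables must be no greater than 4KB in size.")
    return errors


assert validate_env_var("SNOWFLAKE_PASSWORD", "hunter2") == []
assert validate_env_var("1BAD_NAME", "x")  # invalid: starts with a digit
```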
    - ---- - -## Related - - - - - - - - - diff --git a/docs/content/dagster-cloud/managing-deployments/managing-deployments.mdx b/docs/content/dagster-cloud/managing-deployments/managing-deployments.mdx deleted file mode 100644 index e93b028ec6da5..0000000000000 --- a/docs/content/dagster-cloud/managing-deployments/managing-deployments.mdx +++ /dev/null @@ -1,198 +0,0 @@ ---- -title: Managing deployments in Dagster Cloud | Dagster Docs -description: Learn to manage and configure your Dagster Cloud deployments. ---- - -# Managing deployments in Dagster Cloud - -This guide is applicable to Dagster Cloud. - -In Dagster Cloud, there are two types of deployments: - -- [**Branch deployments**](/dagster-cloud/managing-deployments/branch-deployments), which are temporary deployments built for testing purposes -- **Full deployments**, which are persistent, fully-featured deployments intended to perform actions on a recurring basis - -This guide will focus on **full deployments**, hereafter referred to simply as deployments. - ---- - -## Understanding deployments - -Deployments are standalone environments, allowing you to operate independent instances of Dagster with separately managed permissions. - -When a Dagster Cloud organization is created, a single deployment named `prod` will also be created. To create additional deployments, an [Enterprise plan](https://dagster.io/pricing) is required. - -Each deployment can have one or multiple [code locations](/dagster-cloud/managing-deployments/code-locations). - -**Concerned about testing environments?** We recommend using Branch Deployments to test your changes, even if you're able to create additional deployments. Branch deployments are available for all Dagster Cloud users, regardless of plan. - -Refer to the [Branch Deployment docs](/dagster-cloud/managing-deployments/branch-deployments) for more info, or the [Testing against production using Branch Deployments guide](/guides/dagster/branch_deployments) for a real-world example. - ---- - -## Viewing and switching deployments - -In Dagster Cloud, you can view and switch between deployments using the **deployment switcher**: - - - -To view all deployments, click **View all deployments**. - ---- - -## Creating deployments - - - - Organization Admin permissions - {" "} - are required to create deployments. Additionally, note that creating multiple - deployments requires an{" "} - - Enterprise plan - - . - - -To create a deployment: - -1. Sign in to your Dagster Cloud account. -2. Access the **Deployments** page using one of the following options: - - Click the **deployment switcher > View all deployments**. - - Click **your user icon > Organization Settings > Deployments**. -3. Click the **+ New deployment** button. -4. In the modal that displays, fill in the following: - - **Name** - Enter a name for the deployment. - - **Initial deployment permissions** - Select the permissions you want to use to create the deployment: - - **Empty permissions** - Creates the deployment with an empty set of permissions. **Note**: Only Organization Admins will be able to manage the deployment until other uses are granted Admin or Editor permissions. - - **Copy from** - Creates the deployment using permissions duplicated from an existing deployment. -5. When finished, click **Create deployment**. - ---- - -## Deleting deployments - - - - Organization Admin permissions - {" "} - are required to delete deployments. 
Additionally, note that deleting a - deployment also deletes all its associated data, including code locations, - jobs, schedules, and sensors. - - -To delete a deployment: - -1. Sign in to your Dagster Cloud account. -2. Access the **Deployments** page using one of the following options: - - Click the **deployment switcher > View all deployments**. - - Click the **deployment switcher**, then the **gear icon** next to the deployment. - - Click **your user icon > Organization Settings > Deployments**. -3. Click the **Delete** button next to the deployment you want to delete. -4. When prompted, confirm the deletion. - ---- - -## Configuring deployment settings - - - - Organization Admin permissions - {" "} - are required to modify deployment settings. - - -Deployment settings can be configured in the Dagster Cloud interface or using the dagster-cloud CLI. Refer to the [deployment settings reference](/dagster-cloud/managing-deployments/deployment-settings-reference) for more info about individual settings. - - - - To configure deployment settings in the Dagster Cloud UI: - -
      -
-1. Sign in to your Dagster Cloud account.
-
-2. Access the **Deployments** page using one of the following:
-
-   - Click the **deployment switcher > View all deployments**.
-   - Click the **deployment switcher**, then the **gear icon** next to the deployment.
-   - Click **your user icon > Organization Settings > Deployments**.
-
-3. Click the **Settings** button next to the deployment you want to configure.
-
-4. In the window that displays, configure settings for the deployment.
-
-5. When finished, click **Save deployment settings**.
-
-  Note: dagster-cloud 0.13.14 or later must be installed to run the CLI. Agent and/or job code doesn't need to be upgraded.
-
-Create a file with the settings you'd like to configure. For example:
-
-```yaml
-# my-settings.yaml
-
-run_queue:
-  max_concurrent_runs: 10
-  tag_concurrency_limits:
-    - key: "special-runs"
-      limit: 5
-
-run_monitoring:
-  start_timeout_seconds: 1200
-  cancel_timeout_seconds: 1200
-
-run_retries:
-  max_retries: 0
-```
-
-Use the CLI to upload the settings file:
-
-```shell
-dagster-cloud deployment settings set-from-file my-settings.yaml
-```
-
-This will replace all of your configured settings. Any that are not specified will revert to their default values. You can also use the CLI to read your current settings, including the default values:
-
-```shell
-dagster-cloud deployment settings get
-```
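To see how the `tag_concurrency_limits` entry in the example settings file above is exercised, here's a minimal sketch: any run tagged with the `special-runs` key counts toward that limit of 5. The op and job names are hypothetical, and the tag value is arbitrary because the limit matches on the key alone:

```python
from dagster import job, op


@op
def sync_data():
    ...


# Runs of this job carry the "special-runs" tag key, so the deployment's
# run queue holds additional runs once 5 of them are already in flight.
@job(tags={"special-runs": "true"})
def special_sync_job():
    sync_data()
```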
    - ---- - -## Related - - - - - - - - - diff --git a/docs/content/dagster-cloud/managing-deployments/setting-environment-variables-dagster-cloud-agents.mdx b/docs/content/dagster-cloud/managing-deployments/setting-environment-variables-dagster-cloud-agents.mdx deleted file mode 100644 index 5e9d454bf227f..0000000000000 --- a/docs/content/dagster-cloud/managing-deployments/setting-environment-variables-dagster-cloud-agents.mdx +++ /dev/null @@ -1,246 +0,0 @@ ---- -title: Setting Dagster Cloud environment variables using agent configuration | Dagster Docs -description: Set environment variables in your Dagster Cloud agent. ---- - -# Setting Dagster Cloud environment variables using agent configuration - -This guide is applicable to Dagster Cloud. - -In this guide, we'll walk you through setting environment variables for a Dagster Cloud [Hybrid deployment](/dagster-cloud/deployment/hybrid) using the Hybrid agent's configuration. - -There are two ways to set environment variables: - -- **On a per-code location basis**, which involves modifying the `dagster.yaml` file. **Note**: This approach is functionally the same as [setting environment variables using the Dagster Cloud UI](/dagster-cloud/managing-deployments/environment-variables-and-secrets). Values will pass through Dagster Cloud. -- **For a full deployment and all the code locations it contains**. This approach makes variables available for all code locations in a full Dagster Cloud deployment. As values are pulled from the user cluster, values will bypass Dagster Cloud entirely. - ---- - -## Prerequisites - -To complete the steps in this guide, you'll need: - -- A Dagster Cloud account using [Hybrid deployment](/dagster-cloud/deployment/hybrid) -- An existing [Hybrid agent](/dagster-cloud/deployment/agents) -- **Editor**, **Admin**, or **Organization Admin** permissions in Dagster Cloud - ---- - -## Setting environment variables for a code location - - - To set environment variables, you need{" "} - - one of the following user roles - {" "} - in Dagster Cloud: -
      -
-  - **Organization Admin**, or
-  - **Editor** or **Admin**. **Note**: Editors and Admins can only set environment variables in full deployments where they're an Editor or Admin.
    - -Setting environment variables for specific code locations is accomplished by adding them to your agent's configuration in your project's [`dagster_cloud.yaml` file](/dagster-cloud/managing-deployments/dagster-cloud-yaml). The `container_context` property in this file sets the variables in the agent's environment. - -**Note**: This approach is functionally the same as [setting environment variables using the Dagster Cloud UI](/dagster-cloud/managing-deployments/environment-variables-and-secrets). - -How `container_context` is configured depends on the agent type. Click the tab for your agent type to view instructions. - - - - -### Amazon ECS agents - - - -After you've modified `dagster_cloud.yaml`, redeploy the code location in Dagster Cloud to apply the changes: - - - - - - -### Docker agents - - - -After you've modified `dagster_cloud.yaml`, redeploy the code location in Dagster Cloud to apply the changes: - - - - - - -### Kubernetes agents - - - -After you've modified `dagster_cloud.yaml`, redeploy the code location in Dagster Cloud to apply the changes: - - - - - - ---- - -## Setting environment variables for full deployments - - - If you're a Dagster Cloud{" "} - - Editor or Admin - - , you can only set environment variables for full deployments where you're an - Editor - or Admin. - - -Setting environment variables for a full deployment will make the variables available for all code locations in the full deployment. Using this approach will pull variable values from your user cluster, bypassing Dagster Cloud entirely. - -Click the tab for your agent type to view instructions. - - - - -### Amazon ECS agents - -To make environment variables accessible to a full deployment with an Amazon ECS agent, you'll need to modify the agent's CloudFormation template as follows: - -1. Sign in to your AWS account. - -2. Navigate to **CloudFormation** and open the stack for the agent. - -3. Click **Update**. - -4. Click **Edit template in designer**. - -5. In the section that displays, click **View in Designer**. The AWS template designer will display. - -6. In the section displaying the template YAML, locate the `AgentTaskDefinition` section: - - Highlighted AgentTaskDefinition section of the AWS ECS agent CloudFormation template in the AWS Console - -7. In the `user_code_launcher.config` portion of the `AgentTaskDefinition` section, add the environment variables as follows: - - ```yaml - user_code_launcher: - module: dagster_cloud.workspace.ecs - class: EcsUserCodeLauncher - config: - cluster: ${ConfigCluster} - subnets: [${ConfigSubnet}] - service_discovery_namespace_id: ${ServiceDiscoveryNamespace} - execution_role_arn: ${TaskExecutionRole.Arn} - task_role_arn: ${AgentRole} - log_group: ${AgentLogGroup} - env_vars: - - SNOWFLAKE_USERNAME=dev - - SNOWFLAKE_PASSWORD ## pulled from agent environment - ' > $DAGSTER_HOME/dagster.yaml && cat $DAGSTER_HOME/dagster.yaml && dagster-cloud agent run" - ``` - -8. When finished, click the **Create Stack** button: - - Highlighted Create Stack button in the AWS Console - -9. You'll be redirected back to the **Update stack** wizard, where the new template will be populated. Click **Next**. - -10. Continue to click **Next** until you reach the **Review** page. - -11. Click **Submit** to update the stack. - - - - -### Docker agents - -To make environment variables accessible to a full deployment with a Docker agent, you'll need to modify your project's `dagster.yaml` file. 
- -In the `user_code_launcher` section, add an `env_vars` property as follows: - -```yaml -# dagster.yaml - -user_code_launcher: - module: dagster_cloud.workspace.docker - class: DockerUserCodeLauncher - config: - networks: - - dagster_cloud_agent - env_vars: - - SNOWFLAKE_PASSWORD # value pulled from agent's environment - - SNOWFLAKE_USERNAME=dev -``` - -In `env_vars`, specify the environment variables as keys (`SNOWFLAKE_PASSWORD`) or key-value pairs (`SNOWFLAKE_USERNAME=dev`). If only `KEY` is provided, the value will be pulled from the agent's environment. - - - - -### Kubernetes agents - -To make environment variables to a full deployment with a Kubernetes agent, you'll need to modify and upgrade the Helm chart's `values.yaml`. - -1. In `values.yaml`, add or locate the `workspace` value. - -2. Add an `envVars` property as follows: - - ```yaml - # values.yaml - - workspace: - envVars: - - SNOWFLAKE_PASSWORD # value pulled from agent's environment - - SNOWFLAKE_USERNAME=dev - ``` - -3. In `envVars`, specify the environment variables as keys (`SNOWFLAKE_PASSWORD`) or key-value pairs (`SNOWFLAKE_USERNAME=dev`). If only `KEY` is provided, the value will be pulled from the local (agent's) environment. - -4. Upgrade the Helm chart. - - - - ---- - -## Related - - - - - diff --git a/docs/content/dagster-cloud/managing-deployments/setting-up-alerts.mdx b/docs/content/dagster-cloud/managing-deployments/setting-up-alerts.mdx deleted file mode 100644 index 652fd6173d7bb..0000000000000 --- a/docs/content/dagster-cloud/managing-deployments/setting-up-alerts.mdx +++ /dev/null @@ -1,221 +0,0 @@ ---- -title: "Setting up alerts in Dagster Cloud | Dagster Docs" ---- - -# Setting up alerts in Dagster Cloud - -This guide is applicable to Dagster Cloud. - -In this guide, we'll walk you through configuring alerts in Dagster Cloud. - ---- - -## Understanding alert policies - -Alert policies define which events will trigger an alert, the conditions under which an alert will be sent, and how the alert will be sent. - -- **Asset based alert policies** can trigger on asset materialization failure or success, as well as asset check error, warn, passed, or failure to execute. An asset group or asset key can be provided to asset based alert policies, which limits notifications to only fire if the asset group or asset key matches the materialized asset. In the case of checks, notifications will only be sent if the asset group/key to which the check is attached matches. **Note:** Asset based alert policies are still experimental, and may be subject to change as we gather user feedback. -- **Job run based alert policies** include a set of configured tags. If an alert policy has no configured tags, all jobs will be eligible for that alert. Otherwise, only jobs that contain all the tags for a given alert policy are eligible for that alert. -- **Alert policies created for schedule/sensor tick failure will apply to all schedules/sensors**. However, you will only receive alerts when the schedule/sensor changes from a state of succeeding to failing, so subsequent failures will not trigger new alerts. -- **Code location error alert policies** will trigger when a code location fails to load due to an error. -- **Agent downtime alert policies** will trigger when a Hybrid agent stops heart beating. - -Alert policies are configured on a per-deployment basis. For example, asset alerts configured in a `prod` deployment are only applicable to assets in that deployment. - -Currently, Slack and email notifications are supported. 
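Because asset-based policies can be scoped to an asset group or key, what a policy matches against is the group (or key) your assets declare in code. A minimal sketch, with a hypothetical group and asset name:

```python
from dagster import asset


# An asset-based alert policy scoped to the "core" group would fire on
# materialization failure or success of this asset, depending on the
# events the policy is configured to watch.
@asset(group_name="core")
def orders():
    ...
```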
- ---
-
-## Managing alert policies in Dagster Cloud
-
-  **Organization Admin**, **Admin**, or **Editor** permissions are required to manage alerts in Dagster Cloud.
-
-  If you're a Dagster Cloud **Admin** or **Editor**, you can only manage alerts in deployments where you're an **Admin**.
    - -- [Creating alert policies](#creating-alert-policies) -- [Editing alert policies](#editing-alert-policies) -- [Enabling and disabling alert policies](#enabling-and-disabling-alert-policies) -- [Deleting alert policies](#deleting-alert-policies) - -### Creating alert policies - -1. Sign in to your Dagster Cloud account. - -2. In the top navigation, click **Deployment** - -3. Click the **Alerts** tab. - -4. Click **+ Create alert policy**. - -5. From the **Alert Policy type** drop-down, select the type of alert to create. - -6. In the **Create alert policy** window, fill in the following: - - - **Alert policy name** - Enter a name for the alert policy. For example, `slack_urgent_failure` - - **Description** - Enter a description for the alert policy - - For asset-based alerts, fill out these additional options: - - - **Asset group** - Select the asset group to monitor. You will have the option to select from all asset groups in the deployment. - - **Asset key** - Select the asset key to monitor. You will have the option to select from all asset keys in the deployment. **Note:** If you select an asset group, you will not be able to select an asset key. - - **Events** - Select whether the alert should trigger on asset materialization failure, asset materialization success, asset check error, asset check warn, asset check passed, or asset check failure to execute - - For job-based alerts, fill out these additional options: - - - **Tags** - Add tag(s) for the alert policy. Jobs with these tags will trigger the alert. For example: `level:critical` or `team:sales` - - **Events** - Select whether the alert should trigger on job success, failure, or both - - **Notification service** - Select the service for the alert policy: - - - **Slack** - If you haven't connected Slack, click **Connect** to add the Dagster Cloud Slack app to your workspace. After the installation completes, invite the `@Dagster Cloud` bot user to the desired channel. - - You can then configure the alert policy to message this channel. **Note**: Only messaging one channel per alert policy is currently supported: - - Slack alert configured to alert the sales-notifications channel - - - **Email** - Email alerts can be sent to one or more recipients. For example: - - Email alert configured to alert two recipients - -7. When finished, click **Save policy**. - -### Editing alert policies - -To edit an existing alert policy, click the **Edit** button next to the policy: - - - -### Enabling and disabling alert policies - -To enable or disable an alert, use the toggle on the left side of the alert policy. - -### Deleting alert policies - -To delete an alert policy, click the **Delete** button next to the policy. When prompted, confirm the deletion. - - - ---- - -## Managing alert policies with the dagster-cloud CLI - -With the [`dagster-cloud` CLI](/dagster-cloud/managing-deployments/dagster-cloud-cli), you can: - -- [Set a full deployment's policies](#setting-a-full-deployments-policies) -- [View a full deployment's policies](#viewing-a-full-deployments-policies) -- [Configure a Slack alert policy](#configuring-a-slack-alert-policy) -- [Configure an email alert policy](#configuring-an-email-alert-policy) - -### Setting a full deployment's policies - -A list of alert policies can be defined in a single YAML file. 
After declaring your policies, set them for the deployment using the following command: - -```bash -dagster-cloud deployment alert-policies sync -a -``` - -### Viewing a full deployment's policies - -List the policies currently configured on the deployment by running: - -```bash -dagster-cloud deployment alert-policies list -``` - -### Configuring a Slack alert policy - -In this example, we'll configure a Slack notification to trigger whenever a run of a job succeeds or fails. This job, named `sales_job`, has a `team` tag of `sales`: - -```python -@op -def sales_computation(): - ... - - -@job(tags={"team": "sales"}) -def sales_job(): - sales_computation() -``` - -In the alert policies YAML file, we'll define a policy that listens for jobs with a `team` tag of `sales` to succeed or fail. When this occurs, a notification will be sent to the `sales-notification` channel in the `hooli` workspace: - -```yaml -alert_policies: - - name: "slack-alert-policy" - description: "An alert policy to send a Slack notification to sales on job failure or success." - tags: - - key: "team" - value: "sales" - event_types: - - "JOB_SUCCESS" - - "JOB_FAILURE" - notification_service: - slack: - slack_workspace_name: "hooli" - slack_channel_name: "sales-notifications" -``` - -### Configuring an email alert policy - -In this example, we'll configure an email alert when a job fails. This job, named `important_job`, has a `level` tag of `"critical"`: - -```python -def important_computation(): - ... - - -@job(tags={"level": "critical"}) -def important_job(): - important_computation() -``` - -In the alert policies YAML file, we'll define a policy that listens for jobs with a `level` tag of `critical` to fail. When this occurs, an email notification will be sent to `richard.hendricks@hooli.com` and `nelson.bighetti@hooli.com`: - -```yaml -alert_policies: - - name: "email-alert-policy" - description: "An alert policy to email company executives during job failure." - tags: - - key: "level" - value: "critical" - event_types: - - "JOB_FAILURE" - notification_service: - email: - email_addresses: - - "richard.hendricks@hooli.com" - - "nelson.bighetti@hooli.com" -``` - ---- - -### Compatible event types - -When creating an alert policy using the CLI, only certain `event_types` can be specified together. You can specify multiple job run-based event types together (`JOB_SUCCESS`, `JOB_FAILURE`), or a tick-based event type (`TICK_FAILURE`), but attempting to mix these will result in an error. diff --git a/docs/content/dagster-plus.mdx b/docs/content/dagster-plus.mdx new file mode 100644 index 0000000000000..ac3749e318e4e --- /dev/null +++ b/docs/content/dagster-plus.mdx @@ -0,0 +1,201 @@ +--- +title: "Dagster+ | Dagster Docs" +--- + +# Dagster+ + +Dagster+ provides robust, managed infrastructure, elegant CI/CD capability, and multiple deployment options to suit your needs. + +--- + +## Getting started + +Learn how to set up your Dagster+ account! Check out the [Getting started guide](/dagster-plus/getting-started) for everything you need to get up and running. + +--- + +## Understanding deployment types + +Dagster+ currently offers two deployment options to meet your needs: Serverless and Hybrid. Learn about your options and how to manage your selection in Dagster+. + + + + + + +### Managing Hybrid Deployment agents + +Hybrid Deployments use an agent that is responsible for executing your code. Learn how to spin up and maintain an agent in your infrastructure. 
+ + + + + + + + + + +--- + +## Managing organization settings + + + + + +--- + +## Managing authentication and users + +Learn how to manage users and permissions in Dagster+. You can secure your account using our out-of-the-box support for Google and GitHub SSO, or take full control with SAML SSO. + + + + + + + + + + +--- + +## Managing deployments + +Learn how to deploy your code to Dagster+, use command line tools, set up CI/CD, and define environment variables. + + + + + + + + +### Alerts + + + + + + + + + + + +### Environment variables and secrets + + + + + + + +### Branch deployments + + + + + + + diff --git a/docs/content/dagster-plus/account.mdx b/docs/content/dagster-plus/account.mdx new file mode 100644 index 0000000000000..355a91724108b --- /dev/null +++ b/docs/content/dagster-plus/account.mdx @@ -0,0 +1,16 @@ +--- +title: "Managing your Dagster+ account | Dagster Docs" + +platform_type: "cloud" +--- + +# Managing Dagster+ organization settings + +Learn to manage your Dagster+ account. + + + + diff --git a/docs/content/dagster-plus/account/authentication.mdx b/docs/content/dagster-plus/account/authentication.mdx new file mode 100644 index 0000000000000..cff4d85bbd05c --- /dev/null +++ b/docs/content/dagster-plus/account/authentication.mdx @@ -0,0 +1,83 @@ +--- +title: "Dagster+ authentication and users | Dagster Docs" +--- + +# Dagster+ authentication and user management + +In this guide, we'll cover Dagster+'s supported authentication methods and some resources for managing users in your organization. + +--- + +## Authentication methods + +### Single Sign-on + +Dagster+ supports Single Sign-on (SSO) with the following providers: + +- **Google**. Users must be [added in Dagster+](/dagster-plus/account/managing-users#adding-users) before they'll be able to log in using their Google accounts. +- GitHub + +### SAML + +To provide administrators with more control, Dagster+ also supports SAML authentication with the following Identity Providers (IdP): + + + + + + + + + +**Note**: Users must be assigned to the Dagster app in the IdP to be able to log in to Dagster+. Refer to the setup guide for your IdP for more info. + +### SCIM provisioning + +The [System for Cross-domain Identity Management specification](https://scim.cloud/) (SCIM) is a standard designed to manage user identity information. When enabled in Dagster+ alongside SSO, SCIM allows you to efficiently and easily manage users in your IdP and sync their information to Dagster+. + + + + + + +--- + +## User management and role-based access control + +Role-based access control (RBAC) enables you to grant specific permissions to users in your organization, ensuring that Dagster users have access to what they require in Dagster+, and no more. + + + + + + diff --git a/docs/content/dagster-plus/account/authentication/okta/saml-sso.mdx b/docs/content/dagster-plus/account/authentication/okta/saml-sso.mdx new file mode 100644 index 0000000000000..4f00084b92324 --- /dev/null +++ b/docs/content/dagster-plus/account/authentication/okta/saml-sso.mdx @@ -0,0 +1,151 @@ +--- +title: "Setting up Okta SSO for Dagster+ | Dagster Docs" + +display_name: "Okta" +feature_name: "saml_sso_okta" +pricing_plan: "pro" +--- + +# Setting up Okta SSO for Dagster+ + +This guide is applicable to Dagster+. + +In this guide, you'll configure Okta to use single sign-on (SSO) with your Dagster+ organization. 
+ +--- + +## Prerequisites + +To complete the steps in this guide, you'll need: + +- **An existing Okta account** +- **To install the [`dagster-cloud` CLI](/dagster-plus/managing-deployments/dagster-plus-cli)** +- **The following in Dagster+:** + - A Pro plan + - [Access to a user token](/dagster-plus/account/managing-user-agent-tokens#managing-user-tokens) + - [Organization Admin permissions](/dagster-plus/account/managing-users/managing-user-roles-permissions#user-permissions-reference) in your organization + +--- + +## Step 1: Add the Dagster+ app in Okta + +1. Sign in to your Okta Admin Dashboard. + +2. Using the sidebar, click **Applications > Applications**. + +3. On the **Applications** page, click **Browse App Catalog**. + +4. On the **Browse App Integration Catalog** page, search for `Dagster Cloud`. + +5. Add and save the application. + +--- + +## Step 2: Configure SSO in Okta + +1. In Okta, open the Dagster Cloud application and navigate to its **Sign On Settings**. + +2. Scroll down to the **Advanced Sign-on settings** section. + +3. In the **Organization** field, enter your Dagster+ organization name. This is used to route the SAML response to the correct Dagster+ subdomain. + + In the following example, the organization name is `hooli` and our Dagster+ domain is `https://hooli.dagster.cloud`. To configure this correctly, we'd enter `hooli` into the **Organization** field: + + + + Okta Subdomain Configuration + +4. When finished, click **Done**. + +--- + +## Step 3: Upload the SAML metadata to Dagster+ + +Next, you'll save and upload the application's SAML metadata to Dagster+. This will enable single sign-on. + +1. In the **Sign On Settings**, navigate to the **SAML Signing Certificates** section. + +2. Click the **Actions** button of the **Active** certificate. + +3. Click **View IdP metadata**: + + The View IdP metadata options in the Okta UI + + This will open a new page in your browser with the IdP metadata in XML format. + +4. Right-click on the page and use **Save As** or **Save Page As**: + + + + In Chrome and Edge, the file will be downloaded as an XML file. In Firefox, choose **Save Page As > Save as type**, then select **All files**. **Note**: Copying and pasting the metadata can cause formatting issues that will prevent successful setup. Saving the page directly from the browser will avoid this. + +5. After you've downloaded the metadata file, upload it to Dagster+ using the `dagster-cloud` CLI: + + ```shell + dagster-cloud organization settings saml upload-identity-provider-metadata \ + --api-token= \ + --url https://.dagster.cloud + ``` + +--- + +## Step 4: Grant access to users + +Next, you'll assign users to the Dagster+ application in Okta. This will allow them to log in using their Okta credentials when the single sign-on flow is initiated. + +1. In the Dagster+ application, navigate to **Assignments**. +2. Click **Assign > Assign to People**. +3. For each user you want to have access to Dagster+, click **Assign** then **Save and Go Back**. + +--- + +## Step 5: Test your SSO configuration + +Lastly, you'll test your SSO configuration: + +- [Service provider (SP)-initiated login](#testing-a-service-provider-initiated-login) +- [Identity provider (IdP)-initiated login](#testing-an-identity-provider-initiated-login) + + + + +### Testing a service provider-initiated login + +1. Navigate to your Dagster+ sign in page at `https://.dagster.cloud` + +2. Click the **Sign in with SSO** button. + +3. Initiate the login flow and address issues that arise, if any. 
+ + + + +### Testing an identity provider-initiated login + +In the Okta **Applications** page, click the **Dagster+** icon: + + + + + + +If successful, you'll be automatically signed into your Dagster+ organization. diff --git a/docs/content/dagster-plus/account/authentication/okta/scim-provisioning.mdx b/docs/content/dagster-plus/account/authentication/okta/scim-provisioning.mdx new file mode 100644 index 0000000000000..b07bd9482648c --- /dev/null +++ b/docs/content/dagster-plus/account/authentication/okta/scim-provisioning.mdx @@ -0,0 +1,193 @@ +--- +title: "Setting up Okta SCIM provisioning for Dagster+ | Dagster Docs" + +display_name: "Okta" +feature_name: "scim_okta" +pricing_plan: "pro" +--- + +# Setting up Okta SCIM provisioning for Dagster+ + +The [System for Cross-domain Identity Management specification](https://scim.cloud/) (SCIM) is a standard designed to manage user identity information. When enabled in Dagster+, SCIM allows you to efficiently and easily manage users in your Identity Provider (IdP) - in this case, Okta - and sync their information to Dagster+. + +In this guide, we'll walk you through configuring [Okta SCIM provisioning](https://developer.okta.com/docs/concepts/scim/) for Dagster+. + +--- + +## About this feature + + + + +### Supported features + +With Dagster+'s Okta SCIM provisioning feature, you can: + + + +Refer to [Okta's SCIM documentation](https://developer.okta.com/docs/concepts/scim/) for more information about Okta's SCIM offering. + + + + +### Limitations + +Dagster+ currently supports the following attributes for SCIM syncing: + +- `user.firstName` +- `user.lastName` +- `user.email`, which must match the user's username in Okta +- `user.displayName` + + + + +--- + +## Prerequisites + +To complete the steps in this guide, you'll need: + +- **To have set up Okta SSO for Dagster+.** Refer to the [Okta SSO setup guide](/dagster-plus/account/authentication/okta/saml-sso) for more info. +- **Permissions in Okta that allow you to configure applications.** +- **The following in Dagster+:** + - A Pro plan + - [Organization Admin permissions](/dagster-plus/account/managing-users/managing-user-roles-permissions#user-permissions-reference) in your organization + +--- + +## Step 1: Enable SCIM provisioning in Dagster+ + +1. Sign in to your Dagster+ account. +2. Click the **user menu (your icon) > Organization Settings**. +3. Click the **Provisioning** tab. +4. If SCIM provisioning isn't enabled, click the **Enable SCIM provisioning** button to enable it. +5. Click **Create SCIM token** to create an API token. This token will be used to authenticate requests from Okta to Dagster+. + +Keep the API token handy - you'll need it in the next step. + +--- + +## Step 2: Enable SCIM provisioning in Okta + +1. Sign in to your Okta Admin Dashboard. + +2. Using the sidebar, click **Applications > Applications**. + +3. Click the Dagster+ app. **Note**: If you haven't set up SSO for Okta, [follow this guide](/dagster-plus/account/authentication/okta/saml-sso)) to do so before continuing. + +4. Click the **Sign On** tab and complete the following: + + 1. Click **Edit**. + + 2. In the **Advanced Sign-on Settings** section, enter the name of your organization in the **Organization** field. + + 3. In the **Credential Details** section, set the **Application username format** field to **Email**: + + Configured Sign On tab of Dagster+ Okta application + + 4. Click **Save**. + +5. Click the **Provisioning** tab and complete the following: + + 1. Click **Configure API Integration**. 
+ + 2. Check the **Enable API integration** checkbox that displays. + + 3. In the **API Token** field, paste the Dagster+ API token you generated in [Step 1](#step-1-enable-scim-provisioning-in-dagster): + + Configured Provisioning tab of Dagster+ Okta application + + 4. Click **Test API Credentials** to verify that your organization and API token work correctly. + + 5. When finished, click **Save**. + +--- + +## Step 3: Enable user syncing in Okta + +After you confirm that your API credentials work in the Dagster+ Okta application, you can enable user syncing: + +1. In the Dagster+ Okta app, click the **Provisioning** tab. + +2. In the **Settings** panel, click **To App**. + +3. Click **Edit**. + +4. Next to **Create Users**, check the **Enable** checkbox: + + Highlighted Create users setting and default username setting in Okta + + **Note**: The default username used to create accounts must be set to **Email** or user provisioning may not work correctly. + +5. Optionally, check **Enable** next to **Update User Attributes** and **Deactivate Users** to enable these features. + +6. When finished, click **Save**. + +--- + +## Step 4: Enable group syncing in Okta + + + This step is required only if you want to sync Okta user groups to Dagster+ as{" "} + Teams. + + +When **Push groups** is enabled in Okta, you can sync user groups from Okta to Dagster+ as [Teams](/dagster-plus/account/managing-users/managing-teams). Refer to the [Okta documentation](https://help.okta.com/oie/en-us/Content/Topics/users-groups-profiles/usgp-enable-group-push.htm) for setup instructions. + +--- + +## Next steps + +That's it! Once Okta successfully syncs users to Dagster+, synced users will have a 'synced' icon next to them in the Dagster+ users page: + + + +Refer to the [Utilizing SCIM provisioning guide](/dagster-plus/account/authentication/utilizing-scim-provisioning) for more info about how user and team management works when SCIM provisioning is enabled. + +--- + +## Related + + + + + + + + diff --git a/docs/content/dagster-plus/account/authentication/setting-up-azure-ad-saml-sso.mdx b/docs/content/dagster-plus/account/authentication/setting-up-azure-ad-saml-sso.mdx new file mode 100644 index 0000000000000..c16fb9ff0c6d4 --- /dev/null +++ b/docs/content/dagster-plus/account/authentication/setting-up-azure-ad-saml-sso.mdx @@ -0,0 +1,140 @@ +--- +title: "Setting up Azure Active Directory SSO for Dagster+ | Dagster Docs" + +platform_type: "cloud" +display_name: "Azure AD" +feature_name: "saml_sso_azure" +pricing_plan: "pro" +--- + +# Setting up Azure Active Directory SSO for Dagster+ + +This guide is applicable to Dagster+. + +In this guide, you'll configure Azure Active Directory (AD) to use single sign-on (SSO) with your Dagster+ organization. + +--- + +## Prerequisites + +To complete the steps in this guide, you'll need: + +- **An existing Azure AD account** +- **To install the [`dagster-cloud` CLI](/dagster-plus/managing-deployments/dagster-plus-cli)** +- **The following in Dagster+:** + - A Pro plan + - [Access to a user token](/dagster-plus/account/managing-user-agent-tokens#managing-user-tokens) + - [Organization Admin permissions](/dagster-plus/account/managing-users/managing-user-roles-permissions#user-permissions-reference) in your organization + +--- + +## Step 1: Add the Dagster+ app in Azure AD + +In this step, you'll add the Dagster+ app to your list of managed SaaS apps in Azure AD. + +1. Sign in to the Azure portal. +2. On the left navigation pane, click the **Azure Active Directory** service. 
+3. Navigate to **Enterprise Applications** and then **All Applications**. +4. Click **New application**. +5. In the **Add from the gallery** section, type **Dagster+** in the search box. +6. Select **Dagster+** from the results panel and then add the app. Wait a few seconds while the app is added to your tenant. + +--- + +## Step 2: Configure SSO in Azure AD + +In this step, you'll configure and enable SSO for Azure AD in your Azure portal. + +1. On the **Dagster+** application integration page, locate the **Manage** section and select **single sign-on**. + +2. On the **Select a single sign-on method** page, select **SAML**. + +3. On the **Set up single sign-on with SAML** page, click the pencil icon for **Basic SAML Configuration** to edit the settings. + + Settings Dropdown + +4. In the **Basic SAML Configuration** section, fill in the **Identifier** and **Reply URL** fields as follows: + + Copy and paste the following URL, replacing `` with your Dagster+ organization name: + + https://.dagster.cloud/auth/saml/consume + +5. Click **Set additional URLs**. + +6. In the **Sign-on URL** field, copy and paste the URL you entered in the **Identifier** and **Reply URL** fields. + +7. Next, you'll configure the SAML assertions. In addition to the default attributes, Dagster+ requires the following: + + - `FirstName` - `user.givenname` + - `LastName` - `user.surname` + - `Email` - `user.userprincipalname` + + Add these attribute mappings to the SAML assertion. + +8. On the **Set up single sign-on with SAML** page: + + 1. Locate the **SAML Signing Certificate** section. + + 2. Next to **Federation Metadata XML**, click **Download**: + + Download SAML Certificate + + When prompted, save the SAML metadata file to your computer. + +--- + +## Step 3: Upload the SAML metadata to Dagster+ + +After you've downloaded the SAML metadata file, upload it to Dagster+ using the `dagster-cloud` CLI: + +```shell +dagster-cloud organization settings saml upload-identity-provider-metadata \ + --api-token= \ + --url https://.dagster.cloud +``` + +--- + +## Step 4: Create a test user + +In this section, you'll create a test user in the Azure portal. + +1. From the left pane in the Azure portal, click **Azure Active Directory**. +2. Click **Users > All users**. +3. Click **New user** at the top of the screen. +4. In **User** properties, fill in the following fields: + - **Name**: Enter `B.Simon`. + - **User name**: Enter `B.Simon@contoso.com`. + - Select the **Show password** check box and write down the value displayed in the **Password** box. +5. Click **Create**. + +--- + +## Step 5: Test your SSO configuration + +Lastly, you'll test your SSO configuration: + +- [Service provider (SP)-initiated login](#testing-a-service-provider-initiated-login) +- [Identity provider (idP)-initiated login](#testing-an-identity-provider-initiated-login) + +### Testing a service provider-initiated login + +1. Navigate to your Dagster+ sign in page at `https://.dagster.cloud` + +2. Click the **Sign in with SSO** button. + +3. Initiate the login flow and address issues that arise, if any. + +### Testing an identity provider-initiated login + +Click **Test this application** in the Azure portal. If successful, you'll be automatically signed into your Dagster+ organization. 
diff --git a/docs/content/dagster-plus/account/authentication/setting-up-google-workspace-saml-sso.mdx b/docs/content/dagster-plus/account/authentication/setting-up-google-workspace-saml-sso.mdx new file mode 100644 index 0000000000000..fc9f97a604abe --- /dev/null +++ b/docs/content/dagster-plus/account/authentication/setting-up-google-workspace-saml-sso.mdx @@ -0,0 +1,184 @@ +--- +title: "Setting up Google Workspace SSO for Dagster+ | Dagster Docs" + +platform_type: "cloud" +display_name: "Google Workspace" +feature_name: "saml_sso_google" +pricing_plan: "pro" +--- + +# Setting up Google Workspace SSO for Dagster+ + +This guide is applicable to Dagster+. + +In this guide, you'll configure Google Workspace to use single sign-on (SSO) with your Dagster+ organization. + +--- + +## Prerequisites + +To complete the steps in this guide, you'll need: + +- **The following in Google**: + - An existing Google account + - [Workspace Admin permissions](https://support.google.com/a/answer/6365252?hl=en\&ref_topic=4388346) +- **To install the [`dagster-cloud` CLI](/dagster-plus/managing-deployments/dagster-plus-cli)** +- **The following in Dagster+:** + - A Pro plan + - [Access to a user token](/dagster-plus/account/managing-user-agent-tokens#managing-user-tokens) + - [Organization Admin permissions](/dagster-plus/account/managing-users/managing-user-roles-permissions#user-permissions-reference) in your organization + +--- + +## Step 1: Add the Dagster+ app in Google Workspace + +1. Navigate to your Google Admin Console: + +2. Using the sidebar, navigate to **Apps > Web and mobile apps**: + + Google Workspace Sidebar + +3. On the **Web and mobile apps** page, click **Add App > Add custom SAML app**: + + Add App + + This opens a new page for adding app details. + +--- + +## Step 2: Configure SSO in Google Workspace + +1. On the **App details** page: + + 1. Fill in the **App name** field. + + 2. Fill in the **Description** field. + + The page should look similar to the following: + + Application Details + + 3. Click **Continue**. + +2. On the **Google Identity Provider details** page, click **Continue**. No action is required for this page. + +3. On the **Service provider details** page: + + 1. In the **ACS URL** and **Entity ID** fields: + + Copy and paste the following URL, replacing `` with your Dagster+ organization name: + + https://.dagster.cloud/auth/saml/consume + + 2. Check the **Signed Response** box. + + The page should look similar to the image below. In this example, the organization's name is `hooli` and the Dagster+ domain is `https://hooli.dagster.cloud`: + + Service Provider Details + + 3. When finished, click **Continue**. + +4. On the **Attributes** page: + + 1. Click **Add mapping** to add and configure the following attributes: + + - **Basic Information > First Name** - `FirstName` + - **Basic Information > Last Name** - `LastName` + - **Basic Information > Email** - `Email` + + The page should look like the following image: + + Attribute Mapping + + 2. Click **Finish**. + +--- + +## Step 3: Upload the SAML metadata to Dagster+ + +Next, you'll save and upload the application's SAML metadata to Dagster+. This will enable single sign-on. + +1. In your Google Workspace, open the Dagster+ application you added in [Step 2](#step-2-configure-sso-in-google-workspace). + +2. Click **Download metadata**: + + SAML Metadata + +3. In the modal that displays, click **Download metadata** to start the download. Save the file to your computer. + +4. 
After you've downloaded the SAML metadata file, upload it to Dagster+ using the `dagster-cloud` CLI: + + ```shell + dagster-cloud organization settings saml upload-identity-provider-metadata \ + --api-token= \ + --url https://.dagster.cloud + ``` + +--- + +## Step 4: Grant access to users + +In this step, you'll assign users in your Google Workspace to the Dagster+ application. This allows members of the workspace to log in to Dagster+ using their credentials when the single sign-on flow is initiated. + +1. In the Google Workspace Dagster+ application, click **User access**. +2. Select an organizational unit. +3. Click **ON for everyone**. +4. Click **Save**. + + Assign New Login + +--- + +## Step 5: Test your SSO configuration + +Lastly, you'll test your SSO configuration: + +- [Service provider (SP)-initiated login](#testing-a-service-provider-initiated-login) +- [Identity provider (idP)-initiated login](#testing-an-identity-provider-initiated-login) + +### Testing a service provider-initiated login + +1. Navigate to your Dagster+ sign in page at `https://.dagster.cloud` + +2. Click the **Sign in with SSO** button. + +3. Initiate the login flow and address issues that arise, if any. + +### Testing an identity provider-initiated login + +In the Google Workspace portal, click on the **Dagster+ icon**. If successful, you'll be automatically signed into your Dagster+ organization. diff --git a/docs/content/dagster-plus/account/authentication/setting-up-onelogin-saml-sso.mdx b/docs/content/dagster-plus/account/authentication/setting-up-onelogin-saml-sso.mdx new file mode 100644 index 0000000000000..ff1c3d0a408a4 --- /dev/null +++ b/docs/content/dagster-plus/account/authentication/setting-up-onelogin-saml-sso.mdx @@ -0,0 +1,139 @@ +--- +title: "Setting up OneLogin SSO for Dagster+ | Dagster Docs" + +platform_type: "cloud" +display_name: "OneLogin" +feature_name: "saml_sso_onelogin" +pricing_plan: "pro" +--- + +# Setting up OneLogin SSO for Dagster+ + +This guide is applicable to Dagster+. + +In this guide, you'll configure OneLogin to use single sign-on (SSO) with your Dagster+ organization. + +--- + +## Prerequisites + +To complete the steps in this guide, you'll need: + +- **The following in OneLogin:** + - An existing OneLogin account + - Admin permissions +- **To install the [`dagster-cloud` CLI](/dagster-plus/managing-deployments/dagster-plus-cli)** +- **The following in Dagster+:** + - A Pro plan + - [Access to a user token](/dagster-plus/account/managing-user-agent-tokens#managing-user-tokens) + - [Organization Admin permissions](/dagster-plus/account/managing-users/managing-user-roles-permissions#user-permissions-reference) in your organization + +--- + +## Step 1: Add the Dagster+ app in OneLogin + +1. Sign into your OneLogin portal. + +2. Navigate to **Administration > Applications**. + +3. On the **Applications** page, click **Add App**. + +4. On the **Find Applications** page, search for `Dagster+`: + + Find Applications + +5. Add and save the application. + +--- + +## Step 2: Configure SSO in OneLogin + +1. In OneLogin, open the application and navigate to its **Configuration**. + +2. In the **Dagster+ organisation name** field, enter your Dagster+ organization name. This is used to route the SAML response to the correct Dagster+ subdomain. + + For example, our organization name is `hooli` and our Dagster+ domain is `https://hooli.dagster.cloud`. To configure this correctly, we'd enter `hooli` into the **Subdomain** field. + +3. When finished, click **Done**. 
+
+---
+
+## Step 3: Upload the SAML metadata to Dagster+
+
+Next, you'll save and upload the application's SAML metadata to Dagster+. This will enable single sign-on.
+
+1. In OneLogin, open the Dagster+ application.
+
+2. Navigate to **More Actions > SAML Metadata**.
+
+3. When prompted, save the file to your computer.
+
+4. After you've downloaded the SAML metadata file, upload it to Dagster+ using the `dagster-cloud` CLI:
+
+   ```shell
+   dagster-cloud organization settings saml upload-identity-provider-metadata \
+     --api-token= \
+     --url https://.dagster.cloud
+   ```
+
+---
+
+## Step 4: Grant access to users
+
+Next, you'll assign users to the Dagster+ application in OneLogin. This will allow them to log in using their OneLogin credentials when the sign-in flow is initiated.
+
+1. In OneLogin, navigate to **Users**.
+
+2. Select a user.
+
+3. On the user's page, click **Applications**.
+
+4. Assign the user to Dagster+. In the following image, we've assigned user `Test D'Test` to Dagster+:
+
+   Assign New Login
+
+5. Click **Continue**.
+
+6. Click **Save User**.
+
+7. Repeat steps 2-6 for every user you want to access Dagster+.
+
+---
+
+## Step 5: Test your SSO configuration
+
+Lastly, you'll test your SSO configuration:
+
+- [Service provider (SP)-initiated login](#testing-a-service-provider-initiated-login)
+- [Identity provider (IdP)-initiated login](#testing-an-identity-provider-initiated-login)
+
+### Testing a service provider-initiated login
+
+1. Navigate to your Dagster+ sign in page at `https://.dagster.cloud`
+
+2. Click the **Sign in with SSO** button.
+
+3. Initiate the login flow and address issues that arise, if any.
+
+### Testing an identity provider-initiated login
+
+In the OneLogin portal, click the Dagster+ icon:
+
+If successful, you'll be automatically signed into your Dagster+ organization.
diff --git a/docs/content/dagster-plus/account/authentication/setting-up-pingone-saml-sso.mdx b/docs/content/dagster-plus/account/authentication/setting-up-pingone-saml-sso.mdx
new file mode 100644
index 0000000000000..8b10168784423
--- /dev/null
+++ b/docs/content/dagster-plus/account/authentication/setting-up-pingone-saml-sso.mdx
@@ -0,0 +1,193 @@
+---
+title: "Setting up PingOne SSO for Dagster+ | Dagster Docs"
+
+display_name: "PingOne"
+feature_name: "saml_sso_pingone"
+pricing_plan: "pro"
+---
+
+# Setting up PingOne SSO for Dagster+
+
+This guide is applicable to Dagster+.
+
+In this guide, you'll configure PingOne to use single sign-on (SSO) with your Dagster+ organization.
+
+---
+
+## Prerequisites
+
+To complete the steps in this guide, you'll need:
+
+- **The following in PingOne:**
+  - An existing PingOne account
+  - Organization admin permissions
+- **To install the [`dagster-cloud` CLI](/dagster-plus/managing-deployments/dagster-plus-cli)**
+- **The following in Dagster+:**
+  - A Pro plan
+  - [Access to a user token](/dagster-plus/account/managing-user-agent-tokens#managing-user-tokens)
+  - [Organization Admin permissions](/dagster-plus/account/managing-users/managing-user-roles-permissions#user-permissions-reference) in your organization
+
+---
+
+## Step 1: Add the Dagster+ app in PingOne
+
+1. Sign in to your PingOne Console.
+
+2. Using the sidebar, click **Connections > Applications**.
+
+   PingOne Sidebar
+
+3. On the **Applications** page, add an application.
+
+4. In **Select an application type**, click **Web app**.
+
+5. Click **SAML > Configure**:
+
+   Add App
+
+---
+
+## Step 2: Configure SSO in PingOne
+
+1. In the **Create App Profile** page:
+
+   1.
   1. Add an application name, description, and icon:

      Application Details

   2. When finished, click **Save and Continue.**

2. In the **Configure SAML** page:

   1. Fill in the following:

      - **ACS URLs** and **Entity ID**: Copy and paste the following URL, replacing `<organization_name>` with your Dagster+ organization name:

            https://<organization_name>.dagster.cloud/auth/saml/consume

      - **Assertion Validity Duration**: Type `60`.

      In the following example, the organization's name is `hooli` and the Dagster+ domain is `https://hooli.dagster.cloud`:

      Service Provider Details

   2. When finished, click **Save and Continue.**

3. In the **Map Attributes** page:

   1. Configure the following attributes:

      | Application attribute | Outgoing value |
      | --------------------- | -------------- |
      | Email                 | Email Address  |
      | FirstName             | Given Name     |
      | LastName              | Family Name    |

      The page should look similar to the following:

      Attribute Mapping

   2. When finished, click **Save and Continue.**

---

## Step 3: Upload the SAML metadata to Dagster+

Next, you'll save and upload the application's SAML metadata to Dagster+. This will enable single sign-on.

1. In PingOne, open the Dagster+ application.

2. Click the **Configuration** tab.

3. In the **Connection Details** section, click **Download Metadata**:

   SAML Metadata

4. When prompted, save the file to your computer.

5. After you've downloaded the SAML metadata file, upload it to Dagster+ using the `dagster-cloud` CLI:

   ```shell
   dagster-cloud organization settings saml upload-identity-provider-metadata \
   --api-token=<user_token> \
   --url https://<organization_name>.dagster.cloud
   ```

---

## Step 4: Grant access to users

Next, you'll assign users to the Dagster+ application in PingOne. This will allow them to log in using their PingOne credentials when the single sign-on flow is initiated.

1. In the Dagster+ application, click the **Access** tab.

2. Click the **pencil icon** to edit the **Group membership policy**:

   Assign New Login

3. Edit the policy as needed to grant users access to the application.

---

## Step 5: Test your SSO configuration

Lastly, you'll test your SSO configuration:

- [Service provider (SP)-initiated login](#testing-a-service-provider-initiated-login)
- [Identity provider (IdP)-initiated login](#testing-an-identity-provider-initiated-login)

### Testing a service provider-initiated login

1. Navigate to your Dagster+ sign in page at `https://<organization_name>.dagster.cloud`.

2. Click the **Sign in with SSO** button.

3. Initiate the login flow and address issues that arise, if any.

### Testing an identity provider-initiated login

In the PingOne application portal, click the **Dagster+** icon:

If successful, you'll be automatically signed in to your Dagster+ organization.

diff --git a/docs/content/dagster-plus/account/authentication/utilizing-scim-provisioning.mdx b/docs/content/dagster-plus/account/authentication/utilizing-scim-provisioning.mdx
new file mode 100644
index 0000000000000..5c84bf03aa7cd
--- /dev/null
+++ b/docs/content/dagster-plus/account/authentication/utilizing-scim-provisioning.mdx
@@ -0,0 +1,96 @@
---
title: "Utilizing SCIM provisioning in Dagster+ | Dagster Docs"

display_name: "SCIM"
feature_name: "scim"
pricing_plan: "pro"
---

# Utilizing SCIM provisioning in Dagster+

The [System for Cross-domain Identity Management specification](https://scim.cloud/) (SCIM) is a standard designed to manage user identity information. 
With SCIM, you can: + + + +--- + +## Understanding SCIM provisioning + +SCIM provisioning eases the burden of manually provisioning users across your cloud applications. When enabled, you can automatically sync user information from your IdP to Dagster+ and back again, ensuring user data is always up-to-date. + +For a detailed look at SCIM provisioning, [check out this blog post](https://www.strongdm.com/blog/scim-provisioning). + +### Managing users + +When SCIM is enabled in Dagster+, a few things about user management will change: + +- **New users must be added in the IdP.** The ability to add new users will be disabled in Dagster+ while SCIM is enabled. +- **Only 'unsynced' users can be removed in Dagster+.** 'Synced' users will have an icon indicating they're externally managed by the IdP, while unsynced users will not. For example, the first two users in the following image are synced, while the last isn't: + + Highlighted unsynced user in the Dagster+ UI + + You might see unsynced users in Dagster+ when: + + - **Users exist in Dagster+, but not in the IdP.** In this case, create matching users in the IdP and then provision them. This will link the IdP users to the Dagster+ users. + - **Users are assigned to the Dagster+ IdP app before provisioning is enabled.** In this case, you'll need to provision the users in the IdP to link them to the Dagster+ users. + +If you choose to disable SCIM provisioning in Dagster+, users and teams will remain as-is at the time SCIM is disabled. + +### Managing teams + +In addition to the above user management changes, there are a few things to keep in mind when managing user groups, otherwise known as Dagster+ [teams](/dagster-plus/account/managing-users/managing-teams). + +User groups in your IdP can be mapped to Dagster+ teams, allowing you to centralize the management of user groups and memberships. When SCIM is enabled: + +- **Teams can still be managed in Dagster+.** You can choose to map and sync these teams to the IdP or administer them solely in Dagster+. Synced groups should be managed only in the IdP, or changes made in Dagster+ may be overwritten when a sync is triggered from the IdP. +- **If a group exists only in the IdP** and is synced to Dagster+, you'll be prompted to either create a new Dagster+ team with the same name or create a link between the IdP group and an existing team in Dagster+. +- **If a group exists only in Dagster+**, the group will display in the IdP as an 'external' group with no members. In this case, you can either create a new group in the IdP and link it to an existing Dagster+ team, or choose to manage the team only in Dagster+. + +--- + +## Enabling SCIM provisioning + +### Prerequisites + +To use SCIM provisioning, you'll need: + +- A Dagster+ Pro plan +- [An IdP for which Dagster+ supports SSO and SCIM provisioning](#supported-identity-providers) +- Permissions in your IdP that allow you to configure SSO and SCIM provisioning + +### Supported Identity Providers + +Dagster+ currently supports SCIM provisioning for the following Identity Providers (IdP): + + + + + + +Use the setup guide for your IdP to get started. 
+ +--- + +## Related + + + + + diff --git a/docs/content/dagster-plus/account/managing-user-agent-tokens.mdx b/docs/content/dagster-plus/account/managing-user-agent-tokens.mdx new file mode 100644 index 0000000000000..1568c46a32a17 --- /dev/null +++ b/docs/content/dagster-plus/account/managing-user-agent-tokens.mdx @@ -0,0 +1,77 @@ +--- +title: Managing user and agent tokens in Dagster+ | Dagster Docs +--- + +# Managing user and agent tokens in Dagster+ + +This guide is applicable to Dagster+. + +In this guide, we'll walk you through creating and revoking user and agent tokens in Dagster+. + +--- + +## Managing agent tokens + + + To manage agent tokens, you need to be an{" "} + Organization Admin in + Dagster+. + + +Agent tokens are used to authenticate [Hybrid agents](/dagster-plus/deployment/agents) with the Dagster+ Agents API. + +### Creating agent tokens + +1. Sign in to your Dagster+ account. +2. Click the **user menu (your icon) > Organization Settings**. +3. Click the **Tokens** tab. +4. Click **+ Create agent token**. + +After the token is created: + +- **To view a token**, click **Reveal token**. Clicking on the token value will copy it to the clipboard. +- **To edit a token's description**, click the **pencil icon**. + +### Assigning agent token permissions + +1. Sign in to your Dagster+ account. +2. Click the **user menu (your icon) > Organization Settings**. +3. Click the **Tokens** tab. +4. Click **Edit** next to the agent token you'd like to change. + +The permissions dialog allows you to edit a token's ability to access certain deployments. By default, agent tokens have permission to access any deployment in the organization including branch deployments. This is called **Org Agent** and is set using the toggle in the top right of the dialog. To edit individual deployment permissions, **Org Agent** has to first be toggled off. + +### Revoking agent tokens + +To revoke a token: + +1. Sign in to your Dagster+ account. +2. Click the **user menu (your icon) > Organization Settings**. +3. Click the **Tokens** tab. +4. Click **Edit** next to the agent token you'd like to change. +5. Click **Revoke** in the bottom left of the permissions dialog. When prompted, confirm to proceed with revoking the token. + +--- + +## Managing user tokens + +1. Sign in to your Dagster+ account. +2. Click the **user menu (your icon) > Organization Settings**. +3. Click the **Tokens** tab. +4. Click **+ Create user token**. + +After the token is created: + +- **To edit a token's description**, click the **pencil icon**. +- **To view a token**, click **Reveal token**. Clicking on the token value will copy it to the clipboard. +- **To revoke a token**, click **Revoke**. + +To manage tokens for another user, select the user from the **Manage tokens for** dropdown: + + + +**Note**: **Organization Admin** permissions are required to manage another user's tokens. diff --git a/docs/content/dagster-plus/account/managing-users.mdx b/docs/content/dagster-plus/account/managing-users.mdx new file mode 100644 index 0000000000000..5cdebc8518d0d --- /dev/null +++ b/docs/content/dagster-plus/account/managing-users.mdx @@ -0,0 +1,131 @@ +--- +title: Managing users in Dagster+ | Dagster Docs +--- + +# Managing users in Dagster+ + +This guide is applicable to Dagster+. + +In this guide, we'll cover how to add and remove users in your Dagster+ organization. 
**Note**: If utilizing [SCIM provisioning](/dagster-plus/account/authentication/utilizing-scim-provisioning), you'll need to manage users through your Identity Provider (IdP) instead of Dagster+.

---

## Adding users

Organization Admin or Admin permissions are required to add users in Dagster+.

Before you start, note that:

- **If SCIM provisioning is enabled,** you'll need to add new users in your IdP. Adding users will be disabled in Dagster+.
- **If using Google for SSO**, users must be added in Dagster+ before they can log in.
- **If using an Identity Provider (IdP) like Okta for SSO**, users must be assigned to the Dagster app in the IdP to be able to log in to Dagster+. Refer to the [SSO setup guides](/dagster-plus/account/authentication#single-sign-on) for setup instructions for each of our supported IdP solutions.

  By default, users will be granted Viewer permissions on each deployment. The default role can be adjusted by modifying the [`sso_default_role` deployment setting](/dagster-plus/managing-deployments/deployment-settings-reference#sso-default-role).

1. Sign in to your Dagster+ account.
2. Click the **user menu (your icon) > Organization Settings**.
3. Click the **Users** tab.
4. Click **Add new user.**
5. In the **User email** field, enter the user's email address.
6. Click **Add user**. The user will be added to the list of users.

After the user is created, you can [add the user to teams and assign user roles for each deployment](#managing-user-permissions).

---

## Managing user permissions

Organization Admin permissions are required to manage users in Dagster+.

After a user is created, the **Manage user permissions** window will automatically display. You can also access this window by clicking **Edit** next to a user in the users table.

### Adding users to teams

Teams are a Dagster+ Pro feature.

Using the **Teams** field, you can add users to one or more teams. This is useful for centralizing permission sets for different types of users. Refer to the [Managing teams](/dagster-plus/account/managing-users/managing-teams) guide for more info about creating and managing teams.

**Note**: When determining a user's level of access, Dagster+ will use the **most permissive** role assigned to the user between all of their team memberships and any individual role grants. Refer to the [Managing user roles and permissions](/dagster-plus/account/managing-users/managing-user-roles-permissions#applying-role-overrides) guide for more info.

### Assigning user roles

In the **Roles** section, you can select the appropriate [user role](/dagster-plus/account/managing-users/managing-user-roles-permissions) for each deployment.

1. Next to a deployment, click **Edit user role**.
2. Select the user role for the deployment. This [user role](/dagster-plus/account/managing-users/managing-user-roles-permissions) will be used as the default for all code locations in the deployment.
3. Click **Save**.
4. **Pro only**: To set permissions for individual [code locations](/dagster-plus/account/managing-users/managing-user-roles-permissions#code-locations) in a deployment:
   1. Click the toggle to the left of the deployment to open a list of code locations.
   2. Next to a code location, click **Edit user role**.
   3. Select the user role for the code location.
   4. Click **Save**.
5. Repeat the previous steps for each deployment.
6. 
**Optional**: To change the user's permissions for branch deployments: + 1. Next to **All branch deployments**, click **Edit user role**. + 2. Select the user role to use for all branch deployments. + 3. Click **Save**. +7. Click **Done**. + +--- + +## Removing users + + + Organization Admin permissions are required to remove users + in Dagster+. + + +Removing a user removes them from the organization. **Note**: If using a SAML-based SSO solution like Okta, you'll also need to remove the user from the IdP. Removing the user in Dagster+ doesn't remove them from the IdP. + +1. Sign in to your Dagster+ account. +2. Click the **user menu (your icon) > Organization Settings**. +3. Click the **Users** tab. +4. Locate the user in the user list. +5. Click **Edit**. +6. Click **Remove user**. +7. When prompted, confirm the removal. + +--- + +## Related + + + + + + diff --git a/docs/content/dagster-plus/account/managing-users/managing-teams.mdx b/docs/content/dagster-plus/account/managing-users/managing-teams.mdx new file mode 100644 index 0000000000000..9de3e43febe20 --- /dev/null +++ b/docs/content/dagster-plus/account/managing-users/managing-teams.mdx @@ -0,0 +1,125 @@ +--- +title: Managing teams in Dagster+ | Dagster Docs +--- + +# Managing teams in Dagster+ + +This guide is applicable to Dagster+ Pro. + +As part of our [role-based access control (RBAC) feature](/dagster-plus/account/managing-users/managing-user-roles-permissions), Dagster+ supports the ability to assign users to teams. A team is a group of users with a set of default deployment, code location, and Branch Deployment user roles. + +In this guide, we'll cover how to add, manage, and remove teams in Dagster+. + +--- + +## Prerequisites + +To use this feature, you'll need a **Dagster+ Pro plan.** + +--- + +## Adding teams + + + + Organization Admin permissions + {" "} + are required to add teams. + + +1. In the Dagster+ UI, click the **user menu (your icon) > Organization Settings**. +2. Click the **Teams** tab. +3. Click the **Create a team** button. +4. In the window that displays, enter a name in the **Team name** field. +5. Click **Create team**. + +After the team is created, you can [add users and assign user roles to deployments](#managing-team-members-and-roles). + +--- + +## Managing team members and roles + +In the **Organization Settings > Teams** tab: + +1. Locate the team you want to modify in the table of teams. +2. Click the **Edit** button in the team's row. + +From here, you can [manage team members](#managing-team-members) and [the team's roles for deployments](#managing-team-roles). + +### Managing team members + + + + Organization Admin permissions + {" "} + are required to add and remove team members. + + +#### Adding team members + +1. In the **Members** tab, use the search bar to locate a user in your organization. +2. Once located, click the user. +3. Click **Add user to team**. +4. Repeat as needed, clicking **Done** when finished. + +#### Removing team members + +1. In the **Members** tab, locate the user in the list of team members. +2. Click **Remove from team**. +3. Repeat as needed, clicking **Done** when finished. + +### Managing team roles + + + + Organization Admin or Admin permissions + {" "} + are required to manage team roles. Additionally, Admins can only manage teams + for deployments where they are an Admin. + + +1. In the **Roles** tab, click the **Edit team role** button next to the deployment where you want to modify the team's role. +2. 
In the window that displays, select the team role for the deployment. This [role](/dagster-plus/account/managing-users/managing-user-roles-permissions) will be used as the default for this team for all code locations in the deployment. +3. Click **Save**. +4. To set permissions for individual [code locations](/dagster-plus/account/managing-users/managing-user-roles-permissions#code-locations) in a deployment: + 1. Click the toggle to the left of the deployment to open a list of code locations. + 2. Next to a code location, click **Edit team role**. + 3. Select the team role for the code location. + 4. Click **Save**. + +--- + +## Removing teams + + + + Organization Admin permissions + {" "} + are required to remove teams. + + +1. In the Dagster+ UI, click the **user menu (your icon) > Organization Settings**. +2. Click the **Teams** tab. +3. Locate the team you want to delete in the table of teams. +4. Click the **Edit** button in the team's row. +5. Click the **Delete team** button. +6. When prompted, confirm the deletion. + +--- + +## Related + + + + + + diff --git a/docs/content/dagster-cloud/account/managing-users/managing-user-roles-permissions.mdx b/docs/content/dagster-plus/account/managing-users/managing-user-roles-permissions.mdx similarity index 84% rename from docs/content/dagster-cloud/account/managing-users/managing-user-roles-permissions.mdx rename to docs/content/dagster-plus/account/managing-users/managing-user-roles-permissions.mdx index 05a959a47e506..f09573cff9e94 100644 --- a/docs/content/dagster-cloud/account/managing-users/managing-user-roles-permissions.mdx +++ b/docs/content/dagster-plus/account/managing-users/managing-user-roles-permissions.mdx @@ -1,36 +1,36 @@ --- -title: Understanding role-based access control in Dagster Cloud | Dagster Docs +title: Understanding role-based access control in Dagster+ | Dagster Docs --- -# Understanding role-based access control in Dagster Cloud +# Understanding role-based access control in Dagster+ -This guide is applicable to Dagster Cloud. +This guide is applicable to Dagster+. -Role-based access control (RBAC) enables you to grant specific permissions to users in your organization, ensuring that Dagster users have access to what they require in Dagster Cloud, and no more. +Role-based access control (RBAC) enables you to grant specific permissions to users in your organization, ensuring that Dagster users have access to what they require in Dagster+, and no more. -In this guide, we'll cover how RBAC works in Dagster Cloud, how to assign roles to users, and the granular permissions for each user role. +In this guide, we'll cover how RBAC works in Dagster+, how to assign roles to users, and the granular permissions for each user role. --- -## Dagster Cloud user roles +## Dagster+ user roles -Dagster Cloud uses a hierarchical model for RBAC, meaning that the most permissive roles include permissions from the roles beneath them. The following user roles are currently supported, in order from the **most** permissive to the **least** permissive: +Dagster+ uses a hierarchical model for RBAC, meaning that the most permissive roles include permissions from the roles beneath them. 
The following user roles are currently supported, in order from the **most** permissive to the **least** permissive: - Organization Admin - Admin - Editor -- Launcher (Enterprise plans only) -- Viewer (Enterprise plans only) +- Launcher (Pro plans only) +- Viewer -For example, the **Admin** user role includes permissions specific to this role and all permissions in the **Editor**, **Launcher**, and **Viewer** user roles. Refer to the [User permissions reference](#user-permissions-reference) for the full list of user permissions in Dagster Cloud. +For example, the **Admin** user role includes permissions specific to this role and all permissions in the **Editor**, **Launcher**, and **Viewer** user roles. Refer to the [User permissions reference](#user-permissions-reference) for the full list of user permissions in Dagster+. ### User role enforcement -All user roles are enforced both in Dagster Cloud and the [GraphQL API](/concepts/webserver/graphql). +All user roles are enforced both in Dagster+ and the [GraphQL API](/concepts/webserver/graphql). ### Teams -Dagster Cloud Enterprise users can create teams of users and assign default permission sets. Refer to the [Managing teams in Dagster Cloud](/dagster-cloud/account/managing-users/managing-teams) guide for more info. +Dagster+ Pro users can create teams of users and assign default permission sets. Refer to the [Managing teams in Dagster+](/dagster-plus/account/managing-users/managing-teams) guide for more info. --- @@ -38,7 +38,7 @@ Dagster Cloud Enterprise users can create teams of users and assign default perm With the exception of the **Organization Admin** role, user and team roles are set on a per-deployment basis. -Organization Admins have access to the entire organization, including all [full deployments](/dagster-cloud/managing-deployments/managing-deployments), [code locations](/dagster-cloud/managing-deployments/code-locations), and [Branch Deployments](/dagster-cloud/managing-deployments/branch-deployments). +Organization Admins have access to the entire organization, including all [full deployments](/dagster-plus/managing-deployments/managing-deployments), [code locations](/dagster-plus/managing-deployments/code-locations), and [Branch Deployments](/dagster-plus/managing-deployments/branch-deployments). @@ -67,12 +67,12 @@ Organization Admins have access to the entire organization, including all [full - + @@ -98,7 +98,7 @@ Organization Admins have access to the entire organization, including all [full ### Applying role overrides -This section is applicable to Dagster Cloud Enterprise. +This section is applicable to Dagster+ Pro plans. As previously mentioned, you can define individual user roles for users in your organization. You can also apply permission overrides to grant specific exceptions. @@ -122,7 +122,7 @@ To override a code location role for an individual user: #### Team members -Users in your organization can belong to one or more [teams](/dagster-cloud/account/managing-users/managing-teams). When determining a user's level of access, Dagster Cloud will use the **most permissive** role assigned to the user between all of their team memberships and any individual role grants. +Users in your organization can belong to one or more [teams](/dagster-plus/account/managing-users/managing-teams). When determining a user's level of access, Dagster+ will use the **most permissive** role assigned to the user between all of their team memberships and any individual role grants. 
For example, let's look at a user with the following roles for our `dev` deployment: @@ -139,7 +139,7 @@ The above also applies to code locations and Branch Deployment roles. To view deployment-level overrides for a specific user, locate the user on the **Users** page and hover over a deployment: @@ -277,7 +276,7 @@ Deployment settings are accessed in the UI by navigating to **user menu (your ic @@ -290,7 +289,7 @@ Deployment settings are accessed in the UI by navigating to **user menu (your ic @@ -317,7 +316,7 @@ Deployment settings are accessed in the UI by navigating to **user menu (your ic @@ -344,7 +343,7 @@ Deployment settings are accessed in the UI by navigating to **user menu (your ic @@ -357,7 +356,7 @@ Deployment settings are accessed in the UI by navigating to **user menu (your ic @@ -392,7 +391,7 @@ Code locations are accessed in the UI by navigating to **Deployment > Code locat @@ -405,7 +404,7 @@ Code locations are accessed in the UI by navigating to **Deployment > Code locat @@ -418,7 +417,7 @@ Code locations are accessed in the UI by navigating to **Deployment > Code locat @@ -513,12 +512,12 @@ User tokens are accessed in the UI by navigating to **user menu (your icon) > Or - - + + @@ -528,7 +527,7 @@ User tokens are accessed in the UI by navigating to **user menu (your icon) > Or - + @@ -536,7 +535,7 @@ User tokens are accessed in the UI by navigating to **user menu (your icon) > Or - + @@ -563,7 +562,7 @@ User management is accessed in the UI by navigating to **user menu (your icon) > @@ -621,7 +620,7 @@ Team management is accessed in the UI by navigating to **user menu (your icon) > @@ -693,10 +692,7 @@ Team management is accessed in the UI by navigating to **user menu (your icon) > @@ -714,7 +710,7 @@ Team management is accessed in the UI by navigating to **user menu (your icon) > @@ -724,7 +720,7 @@ Team management is accessed in the UI by navigating to **user menu (your icon) > @@ -767,15 +763,15 @@ Team management is accessed in the UI by navigating to **user menu (your icon) > diff --git a/docs/content/dagster-plus/best-practices.mdx b/docs/content/dagster-plus/best-practices.mdx new file mode 100644 index 0000000000000..b073bfc25622c --- /dev/null +++ b/docs/content/dagster-plus/best-practices.mdx @@ -0,0 +1,16 @@ +--- +title: "Dagster+ Best Practices | Dagster Docs" + +platform_type: "cloud" +--- + +# Managing Dagster+ organization settings + +Make decisions about your deployment based on your organization's needs. + + + + diff --git a/docs/content/dagster-plus/best-practices/managing-multiple-projects-and-teams.mdx b/docs/content/dagster-plus/best-practices/managing-multiple-projects-and-teams.mdx new file mode 100644 index 0000000000000..b18d2e1f8a81d --- /dev/null +++ b/docs/content/dagster-plus/best-practices/managing-multiple-projects-and-teams.mdx @@ -0,0 +1,424 @@ +--- +title: "Managing multiple projects & teams in Dagster+ | Dagster Docs" +description: "How to set up and manage multiple projects with Dagster+ Hybrid deployments." +--- + +# Managing multiple projects & teams in Dagster+ + +In this guide, we'll cover some strategies for managing multiple projects/code bases and teams in a Dagster+ account. + +--- + +## Separating code bases + + + In this section, repository refers to a version control system, such as Git or + Mercurial. 
+ + +If you want to manage complexity or divide your work into areas of responsibility, consider isolating your code bases into multiple projects with: + +- Multiple directories in a single repository, or +- Multiple repositories + +Refer to the following table for more information, including the pros and cons of each approach. + +
     Code location
-    Enterprise
+    Pro
     Defines the level of access for a given code location in a deployment.
-    Dagster Cloud Enterprise users can
+    Dagster+ Pro users can
     override the default deployment-level role for individual code locations. For example, if the Deployment role is
@@ -81,8 +81,8 @@ Organization Admins have access to the entire organization, including all [full
     as Editor or Admin.
-    For non-Enterprise users, users will have the same level of access for
-    all code locations in a deployment.
+    For non-Pro users, users will have the same level of access for all code
+    locations in a deployment.

@@ -98,7 +98,7 @@ Organization Admins have access to the entire organization, including all [full

### Applying role overrides

-This section is applicable to Dagster Cloud Enterprise.
+This section is applicable to Dagster+ Pro plans.

As previously mentioned, you can define individual user roles for users in your organization. You can also apply permission overrides to grant specific exceptions.

@@ -122,7 +122,7 @@ To override a code location role for an individual user:

#### Team members

-Users in your organization can belong to one or more [teams](/dagster-cloud/account/managing-users/managing-teams). When determining a user's level of access, Dagster Cloud will use the **most permissive** role assigned to the user between all of their team memberships and any individual role grants.
+Users in your organization can belong to one or more [teams](/dagster-plus/account/managing-users/managing-teams). When determining a user's level of access, Dagster+ will use the **most permissive** role assigned to the user between all of their team memberships and any individual role grants.

For example, let's look at a user with the following roles for our `dev` deployment:

@@ -139,7 +139,7 @@ The above also applies to code locations and Branch Deployment roles.

To view deployment-level overrides for a specific user, locate the user on the **Users** page and hover over a deployment:

(The remaining hunks in this file, @@ -277 through @@ -767, only update documentation links from /dagster-cloud/... to /dagster-plus/... paths in the user permissions reference tables, in the rows for: start and stop schedules; view deployments; modify deployment settings; create, edit, delete environment variables; view environment variable values; export environment variables; create and delete deployments; create Branch Deployments; view code locations; create and remove code locations; reload code locations and workspaces; view agent tokens; view and create own user tokens; view users; view teams; manage alerts; administer SAML; and manage SCIM.)

diff --git a/docs/content/dagster-plus/best-practices.mdx b/docs/content/dagster-plus/best-practices.mdx
new file mode 100644
index 0000000000000..b073bfc25622c
--- /dev/null
+++ b/docs/content/dagster-plus/best-practices.mdx
@@ -0,0 +1,16 @@
---
title: "Dagster+ Best Practices | Dagster Docs"

platform_type: "cloud"
---

# Managing Dagster+ organization settings

Make decisions about your deployment based on your organization's needs.

diff --git a/docs/content/dagster-plus/best-practices/managing-multiple-projects-and-teams.mdx b/docs/content/dagster-plus/best-practices/managing-multiple-projects-and-teams.mdx
new file mode 100644
index 0000000000000..b18d2e1f8a81d
--- /dev/null
+++ b/docs/content/dagster-plus/best-practices/managing-multiple-projects-and-teams.mdx
@@ -0,0 +1,424 @@
---
title: "Managing multiple projects & teams in Dagster+ | Dagster Docs"
description: "How to set up and manage multiple projects with Dagster+ Hybrid deployments."
---

# Managing multiple projects & teams in Dagster+

In this guide, we'll cover some strategies for managing multiple projects/code bases and teams in a Dagster+ account.

---

## Separating code bases

In this section, repository refers to a version control system, such as Git or Mercurial.

If you want to manage complexity or divide your work into areas of responsibility, consider isolating your code bases into multiple projects with:

- Multiple directories in a single repository, or
- Multiple repositories

Refer to the following table for more information, including the pros and cons of each approach.
    + + + + + + + + + + + + + + + + + + + + + + + + +
| | Multiple directories in a single repository | Multiple repositories |
| --- | --- | --- |
| **How it works** | You can use a single repository to manage multiple projects by placing each project in a separate directory. Depending on your VCS, you may be able to set code owners to restrict who can modify each project. | For stronger isolation, you can use multiple repositories to manage multiple projects. |
| **Pros** | • Simple to implement<br/>• Facilitates code sharing between projects | • Stronger isolation between projects and teams<br/>• Each project has its own CI/CD pipeline and can be deployed independently<br/>• Dependencies between projects can be managed independently |
| **Cons** | • All projects share the same CI/CD pipeline and cannot be deployed independently<br/>• Shared dependencies between projects may cause conflicts and require coordination between teams | • Code sharing between projects requires additional coordination to publish and reuse packages between projects |
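If your projects live in a single repository hosted on GitHub, one way to realize the "code owners" restriction mentioned in the table above is a `CODEOWNERS` file. The sketch below is hypothetical — the directory and team names are placeholders:

```
# .github/CODEOWNERS — each project directory is owned by a different team
/project_a/ @my-org/team-a
/project_b/ @my-org/team-b
```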
    + +### Deployment configuration + +Whether you use a single repository or multiple, you can use a [`dagster_cloud.yaml` file](/dagster-plus/managing-deployments/dagster-cloud-yaml) to define the code locations to deploy. For each repository, follow the [steps appropriate to your CI/CD provider](/dagster-plus/getting-started#step-4-configure-cicd-for-your-project) and include only the code locations that are relevant to the repository in your CI/CD workflow. + +#### Example with GitHub CI/CD on Hybrid deployment + +1. **For each repository**, use the CI/CD workflow provided in [Dagster+ Hybrid quickstart repository](https://github.com/dagster-io/dagster-cloud-hybrid-quickstart/blob/main/.github/workflows/dagster-cloud-deploy.yml). + +2. **For each project in the repository**, configure a code location in the [`dagster_cloud.yaml` file](/dagster-plus/managing-deployments/dagster-cloud-yaml): + + ```yaml + # dagster_cloud.yml + + locations: + - location_name: project_a + code_source: + package_name: project_a + build: + # ... + - location_name: project_b + code_source: + package_name: project_b + build: + # ... + ``` + +3. In the repository's `dagster-cloud-deploy.yml` file, modify the CI/CD workflow to deploy all code locations for the repository: + + ```yaml + # .github/workflows/dagster-cloud-deploy.yml + + jobs: + dagster-cloud-deploy: + # ... + steps: + - name: Update build session with image tag for "project_a" code location + id: ci-set-build-output-project-a + if: steps.prerun.outputs.result != 'skip' + uses: dagster-io/dagster-cloud-action/actions/utils/dagster-cloud-cli@v0.1 + with: + command: "ci set-build-output --location-name=project_a --image-tag=$IMAGE_TAG" + + - name: Update build session with image tag for "project_b" code location + id: ci-set-build-output-project-b + if: steps.prerun.outputs.result != 'skip' + uses: dagster-io/dagster-cloud-action/actions/utils/dagster-cloud-cli@v0.1 + with: + command: "ci set-build-output --location-name=project_b --image-tag=$IMAGE_TAG" + # ... + ``` + +--- + +## Isolating execution context between projects + +Separating execution context between projects can have several motivations: + +- Facilitating separation of duty between teams to prevent access to sensitive data +- Differing compute environments and requirements, such as different architecture, cloud provider, etc. +- Reducing impact on other projects. For example, a project with a large number of runs can impact the performance of other projects. + +In order from least to most isolated, there are three levels of isolation: + +- [Code location](#code-location-isolation) +- [Agent](#agent-isolation) +- [Deployment](#deployment-isolation) + +### Code location isolation + +If you have no specific requirements for isolation beyond the ability to deploy and run multiple projects, you can use a single agent and deployment to manage all your projects as individual code locations. + + + + + + + + + + + + + + + + +
| Pros | Cons |
| --- | --- |
| • Simplest and most cost-effective solution<br/>• User access control can be set at the code location level<br/>• Single glass pane to view all assets | • No isolation between execution environments |
### Agent isolation

Agent queues are a Dagster+ Pro feature available on Hybrid deployments.

Using the [agent routing feature](/dagster-plus/deployment/agents/running-multiple-agents#routing-requests-to-specific-agents), you can effectively isolate execution environments between projects by using a separate agent for each project.

Motivations for utilizing this approach could include:

- Different compute requirements, such as different cloud providers or architectures
- Optimizing for locality or access, such as running data processing closer to, or in an environment with access to, the storage locations
| Pros | Cons |
| --- | --- |
| • Isolation between execution environments<br/>• User access control can be set at the code location level<br/>• Single glass pane to view all assets | • Extra work to set up additional agents and agent queues |
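To make the routing concrete, here is a rough sketch. The `agent_queues` block matches the agent configuration reference later in this document; the per-location `agent_queue` key and all names (`data-eng`, `data_eng_pipelines`) are assumptions — see the routing guide linked above for the exact syntax:

```yaml
# dagster.yaml for the project's dedicated agent: only serve the "data-eng" queue
agent_queues:
  include_default_queue: false # don't pick up work intended for other agents
  queues:
    - data-eng
---
# dagster_cloud.yaml: pin this project's code location to that queue
locations:
  - location_name: data_eng_pipelines
    code_source:
      package_name: data_eng_pipelines
    agent_queue: data-eng
```

With this in place, runs and code servers for `data_eng_pipelines` would only be picked up by agents listening on the `data-eng` queue, while other locations continue to use the default queue.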
### Deployment isolation

Multiple deployments are only available in Dagster+ Pro.

Of the approaches outlined in this guide, multiple deployments are the most isolated solution. The typical motivation for this isolation level is to separate production and non-production environments. It may also be considered to satisfy other organization-specific requirements.
| Pros | Cons |
| --- | --- |
| • Isolation between assets and execution environments<br/>• User access control can be set at the code location and deployment level | • No single glass pane to view all assets (requires switching between multiple deployments in the UI) |
    + +--- + +## Related + + + + + + + + diff --git a/docs/content/dagster-plus/deployment.mdx b/docs/content/dagster-plus/deployment.mdx new file mode 100644 index 0000000000000..1535cc81f2083 --- /dev/null +++ b/docs/content/dagster-plus/deployment.mdx @@ -0,0 +1,27 @@ +--- +title: "Dagster+ deployment types | Dagster Docs" +--- + +# Dagster+ deployment types + +Dagster+ currently offers two deployment options to meet your needs: Serverless and Hybrid. + +--- + +## Serverless deployments + +A Serverless deployment allows you to run Dagster jobs without spinning up any infrastructure. This fully-managed version of Dagster+ is the easiest way to get started with Dagster. + +[Learn more about Serverless deployments](/dagster-plus/deployment/serverless). + +--- + +## Hybrid deployments + +A Hybrid deployment utilizes a combination of your infrastructure and Dagster-hosted backend services. + +The Dagster backend services - including the web frontend, GraphQL API, metadata database, and daemons (responsible for executing schedules and sensors) - are hosted in Dagster+. + +An [agent](/dagster-plus/deployment/agents) you run in your infrastructure is responsible for executing your Dagster code. + +[Learn more about Hybrid deployments](/dagster-plus/deployment/hybrid). diff --git a/docs/content/dagster-plus/deployment/agents.mdx b/docs/content/dagster-plus/deployment/agents.mdx new file mode 100644 index 0000000000000..dc3130545c8a3 --- /dev/null +++ b/docs/content/dagster-plus/deployment/agents.mdx @@ -0,0 +1,159 @@ +--- +title: "Dagster+ Hybrid agents | Dagster Docs" + +platform_type: "cloud" +--- + +# Dagster+ Hybrid agents + +For [Hybrid deployments](/dagster-plus/deployment/hybrid), Dagster+ uses an agent that is responsible for executing your code. The agent streams metadata about code execution over HTTPS to Dagster+’s Agent API. + +Dagster+ currently supports agents running on the following backends: + + + + + + + + + + + + + + + + + + + + + + + + + + +
| Name | How it works |
| --- | --- |
| Amazon Elastic Container Service (ECS) | The Amazon Elastic Container Service (ECS) agent executes Dagster jobs as Amazon Web Services (AWS) ECS tasks. This agent is appropriate for scaled production deployments; ECS is a good choice for teams who have already standardized on ECS or who don't plan to run their own container orchestration infrastructure. |
| Docker | The Docker agent executes Dagster jobs in Docker containers on your computer. |
| Kubernetes | The Kubernetes agent executes Dagster jobs on a Kubernetes cluster. This agent is appropriate for scaled production deployments and is a good choice for teams who have already standardized on Kubernetes. |
| Local | The local agent executes Dagster jobs as processes on your computer. |
    + +--- + +## Amazon ECS agents + + + + + + + + + +--- + +## Docker agents + + + + + + +--- + +## Kubernetes agents + + + + + + +--- + +## Local agents + + + + + +--- + +## Customizing agents + + + + + + diff --git a/docs/content/dagster-plus/deployment/agents/amazon-ecs.mdx b/docs/content/dagster-plus/deployment/agents/amazon-ecs.mdx new file mode 100644 index 0000000000000..e030f0c87bbaa --- /dev/null +++ b/docs/content/dagster-plus/deployment/agents/amazon-ecs.mdx @@ -0,0 +1,32 @@ +--- +title: "Dagster+ Amazon ECS agents | Dagster Docs" +--- + +# Dagster+ Amazon ECS agents + +Used with a Dagster+ [Hybrid deployment](/dagster-plus/deployment/hybrid), the Amazon Elastic Container Service (ECS) agent executes Dagster jobs as Amazon Web Services (AWS) ECS tasks. + +This agent is appropriate for scaled production deployments; ECS is a good choice for teams who have already standardized on ECS or who don't plan to run their own container orchestration infrastructure. + + + + + + + + diff --git a/docs/content/dagster-plus/deployment/agents/amazon-ecs/configuration-reference.mdx b/docs/content/dagster-plus/deployment/agents/amazon-ecs/configuration-reference.mdx new file mode 100644 index 0000000000000..39bad9ed1f086 --- /dev/null +++ b/docs/content/dagster-plus/deployment/agents/amazon-ecs/configuration-reference.mdx @@ -0,0 +1,341 @@ +--- +title: Amazon ECS agent configuration reference | Dagster Docs + +platform_type: "cloud" +--- + +# Amazon ECS agent configuration reference + +This guide is applicable to Dagster+. + +This reference describes the various configuration options Dagster+ currently supports for [Amazon ECS agents](/dagster-plus/deployment/agents/amazon-ecs). + +--- + +## Per-location configuration + +When [adding a code location](/dagster-plus/managing-deployments/code-locations) to Dagster+ with an Amazon ECS agent, you can use the `container_context` key on the location configuration to add additional ECS-specific configuration that will be applied to any ECS tasks associated with that code location. + +**Note**: If you're using the Dagster+ Github action, the `container_context` key can also be set for each location in your `dagster_cloud.yaml` file. 
+ +The following example [`dagster_cloud.yaml`](/dagster-plus/managing-deployments/dagster-cloud-yaml) file illustrates the available fields: + +```yaml +locations: + - location_name: cloud-examples + image: dagster/dagster-cloud-examples:latest + code_source: + package_name: dagster_cloud_examples + container_context: + ecs: + env_vars: + - DATABASE_NAME=staging + - DATABASE_PASSWORD + secrets: + - name: "MY_API_TOKEN" + valueFrom: "arn:aws:secretsmanager:us-east-1:123456789012:secret:FOO-AbCdEf:token::" + - name: "MY_PASSWORD" + valueFrom: "arn:aws:secretsmanager:us-east-1:123456789012:secret:FOO-AbCdEf:password::" + secrets_tags: + - "my_tag_name" + server_resources: # Resources for code servers launched by the agent for this location + cpu: 256 + memory: 512 + replica_count: 1 + run_resources: # Resources for runs launched by the agent for this location + cpu: 4096 + memory: 16384 + execution_role_arn: arn:aws:iam::123456789012:role/MyECSExecutionRole + task_role_arn: arn:aws:iam::123456789012:role/MyECSTaskRole + mount_points: + - sourceVolume: myEfsVolume + containerPath: "/mount/efs" + readOnly: True + volumes: + - name: myEfsVolume + efsVolumeConfiguration: + fileSystemId: fs-1234 + rootDirectory: /path/to/my/data + server_sidecar_containers: + - name: DatadogAgent + image: public.ecr.aws/datadog/agent:latest + environment: + - name: ECS_FARGATE + value: true + run_sidecar_containers: + - name: DatadogAgent + image: public.ecr.aws/datadog/agent:latest + environment: + - name: ECS_FARGATE + value: true + server_ecs_tags: + - key: MyEcsTagKey + value: MyEcsTagValue + run_ecs_tags: + - key: MyEcsTagKeyWithoutValue + repository_credentials: MyRepositoryCredentialsSecretArn +``` + +### Environment variables and secrets + + + +Refer to the following guides for more info about environment variables: + +- [Dagster+ environment variables and secrets](/dagster-plus/managing-deployments/environment-variables-and-secrets) +- [Using environment variables and secrets in Dagster code](/guides/dagster/using-environment-variables-and-secrets) + +--- + +## Per-job configuration: Resource limits + +You can use job tags to customize the CPU and memory of every run for that job: + +```py +from dagster import job, op + +@op() +def my_op(context): + context.log.info('running') + +@job( + tags = { + "ecs/cpu": "256", + "ecs/memory": "512", + } +) +def my_job(): + my_op() +``` + +[Fargate tasks only support certain combinations of CPU and memory.](https://docs.aws.amazon.com/AmazonECS/latest/developerguide/task-cpu-memory-error.html) + +If the `ecs/cpu` or `ecs/memory` tags are set, they will override any defaults set on the code location or the deployment. + +--- + +## Per-deployment configuration + +This section describes the properties of the `dagster.yaml` configuration file used by Amazon ECS agents. Typically, this file is created by the CloudFormation template that deploys the agent and can be found within the agent task definition's command. + +To change these properties, edit the CloudFormation template and redeploy the CloudFormation stack. 
```yaml
instance_class:
  module: dagster_cloud
  class: DagsterCloudAgentInstance

dagster_cloud_api:
  agent_token: <agent_token>
  deployments:
    - <deployment_name>
    - <deployment_name>
  branch_deployments: <true|false>

user_code_launcher:
  module: dagster_cloud.workspace.ecs
  class: EcsUserCodeLauncher
  config:
    cluster: <cluster_name>
    subnets:
      - <subnet_id>
      - <subnet_id>
    security_group_ids:
      - <security_group_id>
    service_discovery_namespace_id: <namespace_id>
    execution_role_arn: <task_execution_role_arn>
    task_role_arn: <task_role_arn>
    log_group: <log_group_name>
    launch_type: <"FARGATE"|"EC2">
    server_process_startup_timeout: <timeout_in_seconds>
    server_resources:
      cpu: <cpu>
      memory: <memory>
    server_sidecar_containers:
      - name: SidecarName
        image: SidecarImage

    run_resources:
      cpu: <cpu>
      memory: <memory>
    run_sidecar_containers:
      - name: SidecarName
        image: SidecarImage

    mount_points:
      - <mount_point>
    volumes:
      - <volume>
    server_ecs_tags:
      - key: MyEcsTagKey
        value: MyEcsTagValue
    run_ecs_tags:
      - key: MyEcsTagKeyWithoutValue
    repository_credentials: MyRepositoryCredentialsSecretArn

isolated_agents:
  enabled: <true|false>
agent_queues:
  include_default_queue: <true|false>
  queues:
    - <queue_name>
    - <queue_name>
```

### dagster_cloud_api properties

- **agent_token** - An agent token for the agent to use for authentication.
- **deployments** - The names of full deployments for the agent to serve.
- **branch_deployments** - Whether the agent should serve all branch deployments.

### user_code_launcher properties

- **cluster** - The name of an ECS cluster with a Fargate or EC2 capacity provider.
- **launch_type** - An ECS launch type to use for your launched ECS tasks. The following are currently supported:
  - `FARGATE`
  - `EC2` - **Note**: Using this launch type requires you to have an EC2 capacity provider installed and additional operational overhead to run the agent.
- **subnets** - At least one subnet is required. Dagster+ tasks require a route to the internet so they can access our API server. How this requirement is satisfied depends on the type of subnet provided:
  - **Public subnets** - The ECS agent will assign each task a public IP address. Note that ECS tasks on EC2 launched within public subnets do not have access to the internet, so a public subnet will only work for Fargate tasks.
  - **Private subnets** - The ECS agent assumes you've configured a NAT gateway with an attached elastic IP. Tasks will not be assigned a public IP address.
- **security_group_ids** - A list of security groups to use for tasks launched by the agent.
- **service_discovery_namespace_id** - The name of a private DNS namespace. The ECS agent launches each code location as its own ECS service. The agent communicates with these services via AWS CloudMap service discovery.
- **execution_role_arn** - The ARN of the Amazon ECS task execution IAM role. This role allows ECS to interact with AWS resources on your behalf, such as getting an image from ECR or pushing logs to CloudWatch. **Note**: This role must include a trust relationship that allows ECS to use it.
- **task_role_arn** - The ARN of the Amazon ECS task IAM role. This role allows the containers running in the ECS task to interact with AWS. **Note**: This role must include a trust relationship that allows ECS to use it.
- **log_group** - The name of a CloudWatch log group.
- **server_process_startup_timeout** - The amount of time, in seconds, to wait for code to import when launching a new service for a code location. If your code takes an unusually long time to load after your ECS task starts up and results in timeouts in the **Deployment** tab, you can increase this setting above the default. **Note**: This setting isn't applicable to the time it takes for a job to execute. Default: 180 seconds.
- How long (in seconds) to wait for ECS to spin up a new service and task for a code server. If your ECS tasks take an unusually long time to start and result in timeouts, you can increase this setting above the default. Default: 300 seconds.
- How long (in seconds) to continue polling if an ECS API endpoint fails during creation of a new code server (because the ECS API is eventually consistent). Default: 30 seconds.
- **server_resources** - The resources that the agent should allocate to the ECS service for each code location that it creates. If set, must be a dictionary with a `cpu` and/or `memory` key. **Note**: Fargate tasks only support certain combinations of CPU and memory.
- **server_sidecar_containers** - Additional sidecar containers to include along with the Dagster container. If set, must be a list of dictionaries with valid ECS container definitions.
- **run_resources** - The resources that the agent should allocate to the ECS task that it creates for each run. If set, must be a dictionary with a `cpu` and/or `memory` key. **Note**: Fargate tasks only support certain combinations of CPU and memory.
- **run_sidecar_containers** - Additional sidecar containers to include along with the Dagster container. If set, must be a list of dictionaries with valid ECS container definitions.
- **mount_points** - Mount points to include in the Dagster container. If set, should be a list of dictionaries matching the `mountPoints` field when specifying a container definition to boto3.
- **volumes** - Additional volumes to include in the task definition. If set, should be a list of dictionaries matching the `volumes` argument to `register_task_definition` in boto3.
- **server_ecs_tags** - Additional ECS tags to include in the service for each code location. If set, must be a list of dictionaries, each with a `key` key and optional `value` key.
- **run_ecs_tags** - Additional ECS tags to include in the task for each run. If set, must be a list of dictionaries, each with a `key` key and optional `value` key.
- **repository_credentials** - Optional ARN of the secret to authenticate into your private container registry. This does not apply if you are leveraging ECR for your images; see the AWS private auth guide.
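As a concrete sketch of the resource fields above (values mirror the examples earlier in this reference and use CPU/memory pairings that Fargate accepts; adjust for your workloads):

```yaml
user_code_launcher:
  module: dagster_cloud.workspace.ecs
  class: EcsUserCodeLauncher
  config:
    # Modest, always-on resources for each code location's code server
    server_resources:
      cpu: 256
      memory: 512
    # Larger resources for each launched run (1 vCPU / 4 GB is a valid Fargate pairing)
    run_resources:
      cpu: 1024
      memory: 4096
```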
### isolated_agents properties

- **enabled** - When enabled, agents are isolated and will not be able to access each other's resources. See the [Running multiple agents guide](/dagster-plus/deployment/agents/running-multiple-agents) for more information.

### agent_queues properties

These settings specify the queue(s) the agent will obtain requests from. See [Routing requests to specific agents](/dagster-plus/deployment/agents/running-multiple-agents#routing-requests-to-specific-agents).

- **include_default_queue** - If set to `true`, the agent processes requests from the default queue.
- **queues** - A list of additional queues to include in the agent's processing.

diff --git a/docs/content/dagster-plus/deployment/agents/amazon-ecs/creating-ecs-agent-existing-vpc.mdx b/docs/content/dagster-plus/deployment/agents/amazon-ecs/creating-ecs-agent-existing-vpc.mdx
new file mode 100644
index 0000000000000..c368ed4325983
--- /dev/null
+++ b/docs/content/dagster-plus/deployment/agents/amazon-ecs/creating-ecs-agent-existing-vpc.mdx
@@ -0,0 +1,91 @@
---
title: Creating an Amazon Elastic Container Service agent in an existing VPC | Dagster Docs
---

# Creating an Amazon Elastic Container Service agent in an existing VPC

This guide is applicable to Dagster+.

In this guide, you'll set up and deploy an Amazon Elastic Container Service (ECS) agent in an existing VPC using CloudFormation. Amazon ECS agents are used to launch user code in ECS tasks.

Our CloudFormation template allows you to quickly spin up the ECS agent stack in an existing VPC. It also supports using a new or existing ECS cluster. The template code can be found [here](https://s3.amazonaws.com/dagster.cloud/cloudformation/ecs-agent.yaml). Refer to the [CloudFormation docs](https://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/Welcome.html) for more info about CloudFormation.

**For info about deploying an ECS agent in a new VPC**, check out the [ECS agents in new VPCs guide](/dagster-plus/deployment/agents/amazon-ecs/creating-ecs-agent-new-vpc).

---

## Prerequisites

To complete the steps in this guide, you'll need:

- **In Dagster+**:

  - **Your organization and deployment names.**
  - **Permissions in Dagster+ that allow you to manage agent tokens**. Refer to the [User permissions documentation](/dagster-plus/account/managing-users) for more info.

- **In Amazon Web Services (AWS)**:
  - **An existing VPC with the following:**
    - **Subnets with access to the public internet**. Refer to the [AWS Work with VPCs guide](https://docs.aws.amazon.com/vpc/latest/userguide/working-with-vpcs.html) for more info.
    - **Enabled `enableDnsHostnames` and `enableDnsSupport` DNS attributes**. Refer to the [AWS DNS attributes documentation](https://docs.aws.amazon.com/vpc/latest/userguide/vpc-dns.html#vpc-dns-support) for more info.
  - **Optional**: An existing ECS cluster with a [Fargate or EC2 capacity provider](https://docs.aws.amazon.com/AmazonECS/latest/developerguide/cluster-capacity-providers.html). The CloudFormation template will create a cluster for you if one isn't specified.

---

## Step 1: Generate a Dagster+ agent token

---

## Step 2: Install the CloudFormation stack in AWS

Click the **Launch Stack** button to install the CloudFormation stack in your AWS account:

[](https://console.aws.amazon.com/cloudformation/home#/stacks/create/review?templateURL=https://s3.amazonaws.com/dagster.cloud/cloudformation/ecs-agent.yaml)

**Note**: Creating the CloudFormation stack may take a few minutes. 
Refresh the [AWS console **Stacks** page](https://console.aws.amazon.com/cloudformation/home#/stacks) to check the status.

---

## Step 3: Configure the agent

After the stack is installed, you'll be prompted to configure it. In the ECS wizard, fill in the following fields:

- **Dagster+ Organization**: Enter the name of your Dagster+ organization.
- **Dagster+ Deployment**: Enter the name of the Dagster+ deployment you want to use. Leave this field empty if the agent will only serve Branch deployments.
- **Enable Branch Deployments**: Whether to have this agent serve your ephemeral [Branch deployments](/dagster-plus/managing-deployments/branch-deployments). Only a single agent should have this setting enabled.
- **Agent Token**: Paste the agent token you generated in [Step 1](#step-1-generate-a-dagster-agent-token).
- **Deploy VPC**: The existing VPC to deploy the agent into.
- **Deploy VPC Subnet**: A public subnet of the existing VPC to deploy the agent into.
- **Existing ECS Cluster**: Optionally, the name of an existing ECS cluster to deploy the agent in. Leave blank to create a new cluster.
- **Task Launch Type**: Optionally, the launch type to use for new tasks created by the agent (FARGATE or EC2). Defaults to FARGATE.

The page should look similar to the following image. In this example, our organization name is `hooli` and our deployment is `prod`:

After you've finished configuring the stack in AWS, you can view the agent in Dagster+. To do so, navigate to the **Status** page and click the **Agents** tab. You should see the agent running in the **Agent statuses** section:

---

## Next steps

Now that you've got your agent running, what's next?

- **If you're getting Dagster+ set up**, the next step is to [add a code location](/dagster-plus/managing-deployments/code-locations) using the agent.

- **If you're ready to load your Dagster code**, refer to the [Adding Code to Dagster+](/dagster-plus/managing-deployments/code-locations) guide for more info.

If you need to upgrade your ECS agent's CloudFormation template, refer to the [upgrade guide](/dagster-plus/deployment/agents/amazon-ecs/upgrading-cloudformation-template) for more info.

diff --git a/docs/content/dagster-plus/deployment/agents/amazon-ecs/creating-ecs-agent-new-vpc.mdx b/docs/content/dagster-plus/deployment/agents/amazon-ecs/creating-ecs-agent-new-vpc.mdx
new file mode 100644
index 0000000000000..ed9aa52d61637
--- /dev/null
+++ b/docs/content/dagster-plus/deployment/agents/amazon-ecs/creating-ecs-agent-new-vpc.mdx
@@ -0,0 +1,96 @@
---
title: Creating an Amazon Elastic Container Service agent in a new VPC | Dagster Docs
---

# Creating an Amazon Elastic Container Service agent in a new VPC

This guide is applicable to Dagster+.

In this guide, you'll set up and deploy an Amazon Elastic Container Service (ECS) agent in a new VPC using CloudFormation. Amazon ECS agents are used to launch user code in ECS tasks.

Our CloudFormation template allows you to quickly spin up the ECS agent stack. This template sets up an ECS agent from scratch, creating a new VPC and ECS cluster for the agent to run in. The template code can be found [here](https://s3.amazonaws.com/dagster.cloud/cloudformation/ecs-agent-vpc.yaml). Refer to the [CloudFormation docs](https://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/Welcome.html) for more info about CloudFormation.
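If you'd rather script the deployment than use the console's **Launch Stack** flow, the stack can also be created with the AWS CLI. This is a sketch only — the stack name and parameter keys below are hypothetical, so check the template's `Parameters` section for the real ones:

```shell
aws cloudformation create-stack \
  --stack-name dagster-plus-ecs-agent \
  --template-url https://s3.amazonaws.com/dagster.cloud/cloudformation/ecs-agent-vpc.yaml \
  --capabilities CAPABILITY_NAMED_IAM \
  --parameters \
    ParameterKey=AgentToken,ParameterValue=<agent_token> \
    ParameterKey=DagsterOrganization,ParameterValue=<organization_name>
```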
+ +**For info about deploying an ECS agent in an existing VPC**, check out the [ECS agents in existing VPCs guide](/dagster-plus/deployment/agents/amazon-ecs/creating-ecs-agent-existing-vpc). + +--- + +## Prerequisites + +To complete the steps in this guide, you'll need: + +- **In Dagster+**: + + - **Your organization and deployment names.** + - **Permissions in Dagster+ that allow you to manage agent tokens**. Refer to the [User permissions documentation](/dagster-plus/account/managing-users) for more info. + +- **In Amazon Web Services (AWS), you'll need an account**: + + - **Under its VPC quota limit in the region where you're spinning up the agent.** By default, AWS allows **five VPCs per region**. If you're already at your limit, refer to the [AWS VPC quotas documentation](https://docs.aws.amazon.com/vpc/latest/userguide/amazon-vpc-limits.html) for info on requesting a quota increase. + + - **With an ECS service-linked IAM role**. This role is required to complete the setup in ECS. AWS will automatically create the role in your account the first time you create an ECS cluster in the console. However, the IAM role isn't automatically created when ECS clusters are created via CloudFormation. + + If your account doesn't have this IAM role, running the CloudFormation template may fail. + + If you haven't created an ECS cluster before, complete one of the following before proceeding: + + - Create one using the [first run wizard](https://console.aws.amazon.com/ecs/home#/firstRun), or + - Create the IAM role using the [AWS CLI](https://docs.aws.amazon.com/AmazonECS/latest/developerguide/using-service-linked-roles.html#create-service-linked-role) + +--- + +## Step 1: Generate a Dagster+ agent token + + + +--- + +## Step 2: Install the CloudFormation stack in AWS + +Click the **Launch Stack** button to install the CloudFormation stack in your AWS account: + +[](https://console.aws.amazon.com/cloudformation/home#/stacks/create/review?templateURL=https://s3.amazonaws.com/dagster.cloud/cloudformation/ecs-agent-vpc.yaml) + +**Note**: Creating the CloudFormation stack may take a few minutes. Refresh the [AWS console **Stacks** page](https://console.aws.amazon.com/cloudformation/home#/stacks) to check the status. + +If the installation fails, verify that your AWS account [meets the requirements listed above](#prerequisites). + +--- + +## Step 3: Configure the agent + +After the stack is installed, you'll be prompted to configure it. In the ECS wizard, fill in the following fields: + +- **Dagster+ Organization**: Enter the name of your Dagster+ organization. +- **Dagster+ Deployment**: Enter the name of the Dagster+ deployment you want to use. Leave this field empty if the agent will only serve Branch deployments. +- **Enable Branch Deployments**: Whether to have this agent serve your ephemeral [Branch deployments](/dagster-plus/managing-deployments/branch-deployments). Only a single agent should have this setting enabled. +- **Agent Token**: Paste the agent token you generated in [Step 1](#step-1-generate-a-dagster-agent-token). + +The page should look similar to the following image. In this example, our organization name is `hooli` and our deployment is `prod`: + + + +After you've finished configuring the stack in AWS, you can view the agent in Dagster+. To do so, navigate to the **Status** page and click the **Agents** tab. You should see the agent running in the **Agent statuses** section: + + + +--- + +## Next steps + +Now that you've got your agent running, what's next? 
+ +- **If you're getting Dagster+ set up**, the next step is to [add a code location](/dagster-plus/managing-deployments/code-locations) using the agent. + +- **If you're ready to load your Dagster code**, refer to the [Adding Code to Dagster+](/dagster-plus/managing-deployments/code-locations) guide for more info. + +If you need to upgrade your ECS agent's CloudFormation template, refer to the [upgrade guide](/dagster-plus/deployment/agents/amazon-ecs/upgrading-cloudformation-template) for more info. diff --git a/docs/content/dagster-cloud/deployment/agents/amazon-ecs/manually-provisioning-ecs-agent.mdx b/docs/content/dagster-plus/deployment/agents/amazon-ecs/manually-provisioning-ecs-agent.mdx similarity index 85% rename from docs/content/dagster-cloud/deployment/agents/amazon-ecs/manually-provisioning-ecs-agent.mdx rename to docs/content/dagster-plus/deployment/agents/amazon-ecs/manually-provisioning-ecs-agent.mdx index 5533ef0628f33..7fdbdf72cc051 100644 --- a/docs/content/dagster-cloud/deployment/agents/amazon-ecs/manually-provisioning-ecs-agent.mdx +++ b/docs/content/dagster-plus/deployment/agents/amazon-ecs/manually-provisioning-ecs-agent.mdx @@ -6,7 +6,7 @@ platform_type: "cloud" # Creating a manually-provisioned Amazon Elastic Container Services agent -This guide is applicable to Dagster Cloud. +This guide is applicable to Dagster+. In this guide, you'll manually set up and deploy an Amazon Elastic Container Service (ECS) agent. Amazon ECS agents are used to launch user code in ECS tasks. @@ -18,10 +18,10 @@ This method of setting up an Amazon ECS agent is a good option if you're comfort To complete the steps in this guide, you'll need: -- **In Dagster Cloud**: +- **In Dagster+**: - **Your organization and deployment names.** - - **Permissions in Dagster Cloud that allow you to manage agent tokens**. Refer to the [User permissions documentation](/dagster-cloud/account/managing-users) for more info. + - **Permissions in Dagster+ that allow you to manage agent tokens**. Refer to the [User permissions documentation](/dagster-plus/account/managing-users) for more info. - **Permissions in Amazon Web Services (AWS) that allow you to:** @@ -32,7 +32,7 @@ To complete the steps in this guide, you'll need: --- -## Step 1: Generate a Dagster Cloud agent token +## Step 1: Generate a Dagster+ agent token @@ -125,7 +125,7 @@ To successfully run your ECS agent, you'll need to have the following IAM roles - Building it into your image - Echoing it to a file in your task definition's command **before starting the agent** - Refer to the [ECS configuration reference](/dagster-cloud/deployment/agents/amazon-ecs/configuration-reference#per-deployment-configuration) for more info about the required fields. + Refer to the [ECS configuration reference](/dagster-plus/deployment/agents/amazon-ecs/configuration-reference#per-deployment-configuration) for more info about the required fields. --- @@ -133,6 +133,6 @@ To successfully run your ECS agent, you'll need to have the following IAM roles Now that you've got your agent running, what's next? -- **If you're getting Dagster Cloud set up**, the next step is to [add a code location](/dagster-cloud/managing-deployments/code-locations) using the agent. +- **If you're getting Dagster+ set up**, the next step is to [add a code location](/dagster-plus/managing-deployments/code-locations) using the agent. 
-- **If you're ready to load your Dagster code**, refer to the [Adding Code to Dagster Cloud](/dagster-cloud/managing-deployments/code-locations) guide for more info. +- **If you're ready to load your Dagster code**, refer to the [Adding Code to Dagster+](/dagster-plus/managing-deployments/code-locations) guide for more info. diff --git a/docs/content/dagster-cloud/deployment/agents/amazon-ecs/upgrading-cloudformation-template.mdx b/docs/content/dagster-plus/deployment/agents/amazon-ecs/upgrading-cloudformation-template.mdx similarity index 75% rename from docs/content/dagster-cloud/deployment/agents/amazon-ecs/upgrading-cloudformation-template.mdx rename to docs/content/dagster-plus/deployment/agents/amazon-ecs/upgrading-cloudformation-template.mdx index d5077e116fcc5..15ed00d5b2863 100644 --- a/docs/content/dagster-cloud/deployment/agents/amazon-ecs/upgrading-cloudformation-template.mdx +++ b/docs/content/dagster-plus/deployment/agents/amazon-ecs/upgrading-cloudformation-template.mdx @@ -4,9 +4,9 @@ title: Upgrading CloudFormation for an Amazon Elastic Container Services agent | # Upgrading CloudFormation for an Amazon Elastic Container Services agent -This guide is applicable to Dagster Cloud. +This guide is applicable to Dagster+. -In this guide, we'll show you how to upgrade an existing [Amazon Elastic Container Services (ECS) agent](/dagster-cloud/deployment/agents/amazon-ecs/creating-ecs-agent-new-vpc)'s CloudFormation template. +In this guide, we'll show you how to upgrade an existing [Amazon Elastic Container Services (ECS) agent](/dagster-plus/deployment/agents/amazon-ecs/creating-ecs-agent-new-vpc)'s CloudFormation template. **Note**: To complete the steps in this guide, you'll need [permissions in Amazon Web Services (AWS) that allow you to manage ECS agents](https://docs.aws.amazon.com/AmazonECS/latest/developerguide/security-iam-awsmanpol.html). @@ -14,9 +14,9 @@ In this guide, we'll show you how to upgrade an existing [Amazon Elastic Contain 2. Navigate to the deployed stack and click **Update**. -3. Select **Replace current template**. You can specify a specific Dagster Cloud version or upgrade to the latest template. +3. Select **Replace current template**. You can specify a specific Dagster+ version or upgrade to the latest template. 
- **If you have deployed your agent into [its own VPC](/dagster-cloud/deployment/agents/amazon-ecs/creating-ecs-agent-new-vpc), use the following:** + **If you have deployed your agent into [its own VPC](/dagster-plus/deployment/agents/amazon-ecs/creating-ecs-agent-new-vpc), use the following:** To use the latest template: @@ -26,7 +26,7 @@ In this guide, we'll show you how to upgrade an existing [Amazon Elastic Contain https://s3.amazonaws.com/dagster.cloud/cloudformation/ecs-agent-vpc-1-0-3.yaml - **If you are deploying the agent into an [existing VPC](/dagster-cloud/deployment/agents/amazon-ecs/creating-ecs-agent-existing-vpc), use the following:** + **If you are deploying the agent into an [existing VPC](/dagster-plus/deployment/agents/amazon-ecs/creating-ecs-agent-existing-vpc), use the following:** To use the latest template: diff --git a/docs/content/dagster-plus/deployment/agents/customizing-configuration.mdx b/docs/content/dagster-plus/deployment/agents/customizing-configuration.mdx new file mode 100644 index 0000000000000..5d08ef069fb1d --- /dev/null +++ b/docs/content/dagster-plus/deployment/agents/customizing-configuration.mdx @@ -0,0 +1,140 @@ +--- +title: Customizing agent configuration | Dagster Docs +description: Configure your agent. + +platform_type: "cloud" +--- + +# Customizing agent configuration + +This guide is applicable to Dagster+. + +The Dagster+ Agent is a special variant of the Dagster instance used in [Dagster Open Source](/deployment/dagster-instance) and is configured through the same `dagster.yaml` file. You can customize your agent with these settings. + +**Note:** For [Kubernetes agents](/dagster-plus/deployment/agents/kubernetes/configuring-running-kubernetes-agent) deployed with the Dagster+ Helm chart, you'll need to refer to the Helm chart's config map for customizing the agent. + +--- + +## Enabling user code server TTL + +User code servers support a configurable time-to-live (TTL). The agent will spin down any user code servers that haven’t served requests recently and will spin them back up the next time they’re needed. Configuring TTL can save compute cost because user code servers will spend less time sitting idle. + +To configure TTL: + +```yaml +# dagster.yaml +instance_class: + module: dagster_cloud.instance + class: DagsterCloudAgentInstance + +dagster_cloud_api: + agent_token: + env: DAGSTER_CLOUD_AGENT_TOKEN + deployment: prod + +user_code_launcher: + module: dagster_cloud.workspace.docker + class: DockerUserCodeLauncher + config: + server_ttl: + enabled: true + ttl_seconds: 7200 #2 hours +``` + +--- + +## Streaming compute logs + +You can set up streaming compute logs by configuring the log upload interval (in seconds). 
+ +```yaml +# dagster.yaml +instance_class: + module: dagster_cloud.instance + class: DagsterCloudAgentInstance + +dagster_cloud_api: + agent_token: + env: DAGSTER_CLOUD_AGENT_TOKEN + deployment: prod + +user_code_launcher: + module: dagster_cloud.workspace.docker + class: DockerUserCodeLauncher + +compute_logs: + module: dagster_cloud + class: CloudComputeLogManager + config: + upload_interval: 60 +``` + +--- + +## Disabling compute logs + +You can disable forwarding compute logs to Dagster+ by configuring the `NoOpComputeLogManager` setting: + +```yaml +# dagster.yaml +instance_class: + module: dagster_cloud.instance + class: DagsterCloudAgentInstance + +dagster_cloud_api: + agent_token: + env: DAGSTER_CLOUD_AGENT_TOKEN + deployment: prod + +user_code_launcher: + module: dagster_cloud.workspace.docker + class: DockerUserCodeLauncher + +compute_logs: + module: dagster.core.storage.noop_compute_log_manager + class: NoOpComputeLogManager +``` + +--- + +## Writing compute logs to AWS S3 + +You can write compute logs to an AWS S3 bucket by configuring the [dagster_aws.s3.compute_log_manager](https://docs.dagster.io/\_apidocs/libraries/dagster-aws#dagster_aws.s3.S3ComputeLogManager) module. + +You are also able to stream partial compute log files by configuring the log upload interval (in seconds) using the `upload_interval` parameter. + +Note: Dagster Labs will neither have nor use your AWS credentials. The Dagster+ UI will be able to show the URLs linking to the compute log files in your S3 bucket when you set the `show_url_only` parameter to `true`. + +```yaml +# dagster.yaml +instance_class: + module: dagster_cloud.instance + class: DagsterCloudAgentInstance + +dagster_cloud_api: + agent_token: + env: DAGSTER_CLOUD_AGENT_TOKEN + deployment: prod + +user_code_launcher: + module: dagster_cloud.workspace.docker + class: DockerUserCodeLauncher + +compute_logs: + module: dagster_aws.s3.compute_log_manager + class: S3ComputeLogManager + config: + bucket: "mycorp-dagster-compute-logs" + local_dir: "/tmp/cool" + prefix: "dagster-test-" + use_ssl: true + verify: true + verify_cert_path: "/path/to/cert/bundle.pem" + endpoint_url: "http://alternate-s3-host.io" + skip_empty_files: true + upload_interval: 30 + upload_extra_args: + ServerSideEncryption: "AES256" + show_url_only: true + region: "us-west-1" +``` diff --git a/docs/content/dagster-plus/deployment/agents/docker.mdx b/docs/content/dagster-plus/deployment/agents/docker.mdx new file mode 100644 index 0000000000000..b2793f2c1143a --- /dev/null +++ b/docs/content/dagster-plus/deployment/agents/docker.mdx @@ -0,0 +1,20 @@ +--- +title: Dagster+ Docker agents | Dagster Docs + +platform_type: "cloud" +--- + +# Dagster+ Docker agents + +Used with a Dagster+ [Hybrid deployment](/dagster-plus/deployment/hybrid), the Docker agent executes Dagster jobs in Docker containers on your computer. + + + + + diff --git a/docs/content/dagster-plus/deployment/agents/docker/configuration-reference.mdx b/docs/content/dagster-plus/deployment/agents/docker/configuration-reference.mdx new file mode 100644 index 0000000000000..e2b7782617b04 --- /dev/null +++ b/docs/content/dagster-plus/deployment/agents/docker/configuration-reference.mdx @@ -0,0 +1,22 @@ +--- +title: Docker agent configuration reference | Dagster Docs + +platform_type: "cloud" +--- + +# Docker agent configuration reference + +This guide is applicable to Dagster+. 
+ +This reference describes the various configuration options Dagster+ currently supports for [Docker agents](/dagster-plus/deployment/agents/docker/configuring-running-docker-agent). + +--- + +## Environment variables and secrets + + + +Refer to the following guides for more info about environment variables: + +- [Dagster+ environment variables and secrets](/dagster-plus/managing-deployments/environment-variables-and-secrets) +- [Using environment variables and secrets in Dagster code](/guides/dagster/using-environment-variables-and-secrets) diff --git a/docs/content/dagster-cloud/deployment/agents/docker/configuring-running-docker-agent.mdx b/docs/content/dagster-plus/deployment/agents/docker/configuring-running-docker-agent.mdx similarity index 81% rename from docs/content/dagster-cloud/deployment/agents/docker/configuring-running-docker-agent.mdx rename to docs/content/dagster-plus/deployment/agents/docker/configuring-running-docker-agent.mdx index 05e4c2f261cce..9d0dd1ae1a16a 100644 --- a/docs/content/dagster-cloud/deployment/agents/docker/configuring-running-docker-agent.mdx +++ b/docs/content/dagster-plus/deployment/agents/docker/configuring-running-docker-agent.mdx @@ -6,7 +6,7 @@ platform_type: "cloud" # Configuring and running a Docker agent -This guide is applicable to Dagster Cloud. +This guide is applicable to Dagster+. In this guide, you'll configure and run a Docker agent. Docker agents are used to launch your code in Docker containers. @@ -16,7 +16,7 @@ In this guide, you'll configure and run a Docker agent. Docker agents are used t To complete the steps in this guide, you'll need: -- **Permissions in Dagster Cloud that allow you to manage agent tokens**. Refer to the [User permissions documentation](/dagster-cloud/account/managing-users) for more info. +- **Permissions in Dagster+ that allow you to manage agent tokens**. Refer to the [User permissions documentation](/dagster-plus/account/managing-users) for more info. - **To have Docker installed** - **Access to a container registry to which you can push images with Dagster code.** Additionally, your Docker agent must have the permissions required to pull images from the registry. @@ -28,7 +28,7 @@ To complete the steps in this guide, you'll need: --- -## Step 1: Generate a Dagster Cloud agent token +## Step 1: Generate a Dagster+ agent token @@ -64,10 +64,10 @@ To complete the steps in this guide, you'll need: 3. In the file, fill in the following: - - `agent_token` - Add the agent token you created in [Step 1](#step-1-generate-a-dagster-cloud-agent-token) + - `agent_token` - Add the agent token you created in [Step 1](#step-1-generate-a-dagster-agent-token) - `deployment` - Enter the deployment associated with this instance of the agent. - In the above example, we specified `prod` as the deployment. This is present when Dagster Cloud organizations are first created. + In the above example, we specified `prod` as the deployment. This is present when Dagster+ organizations are first created. 4. Save the file. @@ -91,7 +91,7 @@ This command: - Starts the agent with your local `dagster.yaml` mounted as a volume - Starts the system Docker socket mounted as a volume, allowing the agent to launch containers. -To view the agent in Dagster Cloud, navigate to the **Status** page and click the **Agents** tab. You should see the agent running in the **Agent statuses** section: +To view the agent in Dagster+, click the Dagster icon in the top left to navigate to the **Status** page and click the **Agents** tab. 
You should see the agent running in the **Agent statuses** section:
+
+(Image: Instance Status)
+
diff --git a/docs/content/dagster-plus/deployment/agents/kubernetes/configuration-reference.mdx b/docs/content/dagster-plus/deployment/agents/kubernetes/configuration-reference.mdx
new file mode 100644
index 0000000000000..3116fabe2cb4c
--- /dev/null
+++ b/docs/content/dagster-plus/deployment/agents/kubernetes/configuration-reference.mdx
@@ -0,0 +1,191 @@
+---
+title: Kubernetes agent configuration reference | Dagster Docs
+
+platform_type: "cloud"
+---
+
+# Kubernetes agent configuration reference
+
+This guide is applicable to Dagster+.
+
+This reference describes the various configuration options Dagster+ currently supports for [Kubernetes agents](/dagster-plus/deployment/agents/kubernetes/configuring-running-kubernetes-agent).
+
+---
+
+## Viewing the Helm chart
+
+To see the different customizations that can be applied to the Kubernetes agent, you can view the chart's default values:
+
+```shell
+helm repo add dagster-cloud https://dagster-io.github.io/helm-user-cloud
+helm repo update
+helm show values dagster-cloud/dagster-cloud-agent
+```
+
+You can also view the chart values on [ArtifactHub](https://artifacthub.io/packages/helm/dagster-cloud/dagster-cloud-agent?modal=values).
+
+---
+
+## Per-deployment configuration
+
+The [`workspace`](https://artifacthub.io/packages/helm/dagster-cloud/dagster-cloud-agent?modal=values) value of the Helm chart provides the ability to add configuration for all jobs that are spun up by the agent, across all repositories. To add secrets or mounted volumes to all Kubernetes Pods, you can specify your desired configuration under this value.
+
+Additionally, the [`imagePullSecrets`](https://artifacthub.io/packages/helm/dagster-cloud/dagster-cloud-agent?modal=values) value allows you to specify a list of secrets that should be included when pulling the images for your containers.
+
+---
+
+## Per-location configuration
+
+When [adding a code location](/dagster-plus/managing-deployments/code-locations) to Dagster+ with a Kubernetes agent, you can use the `container_context` key on the location configuration to add additional Kubernetes-specific configuration. If you're using the Dagster+ GitHub action, the `container_context` key can also be set for each location in your `dagster_cloud.yaml` file, using the same format.
+ +The following example [`dagster_cloud.yaml`](/dagster-plus/managing-deployments/dagster-cloud-yaml) file illustrates the available fields: + +```yaml +# dagster_cloud.yaml + +locations: + - location_name: cloud-examples + image: dagster/dagster-cloud-examples:latest + code_source: + package_name: dagster_cloud_examples + container_context: + k8s: + env_config_maps: + - my_config_map + env_secrets: + - my_secret + env_vars: + - FOO_ENV_VAR=foo_value + - BAR_ENV_VAR + image_pull_policy: Always + image_pull_secrets: + - name: my_image_pull_secret + labels: + my_label_key: my_label_value + namespace: my_k8s_namespace + service_account_name: my_service_account_name + volume_mounts: + - mount_path: /opt/dagster/test_mount_path/volume_mounted_file.yaml + name: test-volume + sub_path: volume_mounted_file.yaml + volumes: + - name: test-volume + config_map: + name: test-volume-configmap + server_k8s_config: # Raw kubernetes config for code servers launched by the agent + pod_spec_config: + node_selector: + disktype: standard + container_config: + resources: + limits: + cpu: 100m + memory: 128Mi + run_k8s_config: # Raw kubernetes config for runs launched by the agent + pod_spec_config: + node_selector: + disktype: ssd + container_config: + resources: + limits: + cpu: 500m + memory: 1024Mi +``` + +### Environment variables and secrets + +Using the `container_context.k8s.env_vars` and `container_context.k8s.env_secrets` properties, you can specify environment variables and secrets for a specific code location. For example: + +```yaml +# dagster_cloud.yaml + +location: + - location_name: cloud-examples + image: dagster/dagster-cloud-examples:latest + code_source: + package_name: dagster_cloud_examples + container_context: + k8s: + env_vars: + - database_name + - database_username=hooli_testing + env_secrets: + - database_password +``` + + + + A list of environment variable names to inject into the job, formatted as{" "} + KEY or KEY=VALUE. If only KEY is + specified, the value will be pulled from the current process. + + + A list of secret names, from which environment variables for a job are drawn + using envFrom. Refer to the{" "} + + Kubernetes documentation + {" "} + for more info. + + + +Refer to the following guides for more info about environment variables: + +- [Dagster+ environment variables and secrets](/dagster-plus/managing-deployments/environment-variables-and-secrets) +- [Using environment variables and secrets in Dagster code](/guides/dagster/using-environment-variables-and-secrets) + +--- + +## Op isolation + +By default, each Dagster job will run in its own Kubernetes pod, with each op running in its own subprocess within the pod. + +You can also configure your Dagster job with the [`k8s_job_executor`](https://docs.dagster.io/\_apidocs/libraries/dagster-k8s#dagster_k8s.k8s_job_executor) to run each op in its own Kubernetes pod. For example: + +```python +from dagster import job +from dagster_k8s import k8s_job_executor + +@job(executor_def=k8s_job_executor) +def k8s_job(): + ... +``` + +--- + +## Per-job and per-op configuration + +To add configuration to specific Dagster jobs, ops, or assets, use the `dagster-k8s/config` tag. For example, to specify that a job should have certain resource limits when it runs. Refer to [Customizing your Kubernetes deployment for Dagster Open Source](/deployment/guides/kubernetes/customizing-your-deployment#per-job-kubernetes-configuration) for more info. 
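+
+As a concrete illustration, the sketch below tags a job so that its run pod requests and limits specific resources. This is a minimal sketch: the `dagster-k8s/config` tag structure follows the Dagster OSS guide linked above, and the job name and resource values are illustrative assumptions, not recommendations.
+
+```python
+from dagster import job
+
+
+@job(
+    tags={
+        "dagster-k8s/config": {
+            # Raw Kubernetes config applied to this job's run pod
+            "container_config": {
+                "resources": {
+                    "requests": {"cpu": "250m", "memory": "64Mi"},
+                    "limits": {"cpu": "500m", "memory": "2560Mi"},
+                },
+            },
+        },
+    },
+)
+def memory_hungry_job():
+    ...
+```
+
+Tags set this way are applied on top of any `container_context` configured for the code location, so they're a good fit for one-off overrides on individual jobs.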
+
+---
+
+## Running as a non-root user
+
+Starting in 0.14.0, the provided `docker.io/dagster/dagster-cloud-agent` image offers a non-root user with id `1001`. To run the agent with this user, you can specify the [`dagsterCloudAgent`](https://artifacthub.io/packages/helm/dagster-cloud/dagster-cloud-agent?modal=values) value in the Helm chart to be:
+
+```yaml
+dagsterCloudAgent:
+  podSecurityContext:
+    runAsUser: 1001
+```
+
+We plan to make this user the default in a future release.
+
+---
+
+## Grant AWS permissions
+
+You can provide your Dagster pods with [permissions to assume an AWS IAM role](https://docs.aws.amazon.com/eks/latest/userguide/iam-roles-for-service-accounts.html) using a [Service Account](https://kubernetes.io/docs/tasks/configure-pod-container/configure-service-account/). For example, you might do this to [configure an S3 IO Manager](/deployment/guides/aws#using-s3-for-io-management).
+
+1. [Create an IAM OIDC provider for your EKS cluster](https://docs.aws.amazon.com/eks/latest/userguide/enable-iam-roles-for-service-accounts.html)
+2. [Create an IAM role and attach IAM policies](https://docs.aws.amazon.com/eks/latest/userguide/associate-service-account-role.html)
+3. Update the [Helm chart](#viewing-the-helm-chart) to associate the IAM role with a service account:
+
+   ```yaml
+   serviceAccount:
+     create: true
+     annotations:
+       eks.amazonaws.com/role-arn: "arn:aws:iam::1234567890:role/my_service_account_role"
+   ```
+
+This will allow your agent and the pods it creates to assume the `my_service_account_role` IAM role.
diff --git a/docs/content/dagster-plus/deployment/agents/kubernetes/configuring-running-kubernetes-agent.mdx b/docs/content/dagster-plus/deployment/agents/kubernetes/configuring-running-kubernetes-agent.mdx
new file mode 100644
index 0000000000000..8def551518385
--- /dev/null
+++ b/docs/content/dagster-plus/deployment/agents/kubernetes/configuring-running-kubernetes-agent.mdx
@@ -0,0 +1,116 @@
+---
+title: Configuring and running a Kubernetes agent | Dagster Docs
+
+platform_type: "cloud"
+---
+
+# Configuring and running a Kubernetes agent
+
+This guide is applicable to Dagster+.
+
+In this guide, you'll configure and run a Kubernetes agent. Kubernetes agents are used to launch your code in Kubernetes Jobs and Services.
+
+---
+
+## Prerequisites
+
+To complete the steps in this guide, you'll need:
+
+- **Permissions in Dagster+ that allow you to manage agent tokens**. Refer to the [User permissions documentation](/dagster-plus/account/managing-users) for more info.
+
+- **A Kubernetes cluster into which you can deploy the agent.** This can be a self-hosted Kubernetes cluster or a managed offering like [Amazon EKS](https://aws.amazon.com/eks/), [Azure AKS](https://azure.microsoft.com/en-us/services/kubernetes-service/#overview), or [Google GKE](https://cloud.google.com/kubernetes-engine).
+
+- **Access to a container registry to which you can push images and from which pods in the Kubernetes cluster can pull images.** This can be:
+
+  - A self-hosted registry,
+  - A public registry such as [DockerHub](https://hub.docker.com/), or
+  - A managed offering such as [Amazon ECR](https://aws.amazon.com/ecr/), [Azure ACR](https://azure.microsoft.com/en-us/services/container-registry/#overview), or [Google GCR](https://cloud.google.com/container-registry)
+
+- **To have Helm installed.** Refer to the [Helm installation documentation](https://helm.sh/docs/intro/install/) for more info.
+
+---
+
+## Step 1: Generate a Dagster+ agent token
+
+---
+
+## Step 2: Configure the agent
+
+In this step, you'll create a Kubernetes namespace for your Dagster+ resources. You'll also add the agent token to the Kubernetes cluster.
+
+1. Create a Kubernetes namespace for your Dagster+ resources:
+
+   ```shell
+   kubectl create namespace <your-namespace>
+   ```
+
+   For example, if the namespace is `dagster-plus`:
+
+   ```shell
+   kubectl create namespace dagster-plus
+   ```
+
+2. Add the agent token you created in [Step 1](#step-1-generate-a-dagster-agent-token) as a secret in the Kubernetes cluster:
+
+   ```shell
+   kubectl create secret generic dagster-cloud-agent-token \
+     --from-literal=DAGSTER_CLOUD_AGENT_TOKEN=<your-agent-token> \
+     --namespace dagster-plus
+   ```
+
+---
+
+## Step 3: Start the agent
+
+In this step, you'll spin up the agent with Helm.
+
+1. Add the [agent chart repository](https://dagster-io.github.io/helm-user-cloud):
+
+   ```shell
+   helm repo add dagster-cloud https://dagster-io.github.io/helm-user-cloud
+   helm repo update
+   ```
+
+2. Create a `values.yaml` file, indicating the Dagster+ deployment the agent will be responsible for. **Note**: When your Dagster+ organization is first created, the default deployment is `prod`:
+
+   ```yaml
+   dagsterCloud:
+     deployment: prod
+   ```
+
+3. Install the agent chart, specifying:
+
+   - The namespace into which to install the chart
+   - The `values.yaml` file created in the previous step
+
+   ```shell
+   helm upgrade \
+     --install user-cloud dagster-cloud/dagster-cloud-agent \
+     --namespace dagster-plus \
+     --values ./values.yaml
+   ```
+
+After the Helm chart is installed into the Kubernetes cluster, you can view the agent in Dagster+.
+
+In Dagster+, navigate to the **Status** page and click the **Agents** tab. You should see the agent running in the **Agent statuses** section:
+
+---
+
+## Next steps
+
+Now that you've got your agent running, what's next?
+
+- **If you're getting Dagster+ set up**, the next step is to [add a code location](/dagster-plus/managing-deployments/code-locations) using the agent.
+
+- **If you're ready to load your Dagster code**, refer to the [Adding Code to Dagster+](/dagster-plus/managing-deployments/code-locations) guide for more info.
+
+You can also further [configure the jobs the agent runs](/dagster-plus/deployment/agents/kubernetes/configuration-reference), including adding secrets, mounting volumes, and setting resource limits.
diff --git a/docs/content/dagster-plus/deployment/agents/local.mdx b/docs/content/dagster-plus/deployment/agents/local.mdx
new file mode 100644
index 0000000000000..23b879d0c8ea0
--- /dev/null
+++ b/docs/content/dagster-plus/deployment/agents/local.mdx
@@ -0,0 +1,102 @@
+---
+title: Configuring and running a local agent | Dagster Docs
+
+platform_type: "cloud"
+---
+
+# Configuring and running a local agent
+
+This guide is applicable to Dagster+.
+
+In this guide, you'll configure and run a local agent. Local agents are used to launch user code in operating system subprocesses.
+
+**Considering running the agent in production?** Running the local agent in production can be a good choice if:
+
+- Load is relatively light (guaranteed to fit on a single node)
+- Jobs aren't computationally intensive and don't use much memory
+- A Kubernetes cluster is prohibitively difficult to set up
+- You can restart the agent when you update your code
+
+Keep in mind that the local agent's ability to run jobs is limited by the capacity of the single node on which it's running.
+
+If you're running the local agent in production, make sure you've set up a supervisor to automatically restart the agent process if it crashes. You'll also want a system in place to alert you if the VM or container dies, or to automatically restart it.
+
+---
+
+## Prerequisites
+
+To complete the steps in this guide, you'll need:
+
+- **Permissions in Dagster+ that allow you to manage agent tokens**. Refer to the [User permissions documentation](/dagster-plus/account/managing-users) for more info.
+- **To install the [`dagster-cloud` CLI](/dagster-plus/managing-deployments/dagster-plus-cli)**. This should be in the same environment where the agent will run.
+
+  **Note**: Your Dagster application code and its Python and system requirements must also be installed in this environment. We recommend using Python [virtual environments](https://packaging.python.org/guides/installing-using-pip-and-virtual-environments/#creating-a-virtual-environment) to accomplish this.
+
+---
+
+## Step 1: Generate a Dagster+ agent token
+
+---
+
+## Step 2: Configure the agent
+
+1. Create a directory on disk to act as your Dagster home. We'll use `~/dagster_home` in our examples, but this directory can be located wherever you want.
+
+2. In the directory you created, create a `dagster.yaml` file like the following. The `ProcessUserCodeLauncher` specifies that the agent will launch work in local processes:
+
+   ```yaml
+   # ~/dagster_home/dagster.yaml
+
+   instance_class:
+     module: dagster_cloud.instance
+     class: DagsterCloudAgentInstance
+
+   dagster_cloud_api:
+     agent_token:
+     deployment: prod
+
+   user_code_launcher:
+     module: dagster_cloud.workspace.user_code_launcher
+     class: ProcessUserCodeLauncher
+   ```
+
+3. In the file, fill in the following:
+
+   - `agent_token` - Add the agent token you created in [Step 1](#step-1-generate-a-dagster-agent-token).
+
+   - `deployment` - Enter the deployment associated with this instance of the agent.
+
+     In the above example, we specified `prod` as the deployment. This is present when Dagster+ organizations are first created.
+
+4. Save the file.
+
+---
+
+## Step 3: Run the agent
+
+Next, run the agent, pointing it at the home directory you created:
+
+```shell
+dagster-cloud agent run ~/dagster_home/
+```
+
+To view the agent in Dagster+, click the Dagster icon in the top left to navigate to the **Status** page and click the **Agents** tab. You should see the agent running in the **Agent statuses** section:
+
+---
+
+## Next steps
+
+Now that you've got your agent running, what's next?
+
+- **If you're getting Dagster+ set up**, the next step is to [add a code location](/dagster-plus/managing-deployments/code-locations) using the agent.
+
+- **If you're ready to load your Dagster code**, refer to the [Adding Code to Dagster+](/dagster-plus/managing-deployments/code-locations) guide for more info.
diff --git a/docs/content/dagster-cloud/deployment/agents/running-multiple-agents.mdx b/docs/content/dagster-plus/deployment/agents/running-multiple-agents.mdx similarity index 79% rename from docs/content/dagster-cloud/deployment/agents/running-multiple-agents.mdx rename to docs/content/dagster-plus/deployment/agents/running-multiple-agents.mdx index fe4f350df2312..153bfc7a33b26 100644 --- a/docs/content/dagster-cloud/deployment/agents/running-multiple-agents.mdx +++ b/docs/content/dagster-plus/deployment/agents/running-multiple-agents.mdx @@ -5,11 +5,9 @@ platform_type: "cloud" # Running multiple agents -This guide is applicable to Dagster Cloud. +This guide is applicable to Dagster+. -Each Dagster Cloud full deployment (e.g., `prod`) needs to have at least one agent running. A single agent is adequate for many use cases, but you may want to run multiple agents to provide redundancy if a single agent goes down. - -It's recommended to only use multiple agents of the same type (e.g. multiple Kubernetes agents). +Each Dagster+ full deployment (e.g., `prod`) needs to have at least one agent running. A single agent is adequate for many use cases, but you may want to run multiple agents to provide redundancy if a single agent goes down. --- @@ -125,7 +123,7 @@ dagsterCloud: ### In Amazon ECS -The `isolated_agents` option can be set as per-deployment configuration on the `dagster.yaml` file used by your agent. See the [ECS configuration reference](/dagster-cloud/deployment/agents/amazon-ecs/configuration-reference#per-deployment-configuration) guide for more information. +The `isolated_agents` option can be set as per-deployment configuration on the `dagster.yaml` file used by your agent. See the [ECS configuration reference](/dagster-plus/deployment/agents/amazon-ecs/configuration-reference#per-deployment-configuration) guide for more information.
    @@ -134,9 +132,9 @@ The `isolated_agents` option can be set as per-deployment configuration on the ` ## Routing requests to specific agents -Agent queues are a Dagster Cloud Enterprise feature. +Agent queues are a Dagster+ Pro feature. -Every Dagster Cloud agent serves requests from one or more queues. By default, requests for each code location are placed on a default queue and your agent will read requests only from that default queue. +Every Dagster+ agent serves requests from one or more queues. By default, requests for each code location are placed on a default queue and your agent will read requests only from that default queue. In some cases, you might want to route requests for certain code locations to specific agents. For example, routing requests for one code location to an agent running in an on-premise data center, but then routing requests for all other code locations to an agent running in AWS. @@ -144,7 +142,7 @@ To route requests for a code location to a specific agent, annotate the code loc ### Step 1: Define an agent queue for the code location -First, set an agent queue for the code location in your [`dagster_cloud.yaml`](/dagster-cloud/managing-deployments/dagster-cloud-yaml): +First, set an agent queue for the code location in your [`dagster_cloud.yaml`](/dagster-plus/managing-deployments/dagster-cloud-yaml): ```yaml # dagster_cloud.yaml @@ -191,7 +189,7 @@ helm upgrade \ Or if you're using a `values.yaml` file: ```yaml -dagsterCloued +dagsterCloud: agentQueues: # Continue to handle requests for code locations that aren't # assigned to a specific agent queue @@ -200,5 +198,21 @@ dagsterCloued - special-queue ``` +
    + + +#### In Amazon ECS + +Modify your ECS Cloud Formation template to add the following configuration to the `config.yaml` passed to the agent: + +```yaml +agent_queues: + # Continue to handle requests for code locations that aren't + # assigned to a specific agent queue + include_default_queue: true + additional_queues: + - special-queue +``` + diff --git a/docs/content/dagster-plus/deployment/hybrid.mdx b/docs/content/dagster-plus/deployment/hybrid.mdx new file mode 100644 index 0000000000000..f44f331ca860b --- /dev/null +++ b/docs/content/dagster-plus/deployment/hybrid.mdx @@ -0,0 +1,88 @@ +--- +title: Hybrid deployments in Dagster+ | Dagster Docs +--- + +# Hybrid deployments in Dagster+ + +This guide is applicable to Dagster+. + +In this guide, we'll cover how a Hybrid deployment works in Dagster+, including its system architecture, agent, and security. + +--- + +## Hybrid architecture overview + +A **hybrid deployment** utilizes a combination of your infrastructure and Dagster-hosted backend services. + +The Dagster backend services - including the web frontend, GraphQL API, metadata database, and daemons (responsible for executing schedules and sensors) - are hosted in Dagster+. You are responsible for running an [agent](/dagster-plus/deployment/agents) in your environment. + + + + + +Work is enqueued for your agent when: + +- Users interact with the web front end, +- The GraphQL API is queried, or +- Schedules and sensors tick + +The agent polls the agent API to see if any work needs to be done and launches user code as appropriate to fulfill requests. User code then streams metadata back to the agent API (GraphQL over HTTPS) to make it available in Dagster+. + +All user code runs within your environment, in isolation from Dagster system code. + +--- + +## The agent + +Because the agent communicates with the Dagster+ control plane over the agent API, it’s possible to support agents that operate in arbitrary compute environments. + +This means that over time, Dagster+’s support for different user deployment environments will expand and custom agents can take advantage of bespoke compute environments such as HPC. + +Refer to the [Agents documentation](/dagster-plus/deployment/agents) for more info, including the agents that are currently supported. + +--- + +## Security + +This section describes how Dagster+ interacts with user code. To summarize: + +- No ingress is required from Dagster+ to user environments +- Dagster+ doesn't have access to user code. Metadata about the code is fetched over constrained APIs. + +These highlights are described in more detail below: + +- [Interactions and queries](#interactions-and-queries) +- [Runs](#runs) +- [Ingress](#ingress) + +### Interactions and queries + +When Dagster+ needs to interact with user code - for instance, to display the structure of a job in the Dagster+ user interface, to run the body of a sensor definition, or to launch a run for a job - it enqueues a message for the Dagster+ Agent. The Dagster+ Agent picks up this message and then launches or queries user code running on the appropriate compute substrate. + +Depending on the agent implementation, user code may run in isolated OS processes, in Docker containers, in ECS Tasks, in Kubernetes Jobs and Services, or in a custom isolation strategy. + +Queries to user code run over a well-defined grpc interface. 
Dagster+ uses this interface to: + +- Retrieve the names, config schemas, descriptions, tags, and structures of jobs, ops, repositories, partitions, schedules, and sensors defined in your code +- Evaluate schedule and sensor ticks and determine whether a run should be launched + +When the agent queries user code, it writes the response back to Dagster+ over a well-defined GraphQL interface. + +### Runs + +Runs are launched by calling the `dagster api` CLI command in a separate process/container as appropriate to the agent type. Run termination is handled by interrupting the user code process/container as appropriate for the compute substrate. + +When runs are launched, the user code process/container streams structured metadata (containing everything that is viewable in the integrated logs viewer in the Dagster+ UI) back to Dagster+ over a well-defined GraphQL interface. Structured metadata is stored in Amazon RDS, encrypted at rest. + +At present, the run worker also uploads the compute logs (raw `stdout` and `stderr` from runs) to Dagster+. + +### Ingress + +No ingress is required from Dagster+ to user environments. All dataflow and network requests are unidirectional from user environments to Dagster+. + +**Note:** To ensure that user code remains completely isolated in the user environment, Dagster+ does not currently support previews of Dagstermill notebooks. Supporting these previews securely is a roadmap feature. diff --git a/docs/content/dagster-plus/deployment/serverless.mdx b/docs/content/dagster-plus/deployment/serverless.mdx new file mode 100644 index 0000000000000..ac6ff70cf7ff1 --- /dev/null +++ b/docs/content/dagster-plus/deployment/serverless.mdx @@ -0,0 +1,427 @@ +--- +title: "Serverless deployment in Dagster+ | Dagster Docs" +--- + +# Serverless deployment in Dagster+ + +This guide is applicable to Dagster+. + +Dagster+ Serverless is a fully managed version of Dagster+, and is the easiest way to get started with Dagster. With Serverless, you can run your Dagster jobs without spinning up any infrastructure. + +--- + +## When to choose Serverless + +Serverless works best with workloads that primarily orchestrate other services or perform light computation. Most workloads fit into this category, especially those that orchestrate third-party SaaS products like cloud data warehouses and ETL tools. + +If any of the following are applicable, you should select [Hybrid deployment](/dagster-plus/deployment/hybrid): + +- You require substantial computational resources. For example, training a large machine learning (ML) model in-process. +- Your dataset is too large to fit in memory. For example, training a large machine learning (ML) model in-process on a terabyte of data. +- You need to distribute computation across many nodes for a single run. Dagster+ runs currently execute on a single node with 4 CPUs. +- You don't want to add Dagster Labs as a data processor. + +--- + +## Limitations + +Serverless is subject to the following limitations: + +- Maximum of 100 GB of bandwidth per day +- Maximum of 4500 step-minutes per day +- Runs receive 4 vCPU cores, 16 GB of RAM and 128 GB of ephemeral disk +- Code locations receive 0.25 vCPU cores and 1 GB of RAM +- All Serverless jobs run in the United States + +Pro customers may request a quota increase by [contacting Sales](mailto:sales@dagsterlabs.com). 
+
+---
+
+## Getting started with Serverless
+
+- [With GitHub](#with-github)
+- [With Gitlab](#with-gitlab)
+- [Other (BitBucket or local development)](#other-bitbucket-or-local-development)
+- [Adding secrets](#adding-secrets)
+
+### With GitHub
+
+If you are a GitHub user, our GitHub integration is the fastest way to get started. It uses a GitHub app and GitHub Actions to set up, with a single click, a repo containing skeleton code and configuration consistent with Dagster+'s best practices.
+
+When you create a new Dagster+ organization, you'll be prompted to choose Serverless or Hybrid deployment. Once activated, our GitHub integration will scaffold a new git repo for you with Serverless and Branch Deployments already configured. Pushing to the `main` branch will deploy to your `prod` Serverless deployment. Pull requests will spin up ephemeral [branch deployments](/dagster-plus/managing-deployments/branch-deployments) using the Serverless agent.
+
+If you are importing a Dagster project that's in an existing GitHub repo:
+
+- The repo will need its [Workflow permissions](https://docs.github.com/en/repositories/managing-your-repositorys-settings-and-features/enabling-features-for-your-repository/managing-github-actions-settings-for-a-repository) setting set to `Read and write permissions`. Workflow permissions settings can be found in GitHub's `Settings` > `Actions` > `General` > `Workflow permissions`. In GitHub Enterprise, these permissions [are controlled at the Organization level](https://github.com/orgs/community/discussions/57244).
+
+- An initial commit will need to be merged directly to the repo's `main` branch to automatically add the GitHub Actions workflow files. If [branch protection rules](https://docs.github.com/en/repositories/configuring-branches-and-merges-in-your-repository/managing-protected-branches/about-protected-branches#about-branch-protection-rules) require changes to go through a pull request, the automatic setup will be prevented from completing.
+  - You can temporarily disable the branch protection rules and then re-enable them after the automatic setup completes. Alternatively, you can manually set up the GitHub Actions workflows. You can use our [dagster-cloud-serverless-quickstart repo](https://github.com/dagster-io/dagster-cloud-serverless-quickstart) as a template and its [README](https://github.com/dagster-io/dagster-cloud-serverless-quickstart/blob/main/README.md) as a guide.
+
+### With Gitlab
+
+If you are a Gitlab user, our Gitlab integration is the fastest way to get started. It uses a Gitlab app to set up, with a single click, a repo containing skeleton code and CI/CD configuration consistent with Dagster+'s best practices.
+
+When you create a new Dagster+ organization, you'll be prompted to choose Serverless or Hybrid deployment. Once activated, our Gitlab integration will scaffold a new git repo for you with Serverless and Branch Deployments already configured. Pushing to the `main` branch will deploy to your `prod` Serverless deployment. Pull requests will spin up ephemeral [branch deployments](/dagster-plus/managing-deployments/branch-deployments) using the Serverless agent.
+
+### Other (BitBucket or local development)
+
+If you don't want to use our GitHub/Gitlab integrations, we offer a powerful CLI that you can use in another CI environment or on your local laptop.
+
+First, [create a new project](https://docs.dagster.io/getting-started/create-new-project#create-a-new-project) with the Dagster open-source CLI.
+ +The below example uses our [quickstart_etl example project](https://github.com/dagster-io/dagster/tree/master/examples/quickstart_etl). For more info about the examples, visit the [Dagster GitHub repository](https://github.com/dagster-io/dagster/tree/master/examples). + +```shell +pip install dagster +dagster project from-example \ + --name my-dagster-project \ + --example quickstart_etl +``` + + + If using a different project, ensure that dagster-cloud is included as a dependency in your{" "} + setup.py or requirements.txt file. + +For example, in `my-dagster-project/setup.py`: + +```python +install_requires=[ + "dagster", + "dagster-cloud", # add this line + ... +``` + + + +Next, install the [`dagster-cloud` CLI](/dagster-plus/managing-deployments/dagster-plus-cli) and log in to your org. **Note**: The CLI requires a recent version of Python 3 and Docker. + +```shell +pip install dagster-cloud +dagster-cloud configure +``` + +You can also configure the `dagster-cloud` tool noninteractively; see [the CLI docs](/dagster-plus/managing-deployments/dagster-plus-cli#environment-variables-and-cli-options) for more information. + +Finally, deploy your project with Dagster+ Serverless: + +```shell +dagster-cloud serverless deploy-python-executable ./my-dagster-project \ + --location-name example \ + --package-name quickstart_etl \ + --python-version 3.12 +``` + +**Note:** Windows users should use the `deploy` command instead of `deploy-python-executable`. + +### Adding secrets + +Often you'll need to securely access secrets from your jobs. Dagster+ supports several methods for adding secrets - refer to the [Dagster+ environment variables and secrets documentation](/dagster-plus/managing-deployments/environment-variables-and-secrets) for more info. + +### Adding dependencies + +Any dependencies specified in either `requirements.txt` or `setup.py` will be installed for you automatically by the Dagster+ Serverless infrastructure. + +--- + +## Customizing the runtime environment + +Dagster+ Serverless packages your code as [PEX](https://pex.readthedocs.io) files and deploys them on Docker images. Using PEX files significantly reduces the time to deploy since it does not require building a new Docker image and provisioning a new container for every code change. Many apps will work fine with the default Dagster+ Serverless setup. However, some apps may need to make changes to the runtime environment, either to include data files, use a different base image, different Python version, or install some native dependencies. You can customize the runtime environment using various methods described below. + +### Including data files + +To add data files to your deployment, use the [Data Files Support](https://setuptools.pypa.io/en/latest/userguide/datafiles.html) built into Python's `setup.py`. This requires adding a `package_data` or `include_package_data` keyword in the call to `setup()` in `setup.py`. For example, given this directory structure: + + - setup.py + - my_dagster_project/ + - __init__.py + - repository.py + - data/ + - file1.txt + - file2.csv + +If you want to include the `data` folder, modify your `setup.py` to add the `package_data` line: + +```python +# setup.py +from setuptools import find_packages, setup + +if __name__ == "__main__": + setup( + name="my_dagster_project", + packages=find_packages(exclude=["my_dagster_project_tests"]), + # Add the following line. Here "data/*" is relative to the my_dagster_project sub directory. 
+        package_data={"my_dagster_project": ["data/*"]},
+        install_requires=[
+            "dagster",
+            ...
+        ],
+    )
+```
+
+### Using a different Python version
+
+The default version of Python for Serverless deployments is Python 3.8. Versions 3.9 through 3.12 are also supported. You can specify the version you want by updating your GitHub workflow or using the `--python-version` command line argument:
+
+- **With GitHub**: Change the `python_version` parameter for the `build_deploy_python_executable` job in your `.github/workflows` files. For example:
+
+  ```yaml
+  - name: Build and deploy Python executable
+    if: env.ENABLE_FAST_DEPLOYS == 'true'
+    uses: dagster-io/dagster-cloud-action/actions/build_deploy_python_executable@pex-v0.1
+    with:
+      dagster_cloud_file: "$GITHUB_WORKSPACE/project-repo/dagster_cloud.yaml"
+      build_output_dir: "$GITHUB_WORKSPACE/build"
+      python_version: "3.9" # Change this value to the desired Python version
+    env:
+      GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
+  ```
+
+- **With the CLI**: Add the `--python-version` CLI argument to the deploy command to specify the desired Python version:
+
+  ```shell
+  dagster-cloud serverless deploy-python-executable --location-name=my_location --python-version=3.9
+  ```
+
+### Using a different base image or using native dependencies
+
+Dagster+ runs your code on a Docker image that we build as follows:
+
+1. The standard Python "slim" [Docker image](https://hub.docker.com/_/python), such as `python:3.8-slim`, is used as the base.
+2. The `dagster-cloud[serverless]` module is installed in the image.
+
+Where possible, add dependencies by including the corresponding Python modules in your `setup.py`. When that is not possible, you can build and upload a custom base image that will be used to run your Python code.
+
+To build and upload the image, use the command line:
+
+1. Build your Docker image using `docker build` or your usual Docker toolchain. Ensure the `dagster-cloud[serverless]` dependency is included. You can do this by adding the following to your `Dockerfile`:
+
+   ```shell
+   RUN pip install "dagster-cloud[serverless]"
+   ```
+
+2. Upload your Docker image to Dagster+ using the `upload-base-image` command. Note that this command prints out the tag used in Dagster+ to identify your image:
+
+   ```shell
+   $ dagster-cloud serverless upload-base-image local-image:tag
+
+   ...
+   To use the uploaded image run: dagster-cloud serverless deploy-python-executable ... --base-image-tag=sha256_518ad2f92b078c63c60e89f0310f13f19d3a1c7ea9e1976d67d59fcb7040d0d6
+   ```
+
+3. To use a Docker image you have published to Dagster+, use the `--base-image-tag` value printed out by the above command.
+
+   - **With GitHub**: Set the `SERVERLESS_BASE_IMAGE_TAG` environment variable in your GitHub Actions configuration (usually at `.github/workflows/dagster-cloud-deploy.yml`):
+
+     ```yaml
+     env:
+       DAGSTER_CLOUD_URL: ...
+       DAGSTER_CLOUD_API_TOKEN: ...
+       SERVERLESS_BASE_IMAGE_TAG: "sha256_518ad2f92b078c63c60e89f0310f13f19d3a1c7ea9e1976d67d59fcb7040d0d6"
+     ```
+
+   - **With the CLI**: Add the `--base-image-tag` CLI argument to the deploy command:
+
+     ```shell
+     dagster-cloud serverless deploy-python-executable \
+       --location-name example \
+       --package-name assets_modern_data_stack \
+       --base-image-tag sha256_518ad2f92b078c63c60e89f0310f13f19d3a1c7ea9e1976d67d59fcb7040d0d6
+     ```
+
+### Disabling PEX-based deploys
+
+Prior to using PEX files, Dagster+ deployed code using Docker images. This feature is still available.
To deploy using a Docker image instead of PEX:
+
+- **With GitHub**: Delete the `ENABLE_FAST_DEPLOYS: 'true'` line in your GitHub Actions configuration (usually at `.github/workflows/dagster-cloud-deploy.yml`):
+
+  ```yaml
+  env:
+    DAGSTER_CLOUD_URL: ...
+    DAGSTER_CLOUD_API_TOKEN: ...
+    # ENABLE_FAST_DEPLOYS: 'true' # disabled
+  ```
+
+- **With the CLI**: Use the `deploy` command instead of the `deploy-python-executable` command:
+
+  ```shell
+  dagster-cloud serverless deploy \
+    --location-name example \
+    --package-name assets_modern_data_stack
+  ```
+
+The deployed Docker image can be customized using either lifecycle hooks or a custom base image.
+
+**Lifecycle hooks**
+
+This method is the easiest to set up, and does not require setting up any additional infrastructure.
+
+In the root of your repo, you can provide two optional shell scripts: `dagster_cloud_pre_install.sh` and `dagster_cloud_post_install.sh`. These will run before and after Python dependencies are installed. They are useful for installing any non-Python dependencies or otherwise configuring your environment.
+
+**Custom base image**
+
+This method is the most flexible, but requires setting up a pipeline outside of Dagster to build a custom base image.
+
+The default base image is `debian:bullseye-slim`, but it can be changed.
+
+- **With GitHub**: Provide a `base_image` input parameter to the **Build and deploy** step in your GitHub Actions configuration (usually at `.github/workflows/dagster-cloud-deploy.yml`):
+
+  ```yaml
+  - name: Build and deploy to Dagster+ serverless
+    uses: dagster-io/dagster-cloud-action/actions/serverless_prod_deploy@v0.1
+    with:
+      dagster_cloud_api_token: ${{ secrets.DAGSTER_CLOUD_API_TOKEN }}
+      location: ${{ toJson(matrix.location) }}
+      # Use a custom base image
+      base_image: "my_base_image:latest"
+      organization_id: ${{ secrets.ORGANIZATION_ID }}
+    env:
+      GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
+  ```
+
+- **With the CLI**: Add the `--base-image` CLI argument to the deploy command to specify the registry path to the desired base image:
+
+  ```shell
+  dagster-cloud serverless deploy --location-name=my_location --base-image=my_base_image:latest
+  ```
+
+---
+
+## Transitioning to Hybrid
+
+If your organization begins to hit the limitations of Serverless, you should transition to a Hybrid deployment. Hybrid deployments allow you to run an [agent in your own infrastructure](/dagster-plus/deployment/agents) and give you substantially more flexibility and control over the Dagster environment.
+
+To switch to Hybrid, navigate to **Status > Agents** in your Dagster+ account. On this page, an organization administrator can disable the Serverless agent and view instructions for enabling Hybrid.
+
+After changing the deployment type, you will need to update your code locations' images and configuration to be compatible with the type of Hybrid agent that you chose.
Complete the following steps to finalize the transition: + +- **Update your code locations' configuration in [`dagster_cloud.yaml`](/dagster-plus/managing-deployments/dagster-cloud-yaml) to work with your agent.** Refer to the reference for your agent type for more information: + + - [Amazon ECS](/dagster-plus/deployment/agents/amazon-ecs/configuration-reference#per-location-configuration) + - [Kubernetes](/dagster-plus/deployment/agents/kubernetes/configuration-reference#per-location-configuration) + - [Docker](/dagster-plus/deployment/agents/docker/configuration-reference) + +- **Update your build process** to publish a new container image and configuration for each code location. To use Dagster's CI/CD process, refer to [Step 4 of the Dagster+ getting started guide](/dagster-plus/getting-started#step-4-configure-cicd-for-your-project). + +- **Replace Serverless-only features with their Hybrid equivalents**: + - [**Lifecycle hooks**](#disabling-pex-based-deploys) - To customize a code location's runtime environment, customize the code location's [`Dockerfile`](https://github.com/dagster-io/dagster-cloud-hybrid-quickstart/blob/main/Dockerfile) to build its image + - [**Non-isolated runs**](#non-isolated-runs) - While this feature doesn't have a direct Hybrid equivalent, experiment with the [`in_process_executor` or `multiprocess_executor`](/\_apidocs/execution#executors) for specific jobs or entire code locations to reduce overhead. + +--- + +## Security and data protection + +Unlike Hybrid, Serverless Deployments on Dagster+ require direct access to your data, secrets and source code. + +- Secrets and source code are built into the image directly. Images are stored in a per-customer container registry with restricted access. +- User code is securely sandboxed using modern container sandboxing techniques. +- All production access is governed by industry-standard best practices which are regularly audited. + +### I/O management on Serverless + + + The default I/O manager cannot be used if you are a Serverless user who: +
- Works with personally identifiable information (PII)
- Works with private health information (PHI)
- Has signed a business associate agreement (BAA), or
- Is otherwise working with data subject to GDPR or other such regulations
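If any of these apply to you, one common remedy is to attach an I/O manager backed by storage you control to your definitions (the first of the two options described below). A minimal, hypothetical sketch, assuming the `dagster-aws` package and an S3 bucket you manage; the bucket name is a placeholder:

```python
from dagster import Definitions, asset
from dagster_aws.s3 import S3PickleIOManager, S3Resource


@asset
def sensitive_table():
    # Returned values are handed to the I/O manager below, so they are
    # written to your bucket rather than Dagster+ managed storage.
    return {"num_rows": 25}


defs = Definitions(
    assets=[sensitive_table],
    resources={
        # Overriding the "io_manager" key replaces the default I/O manager
        # for every asset in these definitions.
        "io_manager": S3PickleIOManager(
            s3_resource=S3Resource(),
            s3_bucket="my-company-bucket",  # placeholder: a bucket you control
        ),
    },
)
```

With this in place, asset data flows through your own infrastructure and is never persisted by Dagster+.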
    {" "} + +In Serverless, code that uses the default [I/O manager](/concepts/io-management/io-managers#built-in-io-managers) is automatically adjusted to save data in Dagster+ managed storage. This automatic change is useful because the default file system in Serverless is ephemeral, which means the default I/O manager wouldn't work as expected. However, this automatic change means potentially sensitive data is being stored, not just processed or orchestrated, by Dagster+. + +To avoid this behavior, you can: + +- Use an I/O manager that stores data in your infrastructure +- Write code that doesn't use an I/O manager + +--- + +## Whitelisting / Allowlisting Dagster's IP addresses + +Serverless code will make requests from one of the following IP addresses. You may need to whitelist / allowlist them for services your code interacts with. + +```plain +34.216.9.66 +35.162.181.243 +35.83.14.215 +44.230.239.14 +44.240.64.133 +52.34.41.163 +52.36.97.173 +52.37.188.218 +52.38.102.213 +52.39.253.102 +52.40.171.60 +52.89.191.177 +54.201.195.80 +54.68.25.27 +54.71.18.84 +``` + +**Note**: Additional IP addresses may be added over time. This list was last updated on **January 31, 2024.** + +--- + +## Run isolation + +Dagster+ Serverless offers two settings for run isolation: isolated and non-isolated. Non-isolated runs are for iterating quickly and trade off isolation for speed. Isolated runs are for production and compute heavy Assets/Jobs. + +### Isolated runs (default) + +Isolated runs each take place in their own container with their own compute resources: 4 cpu cores and 16GB of RAM. + +These runs may take up to 3 minutes to start while these resources are provisioned. + +When launching runs manually, select `Isolate run environment` in the Launchpad to launch an isolated runs. Scheduled, sensor, and backfill runs are always isolated. + + + +_Note: if non-isolated runs aren't enabled (see the section below), the toggle won't appear and all runs will be isolated._ + +### Non-isolated runs + +This can be enabled or disabled in deployment settings with + +```yaml +non_isolated_runs: + enabled: True +``` + +Non-isolated runs provide a faster start time by using a standing, shared container for each code location. + +They have fewer compute resources: 0.25 vCPU cores and 1GB of RAM. These resources are shared with other processes for a code location like sensors. As a result, it's recommended to use isolated runs for compute intensive jobs and asset materializations. + +While launching runs from the Launchpad, uncheck `Isolate run environment`. When materializing an asset, shift-click `Materialize all` and uncheck it in the modal. + + + +By default only one non-isolated run will execute at once. While a run is in progress, the Launchpad will swap to only launching isolated runs. + +This limit can be configured in [deployment settings](/dagster-plus/managing-deployments/deployment-settings-reference#non-isolated-runs). Take caution; The limit is in place to help wih avoiding crashes due to OOMs. + +```yaml +non_isolated_runs: + enabled: True + max_concurrent_non_isolated_runs: 1 +``` diff --git a/docs/content/dagster-plus/getting-started.mdx b/docs/content/dagster-plus/getting-started.mdx new file mode 100644 index 0000000000000..b8b6df90b0bb3 --- /dev/null +++ b/docs/content/dagster-plus/getting-started.mdx @@ -0,0 +1,480 @@ +--- +title: Getting started with Dagster+ | Dagster+ +description: "Get up and running with Dagster+." 
+ +platform_type: "cloud" +--- + +# Getting started with Dagster+ + +Welcome to Dagster+! + +In this guide, we'll give you everything you need to get up and running with Dagster+, including: + +- Creating your Dagster+ account +- Selecting a deployment type +- Deploying your code +- Setting up CI/CD +- Setting up environment variables and secrets + +Let's get started! + +--- + +## Step 1: Create a Dagster+ account and organization + +If you don't already have a Dagster+ account, [sign up for one](https://dagster.cloud/signup) before continuing. + +You'll also be asked to create a Dagster+ organization. When your organization is created, a single deployment, named `prod`, will also be created. Refer to the [Managing deployments guide](/dagster-plus/managing-deployments/managing-deployments) for more information about deployments in Dagster+. + +--- + +## Step 2: Select a deployment type + +In this step, you'll select the type of deployment you want to use: **Serverless** or **Hybrid**. + + + + + + + + + + + + + + + + + + + + + +
| Type | How it works | May be a good fit if... |
| --- | --- | --- |
| **Serverless** | In a Serverless deployment, Dagster manages the infrastructure needed to run Dagster jobs. This means you don't need to spin up or manage any infrastructure. Refer to the Serverless deployment documentation for more info. | You want to try Dagster+ without a lot of time investment; you don't want to focus on infrastructure management; your Dagster jobs aren't computationally expensive or memory intensive |
| **Hybrid** | In a Hybrid deployment, your Dagster code is executed in your infrastructure. The agent runs in your infrastructure, executes your code, and streams metadata about code execution and assets and jobs (for visualization) over HTTPS back to Dagster+. Refer to the Hybrid deployment documentation for more info. Dagster+ supports a variety of agents, each with its own prerequisites for setup; refer to the Agent documentation for more info. | |
Select the deployment type you want to use and [proceed to the next step](#step-3-deploy-your-code). **Note**: If you find your deployment type isn't right for you, it can be changed at any time.

---

## Step 3: Deploy your code

Now that you've selected a deployment type, the next step is to tell Dagster+ the location of your Dagster code. In addition to this, you'll set up Continuous Integration (CI) and Continuous Deployment (CD). Once completed, [Branch Deployments](/dagster-plus/managing-deployments/branch-deployments) and continuous code location deployments will be enabled for your Dagster project.

The steps for accomplishing this vary depending on the **deployment type** you selected in the previous step:

Click the tab for your deployment type - [**Serverless**](#serverless) or [**Hybrid**](#hybrid) - to view what's next.

### Serverless

For **Serverless deployments**, there are two ways to deploy your code to Dagster+:

- [**Start from a template**](#use-a-template) - Use one of our quickstart templates to get up and running. All templates come with CI/CD already configured and will be cloned to a new GitHub repository.

- [**Import an existing project**](#import-an-existing-project) - Import an existing GitHub repository using our GitHub integration or the [dagster-cloud CLI](/dagster-plus/managing-deployments/dagster-plus-cli). **Note**: If using the GitHub integration, Dagster+ will automatically set up CI/CD for you.

#### Use a template

##### GitHub

1. Click **Select** to select a template.
2. Sign in to your GitHub account, if prompted.
3. In Dagster+, define the following:
   - **Git scope** - Select the organization or account to create the new repository in.
   - **Repository Name** - Enter a name for the new repository.
   - Check the **Make git repository private** box to make the repository private.
4. When finished, click **Clone and deploy**.

When finished, [continue to Step 5](#step-5-set-up-environment-variables-and-secrets).

---

##### GitLab

1. Click **Select** to select a template.
2. Sign in to your GitLab account, if prompted.
3. In Dagster+, define the following:
   - **Namespace** - Select the group or account to create the new project in.
   - **Project** - Enter a name for the new project.
   - Check the **Make git project private** box to make the project private.
4. When finished, click **Clone and deploy**.

When finished, [continue to Step 5](#step-5-set-up-environment-variables-and-secrets).

---

#### Import an existing project

If you have existing Dagster code, you can use Dagster's GitHub / GitLab app or the `dagster-cloud` CLI.

##### GitHub

Using the GitHub integration to import an existing GitHub repository also sets up CI/CD for you.

Before you get started, note that the repository must have a [`dagster_cloud.yaml` file](/dagster-plus/managing-deployments/dagster-cloud-yaml) in order for Dagster+ to deploy it. This file defines the [code locations](/concepts/code-locations) in your Dagster code.

If this file doesn't currently exist, create it in the root of your repository with the following code:

```yaml
# dagster_cloud.yaml

locations:
  - location_name: my_location_name # the name of the code location
    code_source:
      package_name: hackernews # the name of the python package associated with the code location
```

After you've committed the file to the repository, come back to Dagster+ to complete the import process:
1. Click the **Import an existing project** tab.
2. Sign in to your GitHub account, if prompted.
3. In Dagster+, define the following:
   - **Git scope** - Select the organization or account that the repository is in.
   - **Repository** - Select the repository.
4. Click **Deploy**.

When finished, [continue to Step 5](#step-5-set-up-environment-variables-and-secrets).

##### GitLab

Using the GitLab integration to import an existing GitLab project also sets up CI/CD for you.

Before you get started, note that the project must have a [`dagster_cloud.yaml` file](/dagster-plus/managing-deployments/dagster-cloud-yaml) in order for Dagster+ to deploy it. This file defines the [code locations](/concepts/code-locations) in your Dagster code.

If this file doesn't currently exist, create it in the root of your repository with the following code:

```yaml
# dagster_cloud.yaml

locations:
  - location_name: my_location_name # the name of the code location
    code_source:
      package_name: hackernews # the name of the python package associated with the code location
```

After you've committed the file to the project, come back to Dagster+ to complete the import process:

1. Click the **Import an existing project** tab.
2. Sign in to your GitLab account, if prompted.
3. In Dagster+, define the following:
   - **Namespace** - Select the group or account that the project is in.
   - **Project** - Select the project.
4. Click **Deploy**.

When finished, [continue to Step 5](#step-5-set-up-environment-variables-and-secrets).

##### dagster-cloud CLI

**Heads up!** Using the `dagster-cloud` CLI requires a recent version of Python 3 and Docker.

To complete this step using the CLI, you can use your own Dagster code or the [Dagster starter kit](https://github.com/dagster-io/quickstart-etl). The starter kit is a template with everything you need to get started using Serverless in Dagster+, including CI/CD configuration and the required [`dagster_cloud.yaml` file](/dagster-plus/managing-deployments/dagster-cloud-yaml).

- **If using the template**, [clone the repository](https://github.com/dagster-io/quickstart-etl) to your local environment.

- **If not using the template**, add `dagster-cloud` as a dependency in `setup.py`. [Click here for an example](https://github.com/dagster-io/quickstart-etl/blob/main/setup.py). This is already done for you if using the starter kit.

After you've finished setting up your local project, move on to deploying using the CLI:

1. To install the `dagster-cloud` CLI, run:

   ```shell
   pip install dagster-cloud
   ```

2. Next, you'll need to authenticate to Dagster+. Run the following command and follow the prompts to log in:

   ```shell
   dagster-cloud config setup
   ```

3. After you've successfully authenticated, run the following to deploy your code to Dagster+:

   ```shell
   # --location-name: the name of the code location
   # --package-name: the name of the Python package associated with the code location
   dagster-cloud serverless deploy \
     --location-name "" \
     --package-name ""
   ```

When finished, [continue to Step 5](#step-5-set-up-environment-variables-and-secrets).

### Hybrid

To set up Hybrid deployment and deploy your code, you'll need to:

1. Set up an agent
2. Configure CI/CD for your project. We'll walk you through this in [Step 4](#step-4-configure-cicd-for-your-project).
+ +For most Hybrid deployments - with the exception of those using a local agent - you'll need to create a Docker image containing your Dagster code and then add a code location to Dagster+ that references the image. + +1. The dialog that displays will contain a pre-generated [agent token](/dagster-plus/account/managing-user-agent-tokens) and details about the agents currently supported by Dagster+. + + **Note**: Keep this token handy - you'll need it to set up CI/CD in the next step. + +2. Follow the steps for setting up and deploying your agent: + + - [Amazon Elastic Container Service (ECS)](/dagster-plus/deployment/agents/amazon-ecs/creating-ecs-agent-new-vpc) + - [Docker](/dagster-plus/deployment/agents/docker) + - [Kubernetes](/dagster-plus/deployment/agents/kubernetes/configuring-running-kubernetes-agent) + - [Local](/dagster-plus/deployment/agents/local) + +3. The dialog will indicate when Dagster+ receives an agent heartbeat. Click **Continue**. + +When finished, [continue to the next step](#step-4-configure-cicd-for-your-project). + + + + +--- + +## Step 4: Configure CI/CD for your project + + + Skip to the next step if using Serverless. This step is only + required for Hybrid deployments. + + +To finish setting up your Hybrid deployment, you'll configure CI/CD for your Dagster project. How this is accomplished depends on your CI/CD provider: + +- **GitHub** - If using GitHub, you can use our GitHub Action workflow to set up CI/CD for your project. +- **Another CI/CD provider** - If you're not using GitHub, you can configure CI/CD using the `dagster-cloud` CLI. + + + + +### GitHub Actions + +To set up continuous integration using GitHub Actions, you can use your own Dagster code or the [Dagster+ Hybrid Quickstart](https://github.com/dagster-io/dagster-cloud-hybrid-quickstart). The quickstart is a template with everything you need to get started using Hybrid deployment in Dagster+. + +- **If using the template**, [clone the repository](https://github.com/dagster-io/dagster-cloud-hybrid-quickstart). + +- **If not using the template**, copy the GitHub workflow file (`.github/workflows`) from the [Hybrid quickstart repository](https://github.com/dagster-io/dagster-cloud-hybrid-quickstart/tree/main/.github/workflows) and add it to your repository. This is already done for you if using the quickstart. + +**Configure the GitHub workflow YAML file as described below**. The GitHub workflow deploys your code to Dagster+ using these steps: + +1. Initialize - Check out your code and validate `dagster_cloud.yaml`. + +2. Docker image push - Build a Docker image from your code and upload it to your container registry. + +3. Deploy to Dagster+ - Update code locations in Dagster+ to use the new Docker image. + +**To configure the workflow**, follow these steps: + +1. In the repository, set the `DAGSTER_CLOUD_API_TOKEN` GitHub action secret. This is the Dagster+ agent token from the previous section. Refer to the [agent tokens documentation](/dagster-plus/account/managing-user-agent-tokens#managing-agent-tokens) for more info. + + Refer to the [GitHub docs](https://docs.github.com/en/actions/security-guides/encrypted-secrets#creating-encrypted-secrets-for-a-repository) for more info about GitHub Action secrets. + +2. In your [`dagster-cloud-deploy.yml`](https://github.com/dagster-io/dagster-cloud-hybrid-quickstart/blob/main/.github/workflows/dagster-cloud-deploy.yml), set the `DAGSTER_CLOUD_ORGANIZATION` environment variable to your Dagster+ organization name. + +3. 
In your `dagster-cloud-deploy.yml`, uncomment the step that is relevant to your Docker container registry. For example, if using DockerHub, you'd uncomment the `DockerHub` step in that file. Ensure you have set up the relevant secrets for building and uploading your Docker images.

After making the above changes and committing the workflow file, the CI process should be triggered to deploy your GitHub repository to Dagster+. During the deployment, the agent will attempt to load your code and update the metadata in Dagster+. Once finished, you should see the GitHub Action complete successfully and also be able to see the code location under the **Deployment** tab in Dagster+.

When finished, [continue to the next step](#step-5-set-up-environment-variables-and-secrets).

### Other CI/CD provider

For continuous integration using a CI/CD provider other than GitHub, your system should use the `dagster-cloud ci` subcommand to deploy code locations to Dagster+.

Ensure that you have created a `dagster_cloud.yaml` file as described in [the quickstart](https://github.com/dagster-io/dagster-cloud-hybrid-quickstart/). For detailed documentation about this file, see the [`dagster_cloud.yaml` reference](/dagster-plus/managing-deployments/dagster-cloud-yaml). The following steps are typically implemented in the CI/CD workflow:

1. Set the build environment variables. Note that all variables are required:

   - `DAGSTER_CLOUD_ORGANIZATION` - The name of your organization in Dagster+.
   - `DAGSTER_CLOUD_API_TOKEN` - A Dagster+ API token. **Note**: This is a sensitive value and should be stored as a CI/CD secret, if possible.
   - `DAGSTER_BUILD_STATEDIR` - A path to a blank or non-existent temporary directory on the build machine. This directory is used to store local state during the build.

2. Run the configuration check:

   ```shell
   dagster-cloud ci check --project-dir=.
   ```

   This step is optional but useful for validating the contents of your `dagster_cloud.yaml` and your connection to Dagster+.

3. Initialize the build session:

   ```shell
   dagster-cloud ci init --project-dir=.
   ```

   This reads the `dagster_cloud.yaml` configuration and initializes the `DAGSTER_BUILD_STATEDIR`.

4. Build and upload Docker images for your code locations.

   The Docker image should contain a Python environment with `dagster`, `dagster-cloud`, and your code. For reference, see the [example Dockerfile](https://github.com/dagster-io/dagster-cloud-hybrid-quickstart/blob/main/Dockerfile) in our quickstart repository. The example uses `pip install .` to install the code, including the dependencies specified in [`setup.py`](https://github.com/dagster-io/dagster-cloud-hybrid-quickstart/blob/main/setup.py).

   It is a good idea to use a unique image tag for each Docker build. You can build one image per code location or a shared image for multiple code locations. As an example image tag, you can use the git commit SHA:

   ```shell
   export IMAGE_TAG=`git log --format=format:%H -n 1`
   ```

   Use this tag to build and upload your Docker image, for example:

   ```shell
   docker build . -t ghcr.io/org/dagster-cloud-image:$IMAGE_TAG
   docker push ghcr.io/org/dagster-cloud-image:$IMAGE_TAG
   ```

   The upload step is specific to your Docker container registry and will require authentication. The only requirement is that the registry you upload to must match the registry specified in `dagster_cloud.yaml`.

5. Update the build session with the Docker image tag.
For each code location you want to deploy, run the following command, passing the `IMAGE_TAG` used in the previous step:

   ```shell
   dagster-cloud ci set-build-output --location-name=code-location-a --image-tag=$IMAGE_TAG
   ```

   This command does not deploy the code location but just updates the local state in `DAGSTER_BUILD_STATEDIR`.

6. Deploy to Dagster+:

   ```shell
   dagster-cloud ci deploy
   ```

   This command updates the code locations in Dagster+. Once this finishes successfully, you should be able to see the code locations under the **Deployments** tab in Dagster+.

**Note**: Creating Branch Deployments using the CLI requires some additional steps. Refer to the [Branch Deployments with the dagster-cloud CLI guide](/dagster-plus/managing-deployments/branch-deployments/using-branch-deployments) for more info.

When finished, [continue to the next step](#step-5-set-up-environment-variables-and-secrets).

---

## Step 5: Set up environment variables and secrets

Congrats! At this point, your Dagster+ deployment should be up and running. To ensure the external services you use in your Dagster project work correctly, start setting up your [environment variables](/dagster-plus/managing-deployments/environment-variables-and-secrets). Using environment variables, you can securely pass in sensitive info like passwords, API tokens, etc.

---

## Next steps

From here, you can:

- [Invite your team](/dagster-plus/account/managing-users)
- [Configure authentication for your account](/dagster-plus/account/authentication)
- [Set up monitoring and alerting](/dagster-plus/managing-deployments/alerts)
- [Learn more about setting up CI using Branch Deployments](/dagster-plus/managing-deployments/branch-deployments)

diff --git a/docs/content/dagster-plus/insights.mdx b/docs/content/dagster-plus/insights.mdx
new file mode 100644
index 0000000000000..36b9f5b271080
--- /dev/null
+++ b/docs/content/dagster-plus/insights.mdx
@@ -0,0 +1,155 @@
---
title: "Dagster+ Insights | Dagster Docs"
description: "Visibility into historical usage and cost metrics."

platform_type: "cloud"
---

# Dagster+ Insights

Using Dagster+ Insights, you can gain visibility into historical usage and cost metrics such as Dagster+ run duration, credit usage, and failures. You can also:

- [Integrate other metrics](#integrating-other-metrics)
- [Export metrics from Dagster+](#exporting-metrics)
- [Create alerts for different metrics](/dagster-plus/managing-deployments/alerts)

---

## How it works

Insights makes it easier to capture operational data not only from Dagster pipelines, but also from downstream systems. Visualizations are built right into the Dagster UI, allowing you to explore Dagster usage alongside metrics from other platforms, like Google BigQuery.

To access Insights, click **Insights** in the top navigation bar in the UI.

The left navigation panel on this page contains a list of [available metrics](#available-metrics). For each metric, the daily, weekly, or monthly aggregated values are displayed in the graph.

Use the tabs above the charts to view metrics for **Assets**, **Asset groups**, **Jobs**, and **Deployments**.

These metrics are updated on a daily basis. Refer to the [Available metrics](#available-metrics) section for more information about these metrics.
### Data retention

How long historical Insights data is retained depends on your Dagster+ plan:

- **Dagster+ Pro** - 120 days
- **All other plans** - 30 days

### Available metrics

| Metric | Description |
| --- | --- |
| Dagster credits | The Dagster credit cost associated with computing this object. Dagster credits are charged for every step that is run, and for every asset that is materialized. For more information, refer to the pricing FAQ. |
| Compute duration | The time spent computing steps. For jobs that run steps in parallel, the compute duration may be longer than the wall clock time it takes for the run to complete. |
| Materializations | The number of asset materializations associated with computing this object. |
| Observations | The number of asset observations associated with computing this object. |
| Step failures | The number of times steps failed when computing this object. **Note**: Steps that retry and succeed are not included in this metric. |
| Step retries | The number of times steps were retried when computing this object. |
| Asset check warnings | The number of asset checks that produced warnings. |
| Asset check errors | The number of asset checks that produced errors. |
| Retry compute | The time spent computing steps, including time spent retrying failed steps. For jobs that run steps in parallel, the compute duration may be longer than the wall clock time it takes for the run to complete. |
---

## Working with metrics

### Integrating other metrics

Other metrics, such as asset materialization metadata or Snowflake credits, can be integrated into Dagster Insights. Insights currently supports the following external metrics:

- **Asset materialization metadata.** Refer to the [Using asset metadata with Dagster+ Insights guide](/dagster-plus/insights/asset-metadata) for more info.
- **Google BigQuery usage** generated by either [queries made to BigQuery resources](/dagster-plus/insights/integrating-bigquery) or [using dbt to materialize tables](/dagster-plus/insights/integrating-bigquery-and-dbt)
- **Snowflake usage** generated by either [queries made to Snowflake resources](/dagster-plus/insights/integrating-snowflake) or [using dbt to materialize tables](/dagster-plus/insights/integrating-snowflake-and-dbt)

### Exporting metrics

Metrics in Dagster+ Insights can be exported using a GraphQL API endpoint. Refer to the [Exporting Insights metrics from Dagster+ guide](/dagster-plus/insights/exporting-insights-metrics) for details.

diff --git a/docs/content/dagster-plus/insights/asset-metadata.mdx b/docs/content/dagster-plus/insights/asset-metadata.mdx
new file mode 100644
index 0000000000000..16172ad8264c9
--- /dev/null
+++ b/docs/content/dagster-plus/insights/asset-metadata.mdx
@@ -0,0 +1,78 @@
---
title: "Integrating asset metadata into Dagster+ Insights | Dagster Docs"
description: "Integrating external metrics with Dagster Insights."

platform_type: "cloud"
---

# Integrating asset metadata into Dagster+ Insights

Any numeric [asset materialization metadata](/concepts/metadata-tags/asset-metadata) attached to your assets can be viewed and aggregated in the Dagster Insights UI. This is a good way to track usage, cost, data size, or other characteristics of your data assets over time, and across different parts of your platform.

---

## Prerequisites

To complete the steps in this guide, you'll need a Dagster+ account on the Pro plan.

---

## Step 1: Emit numeric values alongside your asset materializations

To populate Insights with materialization metadata, you must first emit numeric metadata to be aggregated. In the example below, we attach a `num_rows` metadata value to the output of the `table1` asset:

```python file=/concepts/assets/asset_materialization_metadata_none.py
from dagster import Output, asset


@asset
def table1() -> Output[None]:
    ...  # write out some data to table1
    return Output(None, metadata={"num_rows": 25})
```

Insights lets us track this value over time and aggregate it across any other assets that provide a `num_rows` value.

Refer to the [Asset metadata](/concepts/metadata-tags/asset-metadata) documentation for more information about asset metadata.

---

## Step 2: Enable viewing numeric metadata in Dagster+ Insights

Once one or more assets are emitting numeric metadata values, you'll be able to enable viewing them in the Insights UI. **Note**: It may take up to 24 hours for new metadata types to be reflected in the UI.

To modify the list of materialization metadata values shown in Insights, click **Edit** in the sidebar next to the **User provided metrics** header.

In the dialog that appears, check or uncheck metrics to use them in Insights. Selected metrics will be immediately available to view in Insights in the sidebar and on individual asset pages.
You can also change a metric's icon, display name, and description by clicking the **pencil icon** next to the metric. + +--- + +## Related + + + + + + + diff --git a/docs/content/dagster-cloud/insights/exporting-insights-metrics.mdx b/docs/content/dagster-plus/insights/exporting-insights-metrics.mdx similarity index 82% rename from docs/content/dagster-cloud/insights/exporting-insights-metrics.mdx rename to docs/content/dagster-plus/insights/exporting-insights-metrics.mdx index 05fc486b6a2c1..9e2450245f034 100644 --- a/docs/content/dagster-cloud/insights/exporting-insights-metrics.mdx +++ b/docs/content/dagster-plus/insights/exporting-insights-metrics.mdx @@ -1,19 +1,15 @@ --- -title: "Exporting metrics from Dagster Cloud Insights | Dagster Docs" -description: "Exporting metrics from Dagster Cloud Insights." +title: "Exporting metrics from Dagster+ Insights | Dagster Docs" +description: "Exporting metrics from Dagster+ Insights." platform_type: "cloud" --- -# Exporting metrics from Dagster Cloud Insights (Experimental) +# Exporting metrics from Dagster+ Insights - - This feature is considered experimental. - +Using a GraphQL API endpoint, you can export [Dagster+ Insights](/dagster-plus/insights) metrics from your Dagster+ instance. -Using a GraphQL API endpoint, you can export [Dagster Cloud Insights](/dagster-cloud/insights) metrics from your Dagster Cloud instance. - -Refer to the [Available Insights metrics](/dagster-cloud/insights#available-metrics) for a list of available metrics. +Refer to the [Available Insights metrics](/dagster-plus/insights#available-metrics) for a list of available metrics. --- @@ -21,10 +17,10 @@ Refer to the [Available Insights metrics](/dagster-cloud/insights#available-metr To complete the steps in this guide, you'll need: -- A Dagster Cloud account -- Access to the [Dagster Cloud Insights feature](/dagster-cloud/insights) -- A Dagster Cloud [user token](/dagster-cloud/account/managing-user-agent-tokens#managing-user-tokens) -- Your deployment-scoped Dagster Cloud deployment URL. For example: `dagster-university.dagster.cloud/prod` +- A Dagster+ account +- Access to the [Dagster+ Insights feature](/dagster-plus/insights) +- A Dagster+ [user token](/dagster-plus/account/managing-user-agent-tokens#managing-user-tokens) +- Your deployment-scoped Dagster+ deployment URL. For example: `dagster-university.dagster.cloud/prod` --- diff --git a/docs/content/dagster-plus/insights/integrating-bigquery-and-dbt.mdx b/docs/content/dagster-plus/insights/integrating-bigquery-and-dbt.mdx new file mode 100644 index 0000000000000..ba03d220a2567 --- /dev/null +++ b/docs/content/dagster-plus/insights/integrating-bigquery-and-dbt.mdx @@ -0,0 +1,122 @@ +--- +title: "Integrating BigQuery & dbt with Dagster+ Insights | Dagster Docs" +description: "Integrating BigQuery metrics with Dagster Insights." + +platform_type: "cloud" +--- + +# Integrating BigQuery & dbt with Dagster+ Insights + +BigQuery costs can be integrated into the Dagster Insights UI. The [`dagster-cloud`](https://pypi.org/project/dagster-cloud/) package contains utilities for capturing and submitting BigQuery cost metrics about data operations to Dagster+. + +If you use dbt to materialize tables in BigQuery, use this guide to integrate BigQuery cost metrics into the Insights UI. For instructions on integrating direct BigQuery queries, see [Integrating Direct BigQuery Usage with Dagster+ Insights](/dagster-plus/insights/integrating-bigquery). 
---

## Prerequisites

To complete the steps in this guide, you'll need:

- A Dagster+ account on the Pro plan
- Access to the [Dagster+ Insights feature](/dagster-plus/insights)
- BigQuery credentials which have access to the `INFORMATION_SCHEMA.JOBS` table (e.g., the BigQuery Resource Viewer role). These credentials should be the same ones used by your dbt profile. For more information on granting access to this table, see the [BigQuery documentation](https://cloud.google.com/bigquery/docs/information-schema-jobs).
- To install the following libraries:

  ```shell
  pip install dagster dagster-cloud dagster-dbt dagster-gcp
  ```

  **Note**: If you already have `dagster-cloud` installed, **make sure you're using version 1.7.0 or newer**.

---

## Step 1: Instrument your Dagster code

If you use software-defined assets, append `with_insights()` to the dbt CLI call in your `@dbt_assets`-decorated function:

```python
@dbt_assets(...)
def my_asset(context: AssetExecutionContext, dbt: DbtCliResource):
    # Chain `with_insights` after any other metadata fetch, e.g. `fetch_row_count`
    yield from dbt.cli(["build"], context=context).stream().with_insights()
```

This passes through all underlying events and emits additional `AssetObservation`s with BigQuery cost metrics. These metrics are obtained by querying the underlying `INFORMATION_SCHEMA.JOBS` table, using the BigQuery client from the dbt adapter.

Alternatively, if you use ops and jobs, append `with_insights()` to the dbt CLI call in your Dagster op function:

```python
@op(out={})
def my_dbt_op(context: OpExecutionContext, dbt: DbtCliResource):
    # Chain `with_insights` after any other metadata fetch, e.g. `fetch_row_count`
    yield from dbt.cli(
        ["build"], context=context, manifest=dbt_manifest_path
    ).stream().with_insights()


@job
def my_dbt_job():
    ...
    my_dbt_op()
    ...
```

This passes through all underlying events and emits additional `AssetObservation`s with BigQuery cost metrics. These metrics are obtained by querying the underlying `INFORMATION_SCHEMA.JOBS` table, using the BigQuery client from the dbt adapter.

---

## Step 2: Update dbt_project.yml

Next, add the following to your dbt project's `dbt_project.yml`:

```yaml
query-comment:
  comment: "bigquery_dagster_dbt_v1_opaque_id[[[{{ node.unique_id }}:{{ invocation_id }}]]]"
  append: true
```

This allows you to add a comment, containing the dbt invocation ID and unique ID, to every query recorded in BigQuery's `INFORMATION_SCHEMA.JOBS` table. Using this data, Insights will attribute cost metrics in BigQuery to the corresponding Dagster jobs and assets.

---

## Step 3: View BigQuery usage in the Dagster UI

Typically within 24 hours, the BigQuery metrics should be available in the **Insights** tab in the Dagster UI.

The BigQuery cost metric is based on the bytes billed for queries wrapped with `with_insights`, using a unit price of $6.25 USD per TiB.

---

## Related

diff --git a/docs/content/dagster-plus/insights/integrating-bigquery.mdx b/docs/content/dagster-plus/insights/integrating-bigquery.mdx
new file mode 100644
index 0000000000000..0458049c4eb4b
--- /dev/null
+++ b/docs/content/dagster-plus/insights/integrating-bigquery.mdx
@@ -0,0 +1,81 @@
---
title: "Integrating Google BigQuery usage with Dagster+ Insights | Dagster Docs"
description: "Integrating BigQuery metrics with Dagster Insights."

platform_type: "cloud"
---

# Integrating Google BigQuery usage with Dagster+ Insights

External metrics, such as Google BigQuery usage, can be integrated into the Dagster Insights UI.
The [`dagster-cloud`](https://pypi.org/project/dagster-cloud/) package contains utilities for capturing and submitting external metrics about data operations to Dagster+ via an API.

If you use the [BigQuery resource](/\_apidocs/libraries/dagster-gcp), use this guide to integrate BigQuery metrics into the Insights UI. For instructions on integrating usage of dbt models that run in BigQuery, see [Integrating BigQuery & dbt with Dagster+ Insights](/dagster-plus/insights/integrating-bigquery-and-dbt).

---

## Prerequisites

To complete the steps in this guide, you'll need:

- A Dagster+ account on the Pro plan
- Access to the [Dagster+ Insights feature](/dagster-plus/insights)
- BigQuery credentials that have access to the `INFORMATION_SCHEMA.JOBS` table. For more information on granting access to this table, see the [BigQuery documentation](https://cloud.google.com/bigquery/docs/information-schema-jobs).
- To install the following libraries:

  ```shell
  pip install dagster dagster-cloud dagster-gcp
  ```

  **Note**: If you already have `dagster-cloud` installed, **make sure you're using version 1.7.0 or newer**.

---

## Step 1: Replace your BigQuery resources

The first step is to replace any existing BigQuery resources with `InsightsBigQueryResource`. This resource is a drop-in replacement for the `BigQueryResource`, but it also emits BigQuery usage metrics to the Dagster+ Insights API.

```python
from dagster import Definitions
from dagster_cloud.dagster_insights import InsightsBigQueryResource

defs = Definitions(
    resources={
        "bigquery": InsightsBigQueryResource(project="my-project")
    }
)
```

---

## Step 2: View BigQuery usage in the Dagster UI

Once the pipeline runs, BigQuery usage will be visible in the **Insights** tab in the Dagster UI.

The BigQuery cost metric is based on the bytes billed for queries performed using the `InsightsBigQueryResource`, using a unit price of $6.25 USD per TiB.

---

## Related

diff --git a/docs/content/dagster-plus/insights/integrating-external-metrics.mdx b/docs/content/dagster-plus/insights/integrating-external-metrics.mdx
new file mode 100644
index 0000000000000..6597273cbfed8
--- /dev/null
+++ b/docs/content/dagster-plus/insights/integrating-external-metrics.mdx
@@ -0,0 +1,30 @@
---
title: "Integrating external metrics into Dagster+ Insights | Dagster Docs"
description: "Integrating external metrics with Dagster Insights."

platform_type: "cloud"
---

# Integrating external metrics into Dagster+ Insights

External metrics, such as Snowflake credits, can be integrated into the Dagster Insights UI. The [`dagster-cloud`](https://pypi.org/project/dagster-cloud/) package contains utilities for capturing and submitting external metrics about data operations to Dagster+ via an API.

---

## Limitations

Before you start, note that:

- This is a Dagster+ Pro feature
- Up to two million individual data points may be added to Insights, per month
- External metrics data will only be retained for 120 days

---

## Supported integrations

Currently, Insights supports integrating metrics for:

- **Usage generated by queries made to Google BigQuery and Snowflake resources.** Refer to the [BigQuery](/dagster-plus/insights/integrating-bigquery) and [Snowflake](/dagster-plus/insights/integrating-snowflake) integration guides for more information.
- **Google BigQuery and Snowflake usage generated by dbt.** Refer to the [BigQuery](/dagster-plus/insights/integrating-bigquery-and-dbt) and [Snowflake](/dagster-plus/insights/integrating-snowflake-and-dbt) dbt integration guides for more information.

diff --git a/docs/content/dagster-plus/insights/integrating-snowflake-and-dbt.mdx b/docs/content/dagster-plus/insights/integrating-snowflake-and-dbt.mdx
new file mode 100644
index 0000000000000..35a6e87e267ae
--- /dev/null
+++ b/docs/content/dagster-plus/insights/integrating-snowflake-and-dbt.mdx
@@ -0,0 +1,157 @@
---
title: "Integrating Snowflake & dbt with Dagster+ Insights | Dagster Docs"
description: "Integrating external metrics with Dagster Insights."

platform_type: "cloud"
---

# Integrating Snowflake & dbt with Dagster+ Insights

External metrics, such as Snowflake credits, can be integrated into the Dagster Insights UI. The [`dagster-cloud`](https://pypi.org/project/dagster-cloud/) package contains utilities for capturing and submitting external metrics about data operations to Dagster+ via an API.

If you use dbt to materialize tables in Snowflake, use this guide to integrate Snowflake metrics into the Insights UI. For instructions on integrating direct Snowflake queries, see [Integrating Direct Snowflake Usage with Dagster+ Insights](/dagster-plus/insights/integrating-snowflake).

---

## Prerequisites

To complete the steps in this guide, you'll need:

- A Dagster+ account on the Pro plan
- Access to the [Dagster+ Insights feature](/dagster-plus/insights)
- Snowflake credentials which have access to the `snowflake.account_usage.query_history` table. For more information on granting access to this table, see the [Snowflake documentation](https://docs.snowflake.com/en/sql-reference/account-usage#enabling-the-snowflake-database-usage-for-other-roles).
- To install the following libraries:

  ```shell
  pip install dagster dagster-cloud dagster-dbt dagster-snowflake
  ```

  **Note**: If you already have `dagster-cloud` installed, **make sure you're using version 1.5.1 or newer**.

---

## Step 1: Instrument your Dagster code

If you use software-defined assets, append `with_insights()` to the dbt CLI call in your `@dbt_assets`-decorated function:

```python
@dbt_assets(...)
def my_asset(context: AssetExecutionContext, dbt: DbtCliResource):
    # Chain `with_insights` after any other metadata fetch, e.g. `fetch_row_count`
    yield from dbt.cli(["build"], context=context).stream().with_insights()
```

This passes through all underlying events and emits an `AssetObservation` for each asset materialization. The observation contains the dbt invocation ID and unique ID recorded in the Dagster event log.

Alternatively, if you use ops and jobs, append `with_insights()` to the dbt CLI call in your Dagster op function:

```python
@op(out={})
def my_dbt_op(context: OpExecutionContext, dbt: DbtCliResource):
    # Chain `with_insights` after any other metadata fetch, e.g. `fetch_row_count`
    yield from dbt.cli(
        ["build"], context=context, manifest=dbt_manifest_path
    ).stream().with_insights()


@job
def my_dbt_job():
    ...
    my_dbt_op()
    ...
```

This passes through all underlying events and emits an `AssetObservation` for each asset materialization. The observation contains the dbt invocation ID and unique ID that are recorded in the Dagster event log.
---

## Step 2: Update dbt_project.yml

Next, add the following to your dbt project's `dbt_project.yml`:

```yaml
query-comment:
  comment: "snowflake_dagster_dbt_v1_opaque_id[[[{{ node.unique_id }}:{{ invocation_id }}]]]"
  append: true
```

This allows you to add a comment, containing the dbt invocation ID and unique ID, to every query recorded in Snowflake's `query_history` table. Using this data, Insights will attribute cost metrics in Snowflake to the corresponding Dagster jobs and assets.

**Note**: Make sure to include `append: true`, as Snowflake strips leading comments.

---

## Step 3: Create a metrics ingestion pipeline in Dagster

The last step is to create a Dagster pipeline that joins asset observation events with the Snowflake query history and calls the Dagster+ ingestion API. Snowflake usage information is available at a delay, so this pipeline will run on a schedule to ingest Snowflake usage information from the previous hour.

Note that you only need to create this pipeline in a single code location per deployment, even if you have instrumented dbt assets in multiple code locations.

To do this, you'll need a Snowflake resource (such as `SnowflakeResource`) that can query the `snowflake.account_usage.query_history` table. You can set up the ingestion pipeline like the following:

```python
from dagster_snowflake import SnowflakeResource
from dagster import Definitions, EnvVar

from dagster_cloud.dagster_insights import (
    create_snowflake_insights_asset_and_schedule,
)

snowflake_insights_definitions = create_snowflake_insights_asset_and_schedule(
    start_date="2023-10-5-00:00",
    snowflake_resource_key="snowflake_insights",
)

defs = Definitions(
    assets=[..., *snowflake_insights_definitions.assets],
    schedules=[..., snowflake_insights_definitions.schedule],
    resources={
        # Add your other resources here.
        "snowflake_insights": SnowflakeResource(
            account=EnvVar("SNOWFLAKE_PURINA_ACCOUNT"),
            user=EnvVar("SNOWFLAKE_PURINA_USER"),
            password=EnvVar("SNOWFLAKE_PURINA_PASSWORD"),
        ),
    }
)
```

In this example, the `snowflake_resource_key` refers to a Snowflake resource that has access to the `query_history` table.

Snowflake credit metrics should be available on the **Insights** tab in the Dagster UI within 24 hours of the ingestion job running.

---

## Related

diff --git a/docs/content/dagster-plus/insights/integrating-snowflake.mdx b/docs/content/dagster-plus/insights/integrating-snowflake.mdx
new file mode 100644
index 0000000000000..b1265a20f82ff
--- /dev/null
+++ b/docs/content/dagster-plus/insights/integrating-snowflake.mdx
@@ -0,0 +1,118 @@
---
title: "Integrating direct Snowflake usage with Dagster+ Insights | Dagster Docs"
description: "Integrating external metrics with Dagster Insights."

platform_type: "cloud"
---

# Integrating Snowflake usage with Dagster+ Insights

External metrics, such as Snowflake credits, can be integrated into the Dagster Insights UI. The [`dagster-cloud`](https://pypi.org/project/dagster-cloud/) package contains utilities for capturing and submitting external metrics about data operations to Dagster+ via an API.

If you use the [Snowflake resource](/\_apidocs/libraries/dagster-snowflake) to query Snowflake, use this guide to integrate Snowflake metrics into the Insights UI. For instructions on integrating usage of dbt models which run in Snowflake, see [Integrating Snowflake & dbt with Dagster+ Insights](/dagster-plus/insights/integrating-snowflake-and-dbt).
---

## Prerequisites

To complete the steps in this guide, you'll need:

- A Dagster+ account on the Pro plan
- Access to the [Dagster+ Insights feature](/dagster-plus/insights)
- Snowflake credentials which have access to the `snowflake.account_usage.query_history` table. For more information on granting access to this table, see the [Snowflake documentation](https://docs.snowflake.com/en/sql-reference/account-usage#enabling-the-snowflake-database-usage-for-other-roles).
- To install the following libraries:

  ```shell
  pip install dagster dagster-cloud dagster-snowflake
  ```

  **Note**: If you already have `dagster-cloud` installed, **make sure you're using version 1.5.8 or newer**.

---

## Step 1: Replace your Snowflake resources

The first step is to replace any existing Snowflake resources with `InsightsSnowflakeResource`. This resource is a drop-in replacement for the `SnowflakeResource`, but it also emits Snowflake usage metrics to the Dagster+ Insights API.

```python
from dagster import Definitions, EnvVar
from dagster_cloud.dagster_insights import InsightsSnowflakeResource

defs = Definitions(
    resources={
        "snowflake": InsightsSnowflakeResource(
            account=EnvVar("SNOWFLAKE_PURINA_ACCOUNT"),
            user=EnvVar("SNOWFLAKE_PURINA_USER"),
            password=EnvVar("SNOWFLAKE_PURINA_PASSWORD"),
        ),
    }
)
```

---

## Step 2: Create a metrics ingestion pipeline in Dagster

The second step is to create a Dagster pipeline that joins asset observation events with the Snowflake query history and calls the Dagster+ ingestion API. Snowflake usage information is available at a delay, so this pipeline will run on a schedule to ingest Snowflake usage information from the previous hour.

Note that you only need to create this pipeline in a single code location per deployment, even if you have assets in multiple code locations.

To do this, you'll need a Snowflake resource (such as `InsightsSnowflakeResource`) that can query the `snowflake.account_usage.query_history` table. You can set up the ingestion pipeline like the following:

```python
from dagster import Definitions, EnvVar

from dagster_cloud.dagster_insights import (
    InsightsSnowflakeResource,
    create_snowflake_insights_asset_and_schedule,
)

snowflake_insights_definitions = create_snowflake_insights_asset_and_schedule(
    start_date="2023-10-5-00:00",
    snowflake_resource_key="snowflake_insights",
)

defs = Definitions(
    assets=[..., *snowflake_insights_definitions.assets],
    schedules=[..., snowflake_insights_definitions.schedule],
    resources={
        # Add your other resources here.
        "snowflake_insights": InsightsSnowflakeResource(
            account=EnvVar("SNOWFLAKE_PURINA_ACCOUNT"),
            user=EnvVar("SNOWFLAKE_PURINA_USER"),
            password=EnvVar("SNOWFLAKE_PURINA_PASSWORD"),
        ),
    }
)
```

In this example, the `snowflake_resource_key` refers to a Snowflake resource that has access to the `query_history` table.

Once the pipeline runs, Snowflake credits will be visible in the **Insights** tab in the Dagster UI.

---

## Related

diff --git a/docs/content/dagster-plus/managing-deployments.mdx b/docs/content/dagster-plus/managing-deployments.mdx
new file mode 100644
index 0000000000000..8055b91a78e46
--- /dev/null
+++ b/docs/content/dagster-plus/managing-deployments.mdx
@@ -0,0 +1,103 @@
---
title: "Managing Dagster+ deployments | Dagster Docs"
---

# Managing Dagster+ deployments

Learn how to deploy your code to Dagster+, use command line tools, set up CI/CD, and define environment variables.
+ + + + + + + + +--- + +## Alerts + + + + + + + + + + + +--- + +## Environment variables and secrets + + + + + + + +--- + +## Branch deployments + + + + + + + diff --git a/docs/content/dagster-plus/managing-deployments/alerts.mdx b/docs/content/dagster-plus/managing-deployments/alerts.mdx new file mode 100644 index 0000000000000..ff77d7908619f --- /dev/null +++ b/docs/content/dagster-plus/managing-deployments/alerts.mdx @@ -0,0 +1,213 @@ +--- +title: "Dagster+ alerts | Dagster Docs" +description: "Create and configure alerts in Dagster+." + +platform_type: "cloud" +feature: "alerting" +--- + +# Dagster+ alerts + + + +Using Dagster+'s built-in alerting system, you can stay in the loop on the events occurring in your deployment. Dagster+ alerts integrate with several popular services, allowing you to deliver notifications when and where they're most needed. + +--- + +## How it works + +Alerts are created using **alert policies**. An alert policy defines [the conditions that will trigger an alert](#alert-policy-types) and [how the alert will be sent](#supported-notification-services). For example, you can create a policy that sends an alert to a Slack channel when any asset in a specific asset group fails to materialize. + +Alert policies are configured on a **per-deployment basis**. This means, for example, that asset alerts configured in a `prod` deployment are only applicable to assets in that deployment. + +### Alert policy types + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
- **Asset alert** - Triggers on:

  - **Asset materializations** - Failure or success
  - **Asset checks** - Error, warn, passed, or failure to execute. **Note**: By default, asset checks that fail will have a severity of `WARN`.

  Alerts can be scoped to asset groups or specific asset keys. Asset check alerts are sent for any checks on those assets. **If using Dagster+ Pro**, this type of alert also allows you to send alerts to asset owners. **Note**: External assets do not trigger asset alerts.

- **Run alert** - Triggers on job run success, failure, or time limit exceeded; may optionally include a set of configured tags. If an alert policy has no configured tags, all jobs will be eligible for that alert. Otherwise, only jobs that contain all the tags for a given alert policy are eligible for that alert.

  Each newly created organization starts with a long-running run alert policy, which sends an email to the email address used to create the organization when a job run exceeds 24 hours.

- **Schedule/Sensor alert** - Triggers when a schedule or sensor tick failure occurs for any schedule or sensor in the deployment. **Note**: Alerts are sent only when the schedule/sensor changes from **success** to **failure**, so subsequent failures won't trigger new alerts.

- **Code location error alert** - Triggers when a code location fails to load due to an error.

- **Agent downtime alert** - Triggers when a Hybrid agent hasn't heartbeated within the last five minutes. Available for **Hybrid deployments only**.

- **Insights metric alert** - Sends a notification when a [Dagster+ Insights](/dagster-plus/insights) metric exceeds or falls below a specified threshold over a specified time window. This can be used to alert on:

  - Dagster credit usage across a deployment or for a specific job
  - Performance regressions on asset or job runtime
  - Spend on external tools such as Snowflake or BigQuery credits

  Alerts can be scoped to the sum of any metric across an entire deployment, or for a specific job, asset group, or asset key. **Note**: Alerts are sent only when the threshold is first crossed, and will not be sent again until the value returns to expected levels. Insights data may become available up to 24 hours after run completion.

- **Credit budget alert (self-serve plans only)** - This feature is currently only available for Dagster+ self-serve plans (Solo and Starter). Sends a notification when your organization has reached the monthly credit limit. Each newly created organization starts with an alert policy of this type, directed at the email address used to create the organization.
    + +### Supported notification services + +Dagster+ can send notifications via: + +- [Email](/dagster-plus/managing-deployments/alerts/email), either to a list of recipients or asset owners +- [Microsoft Teams](/dagster-plus/managing-deployments/alerts/microsoft-teams) +- [Slack](/dagster-plus/managing-deployments/alerts/slack) +- [PagerDuty](/dagster-plus/managing-deployments/alerts/pagerduty) + +--- + +## Managing alert policies + +Managing alert policies can be accomplished by using: + +- [The Dagster+ UI](/dagster-plus/managing-deployments/alerts/managing-alerts-in-ui) +- [The `dagster-cloud` command-line interface (CLI)](/dagster-plus/managing-deployments/alerts/managing-alerts-cli) + +--- + +## Related + + + + + + + + + diff --git a/docs/content/dagster-plus/managing-deployments/alerts/email.mdx b/docs/content/dagster-plus/managing-deployments/alerts/email.mdx new file mode 100644 index 0000000000000..b414df3ee4078 --- /dev/null +++ b/docs/content/dagster-plus/managing-deployments/alerts/email.mdx @@ -0,0 +1,116 @@ +--- +title: "Email & Dagster+ | Dagster Docs" +description: "Create alerts to send emails for specified Dagster+ events." + +platform_type: "cloud" +feature: "alerting" +--- + +# Email & Dagster+ + + + +Dagster+ supports [sending alerts](/dagster-plus/managing-deployments/alerts) as emails to one or more recipients. + +--- + +## Prerequisites + +To follow the steps in this guide, you'll need: + +- **Organization, Admin, or Editor permissions in Dagster+**, which are required to create and manage alerts + +--- + +## Creating an alert policy + +Creating an alert policy can be done using the Dagster+ UI or the `dagster-cloud` CLI and a YAML file. + + + + +### In the UI + +1. Create and configure an alert policy. Refer to the [Managing alerts in Dagster+ guide](/dagster-plus/managing-deployments/alerts/managing-alerts-in-ui) for more info. + +2. In the **Notification Service** section of the **Create alert policy** window, select the **Email** option. + +3. In the field that displays, enter the email address notifications should be sent to. To add multiple recipients, press **Enter** after each email address. + + + + Email alert configured to alert two recipients + +4. Click **Save policy**. + + + + +### dagster-cloud CLI + +To define an email alert policy in code, use the `notification_service.email.email_addresses` key to provide a list of recipient email addresses. For example: + +```yaml file=/dagster_cloud/alerts/email_policy.yaml +# alert_policies.yaml + +alert_policies: + - name: "email-alert-policy" + description: "An alert policy to email company executives during job failure." + tags: + - key: "level" + value: "critical" + event_types: + - "JOB_FAILURE" + notification_service: + email: + email_addresses: + - "richard.hendricks@hooli.com" + - "nelson.bighetti@hooli.com" +``` + +Then, use the `dagster-cloud` CLI to set the alert policies for the deployment: + +```bash file=/dagster_cloud/alerts/cli_commands.yaml startafter=start_set_alerts endbefore=end_set_alerts +dagster-cloud deployment alert-policies sync -a /path/to/alert_policies.yaml +``` + +Refer to the [Managing alerts using the `dagster-cloud` CLI](/dagster-plus/managing-deployments/alerts/managing-alerts-cli) guide for more information. 
+ + + + +--- + +## Related + + + + + + + + + diff --git a/docs/content/dagster-plus/managing-deployments/alerts/managing-alerts-cli.mdx b/docs/content/dagster-plus/managing-deployments/alerts/managing-alerts-cli.mdx new file mode 100644 index 0000000000000..2d97cbe8c150c --- /dev/null +++ b/docs/content/dagster-plus/managing-deployments/alerts/managing-alerts-cli.mdx @@ -0,0 +1,136 @@ +--- +title: "Managing alert policies with the dagster-cloud CLI | Dagster Docs" +description: "Create and configure Dagster+ alerts using the dagster-cloud CLI." + +platform_type: "cloud" +feature: "alerting" +--- + +# Managing alert policies with the dagster-cloud CLI + + + +In this guide, we'll walk you through managing Dagster+ alert policies using the [`dagster-cloud` CLI](/dagster-plus/managing-deployments/dagster-plus-cli). Alert policies apply to a full deployment (ex: `prod`), not Branch Deployments. + +**Don't want to work in code?** You can also define and set alert policies using the [Dagster+ UI](/dagster-plus/managing-deployments/alerts/managing-alerts-in-ui). + +--- + +## How it works + +Alert policies are defined in a YAML file and then, using the `dagster-cloud` CLI, set for a deployment. The YAML file should contain a single top-level key (`alert_policies`) and contain a list of alert policy objects. For example: + +```yaml file=/dagster_cloud/alerts/email_policy.yaml +# alert_policies.yaml + +alert_policies: + - name: "email-alert-policy" + description: "An alert policy to email company executives during job failure." + tags: + - key: "level" + value: "critical" + event_types: + - "JOB_FAILURE" + notification_service: + email: + email_addresses: + - "richard.hendricks@hooli.com" + - "nelson.bighetti@hooli.com" +``` + +### Compatible event types + +Only certain `event_types` can be specified together when using the CLI to create alerts. For example, multiple run-based event types can be included in the same policy, but attempting to include a tick event (such as `TICK_FAILURE`) will result in an error. + +--- + +## Prerequisites + +To follow the steps in this guide, you'll need: + +- **To install the `dagster-cloud` CLI.** Refer to the [`dagster-cloud` CLI reference](/dagster-plus/managing-deployments/dagster-plus-cli#installing-the-cli) for instructions. +- **Organization, Admin, or Editor permissions in Dagster+**, which are required to create and manage alerts +- **Optional**: If using an integration like [Microsoft Teams](/dagster-plus/managing-deployments/alerts/microsoft-teams) or [Slack](/dagster-plus/managing-deployments/alerts/slack) as a notification service, complete the integration setup before proceeding. Refer to the [Supported notification services](/dagster-plus/managing-deployments/alerts#supported-notification-services) for a complete list of integrations. + +--- + +## Setting policies for a deployment + +In this example, we'll configure an email alert when a job fails. This job, named `important_job`, has a `level` tag of `"critical"`: + +```python +def important_computation(): + ... + + +@job(tags={"level": "critical"}) +def important_job(): + important_computation() +``` + +1. First, define a list of alert policies in a YAML file. For example, the following policy listens for jobs with a tag of `level: critical` to fail: + + ```yaml file=/dagster_cloud/alerts/email_policy.yaml + # alert_policies.yaml + + alert_policies: + - name: "email-alert-policy" + description: "An alert policy to email company executives during job failure." 
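    # Note: a job must carry ALL of the tags listed below to be
    # eligible for this alert policy.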
+ tags: + - key: "level" + value: "critical" + event_types: + - "JOB_FAILURE" + notification_service: + email: + email_addresses: + - "richard.hendricks@hooli.com" + - "nelson.bighetti@hooli.com" + ``` + +2. Next, use the following command to set the policies for the deployment: + + ```bash file=/dagster_cloud/alerts/cli_commands.yaml startafter=start_set_alerts endbefore=end_set_alerts + dagster-cloud deployment alert-policies sync -a /path/to/alert_policies.yaml + ``` + +--- + +## Viewing a deployment's policies + +To list the policies currently configured on the deployment, run: + +```bash file=/dagster_cloud/alerts/cli_commands.yaml startafter=start_view_alerts endbefore=end_view_alerts +dagster-cloud deployment alert-policies list +``` + +--- + +## Related + + + + + + + + + diff --git a/docs/content/dagster-plus/managing-deployments/alerts/managing-alerts-in-ui.mdx b/docs/content/dagster-plus/managing-deployments/alerts/managing-alerts-in-ui.mdx new file mode 100644 index 0000000000000..a02d55d9df1b4 --- /dev/null +++ b/docs/content/dagster-plus/managing-deployments/alerts/managing-alerts-in-ui.mdx @@ -0,0 +1,169 @@ +--- +title: "Managing alert policies in Dagster+ | Dagster Docs" +description: "Create and manage alert policies using the Dagster UI." + +platform_type: "cloud" +feature: "alerting" +--- + +# Managing alert policies in Dagster+ + + + +In this guide, we'll walk you through managing alert policies in Dagster+. + +**Prefer to work in code?** You can also define and set alert policies using the [`dagster-cloud` CLI](/dagster-plus/managing-deployments/alerts/managing-alerts-cli). + +--- + +## Prerequisites + +To follow the steps in this guide, you'll need: + +- **Organization, Admin, or Editor permissions in Dagster+**, which are required to create and manage alerts +- **Optional**: If using an integration like [Microsoft Teams](/dagster-plus/managing-deployments/alerts/microsoft-teams) or [Slack](/dagster-plus/managing-deployments/alerts/slack) as a notification service, complete the integration setup before proceeding. Refer to the [Supported notification services](/dagster-plus/managing-deployments/alerts#supported-notification-services) for a complete list of integrations. + +--- + +## Creating alert policies + +1. Sign in to your Dagster+ account. + +2. In the top navigation, click **Deployment**. + +3. Click the **Alerts** tab. + +4. Click **+ Create alert policy**. + +5. In the **Create alert policy** window, select an [alert type](/dagster-plus/managing-deployments/alerts#alert-policy-types) and enter a name and description for the alert. + +6. **If creating an asset or run alert policy**, you'll be prompted to fill in the following: + +
+
+   **Asset alert policies**
+
+   - **Target** - Select the assets that the alert targets:
+     - **All assets** - All assets in the deployment
+     - **Group** - All assets in a specific asset group in the deployment
+     - **Asset key** - A specific asset key in the deployment
+   - **Events** - Select the event(s) that should trigger the alert. For example: asset materialization failure, asset check passed.
+
+   **Run alert policies**
+
+   - **Tags** - Add tag(s) for the alert policy. Jobs with these tags will trigger the alert. For example: `level:critical` or `team:sales`
+   - **Events** - Select whether the alert should trigger on job success, failure, or both
+
    + +7. In the **Notification service** section, select how the alert will be delivered. Refer to the [Supported notification services](/dagster-plus/managing-deployments/alerts#supported-notification-services) section for information about setting up these services. + + **Note**: If creating an asset alert, an additional **Email asset owners (experimental)** option will be available. + +8. When finished, click **Save policy**. + +--- + +## Editing alert policies + +### Editing a single policy + +1. Click the **Edit** button to the right of the policy. +2. Make changes as desired. +3. When finished, click **Save policy** to save the changes. + +### Editing policies in bulk + +You can also edit policies in bulk by making changes to the policies' underlying YAML. + +1. Click **⌄ (More actions) > Edit policies**, which is next to the **Create alert policy** button: + + + + Highlighted More Actions menu in the Dagster+ UI + + This will open a window displaying all alert policies in YAML format: + + + + Editing YAML for multiple alert policies + +2. Make changes as desired. + +3. When finished, click **Save Schema** to save the changes. + +--- + +## Enabling and disabling alert policies + +To enable or disable an alert, use the toggle on the left side of the alert policy. + +--- + +## Deleting alert policies + +To delete an alert policy, click the **Delete** button next to the policy. When prompted, confirm the deletion. + +--- + +## Related + + + + + + + + diff --git a/docs/content/dagster-plus/managing-deployments/alerts/microsoft-teams.mdx b/docs/content/dagster-plus/managing-deployments/alerts/microsoft-teams.mdx new file mode 100644 index 0000000000000..15089f066cb16 --- /dev/null +++ b/docs/content/dagster-plus/managing-deployments/alerts/microsoft-teams.mdx @@ -0,0 +1,111 @@ +--- +title: "Microsoft Teams & Dagster+" +description: "Create policies that send alerts for Dagster+ events to Microsoft Teams." + +platform_type: "cloud" +feature: "alerting" +--- + +# Microsoft Teams & Dagster+ + + + +Dagster+ supports [sending alerts](/dagster-plus/managing-deployments/alerts) to Microsoft Teams using incoming webhooks. + +--- + +## Prerequisites + +To follow the steps in this guide, you'll need: + +- **Permissions in Microsoft Teams** that allow you to create an incoming webhook +- **Organization, Admin, or Editor permissions in Dagster+**, which are required to create and manage alerts + +--- + +## Step 1: Create an incoming webhook in Microsoft Teams + +To create an incoming webhook, follow the instructions in the [Microsoft Teams documentation](https://docs.microsoft.com/en-us/microsoftteams/platform/webhooks-and-connectors/how-to/add-incoming-webhook). + +Keep the webhook URL handy - you'll need it in the next step. + +--- + +## Step 2: Create an alert policy in Dagster+ + +Creating an alert policy can be done using the Dagster+ UI or the `dagster-cloud` CLI and a YAML file. + + + + +### In the UI + +1. Create and configure an alert policy. Refer to the [Managing alerts in Dagster+ guide](/dagster-plus/managing-deployments/alerts/managing-alerts-in-ui) for more info. +2. In the **Notification Service** section of the **Create alert policy** window, select the **Microsoft Teams** option. +3. Paste the Microsoft Teams webhook URL into the field that displays. +4. Click **Save policy**. + + + + +### dagster-cloud CLI + +To define a Microsoft Teams alert policy in code, use the `notification_service.microsoft_teams.webhook_url` key to provide the webhook URL. 
For example:
+
+```yaml file=/dagster_cloud/alerts/microsoft_teams_policy.yaml
+# alert_policies.yaml
+
+alert_policies:
+  - name: "ms-teams-alert-policy"
+    description: "An alert policy to send a Microsoft Teams webhook during job failure."
+    tags:
+      - key: "level"
+        value: "critical"
+    event_types:
+      - "JOB_FAILURE"
+    notification_service:
+      microsoft_teams:
+        webhook_url: "https://yourdomain.webhook.office.com/..."
+```
+
+Then, use the `dagster-cloud` CLI to set the alert policies for the deployment:
+
+```bash file=/dagster_cloud/alerts/cli_commands.yaml startafter=start_set_alerts endbefore=end_set_alerts
+dagster-cloud deployment alert-policies sync -a /path/to/alert_policies.yaml
+```
+
+Refer to the [Managing alerts using the `dagster-cloud` CLI](/dagster-plus/managing-deployments/alerts/managing-alerts-cli) guide for more information.
+
+---
+
+## Related
+
diff --git a/docs/content/dagster-plus/managing-deployments/alerts/pagerduty.mdx b/docs/content/dagster-plus/managing-deployments/alerts/pagerduty.mdx
new file mode 100644
index 0000000000000..8171f3a336e4b
--- /dev/null
+++ b/docs/content/dagster-plus/managing-deployments/alerts/pagerduty.mdx
@@ -0,0 +1,160 @@
+---
+title: "PagerDuty & Dagster+ | Dagster Docs"
+description: "Create alerts that make PagerDuty incidents based on events in Dagster+."
+
+platform_type: "cloud"
+feature: "alerting"
+integration: "pagerduty"
+---
+
+# PagerDuty & Dagster+
+
+Dagster+ supports [sending alerts](/dagster-plus/managing-deployments/alerts) to PagerDuty, which will automatically create a PagerDuty incident.
+
+---
+
+## PagerDuty & Dagster+ integration benefits
+
+- Empower your team with real-time incident response by integrating Dagster+ and PagerDuty
+- Notify on-call responders when alerts are triggered in Dagster+
+- Dramatically reduce downtime for issues related to asset materialization, asset checks, job runs, schedules, sensors, and code locations
+- Customize the urgency and routing of PagerDuty incidents based on the content of the Dagster+ event payload
+
+---
+
+## How it works
+
+- Events that trigger an alert in Dagster+ will automatically create a PagerDuty incident
+- Incidents can be triggered for events related to runs, assets, schedules, sensors, and code locations
+- Users must resolve incidents in PagerDuty. Dagster+ doesn't currently automatically resolve incidents.
+
+### Requirements
+
+To integrate PagerDuty with Dagster+, you'll need:
+
+- **Permissions in PagerDuty that allow you to manage services and integrations.** Refer to PagerDuty's [User roles](https://support.pagerduty.com/docs/user-roles) documentation for more information.
+- **The following in Dagster+**:
+  - **A Pro plan**
+  - **Organization, Admin, or Editor permissions**, which are required to create and manage alerts
+
+---
+
+## Integration walkthrough
+
+### In PagerDuty
+
+1. From the **Configuration** menu, click **Services > Service Directory**.
+
+2. There are two ways to add an integration to a service:
+
+   - **If adding your integration to an existing service**: Click the **name** of the service you want to add the integration to. Then, select the **Integrations** tab and click the **New Integration** button.
+
+   - **If creating a new service for your integration**: Refer to PagerDuty's [Configuring services and integrations documentation](https://support.pagerduty.com/docs/services-and-integrations#section-configuring-services-and-integrations) and follow the steps outlined in the [Create a new service](https://support.pagerduty.com/docs/services-and-integrations#section-create-a-new-service) section, selecting **Dagster+** as the **Integration Type** in step 4. Continue with the [**In Dagster+**](#in-dagster) section (below) once finished.
+
+3. Enter an **Integration Name** in the format `monitoring-tool-service-name` (e.g. `dagster-plus-run-failures`) and select **Dagster+** from the **Integration Type** menu.
+
+4. Click the **Add Integration** button to save your new integration. You will be redirected to the **Integrations** tab for your service.
+
+5. An **Integration Key** will be generated on this screen. Keep this key saved in a safe place, as it will be used when you configure the integration with **Dagster+** in the next section.
+
+   Highlighted integration key field in the PagerDuty UI
+
+### In Dagster+
+
+Creating an alert policy can be done using the Dagster+ UI or in code.
+
+#### In the UI
+
+1. Sign into your Dagster+ account.
+
+2. Create and configure an alert policy as per the [alert policy documentation](/dagster-plus/managing-deployments/alerts/managing-alerts-in-ui#creating-alert-policies).
+
+3. In the **Notification Service** section of the **Create alert policy** window:
+
+   1. Select **PagerDuty**.
+   2. In the field that displays, paste the PagerDuty integration key you created in the previous section.
+
+4. Click **Save policy**.
+
+#### dagster-cloud CLI
+
+To define a PagerDuty alert policy in code, use the `notification_service.pagerduty.integration_key` key to provide the integration key. For example:
+
+```yaml file=/dagster_cloud/alerts/pagerduty_policy.yaml
+# alert_policies.yaml
+
+alert_policies:
+  - name: "pagerduty-alert-policy"
+    description: "An alert policy to create a PagerDuty incident upon job failure."
+    tags:
+      - key: "level"
+        value: "critical"
+    event_types:
+      - "JOB_FAILURE"
+    notification_service:
+      pagerduty:
+        integration_key: ""
+```
+
+Then, use the `dagster-cloud` CLI to set the alert policies for the deployment:
+
+```bash file=/dagster_cloud/alerts/cli_commands.yaml startafter=start_set_alerts endbefore=end_set_alerts
+dagster-cloud deployment alert-policies sync -a /path/to/alert_policies.yaml
+```
+
+Refer to the [Managing alerts using the `dagster-cloud` CLI](/dagster-plus/managing-deployments/alerts/managing-alerts-cli) guide for more information.
+
+---
+
+## Uninstalling PagerDuty
+
+To uninstall PagerDuty, click the **Delete** button next to the policy. When prompted, confirm the deletion.
+
+---
+
+## Related
+
diff --git a/docs/content/dagster-plus/managing-deployments/alerts/slack.mdx b/docs/content/dagster-plus/managing-deployments/alerts/slack.mdx
new file mode 100644
index 0000000000000..4a52fbed84579
--- /dev/null
+++ b/docs/content/dagster-plus/managing-deployments/alerts/slack.mdx
@@ -0,0 +1,136 @@
+---
+title: "Slack & Dagster+ | Dagster Docs"
+description: "Create alerts that send messages to Slack channels for Dagster+ events."
+
+platform_type: "cloud"
+feature: "alerting"
+---
+
+# Slack & Dagster+
+
+Dagster+ supports [sending alerts](/dagster-plus/managing-deployments/alerts) to Slack channels using a Slack workspace app.
Messages will be posted using a `@Dagster+` bot user. + +--- + +## Prerequisites + +To follow the steps in this guide, you'll need: + +- **Permissions in Slack** that allow you to add apps to your Slack workspace +- **Organization, Admin, or Editor permissions in Dagster+**, which are required to create and manage alerts + +--- + +## Step 1: Connect Slack to Dagster+ + +1. Sign in to your Dagster+ account. +2. In the top navigation, click **Deployment**. +3. Click the **Alerts** tab. +4. Click **Connect to Slack** and complete the installation process. + +--- + +## Step 2: Invite the Dagster+ bot to Slack channels + +After the app is installed in your Slack workspace, invite the `@Dagster+` bot user to the channel where notifications should be sent. + +--- + +## Step 3: Create an alert policy in Dagster+ + +Creating an alert policy can be done using the Dagster+ UI or the `dagster-cloud` CLI and a YAML file. + + + + +### In the UI + +1. Create and configure an alert policy. Refer to the [Managing alerts in Dagster+ guide](/dagster-plus/managing-deployments/alerts/managing-alerts-in-ui) for more info. + +2. In the **Notification Service** section of the **Create alert policy** window, select the **Slack** option. + +3. In the field that displays, enter the name of the Slack channel you added the `@Dagster+` bot user to. + + **Note**: One channel is allowed per alert policy. To post alerts to multiple channels, first invite the `@Dagster+` bot and then create a new alert policy that posts to that channel. + +4. Click **Save policy**. + + + + +### dagster-cloud CLI + +To define a Slack alert policy in code, use the following keys to configure the alert: + +- `notification_service.slack.slack_workspace_name` - The name of the Slack workspace connected to Dagster+ +- `notification_service.slack.slack_channel_name` - The name of the Slack channel the `@Dagster+` user is a member of + +```yaml file=/dagster_cloud/alerts/slack_policy.yaml +# alert_policies.yaml + +alert_policies: + - name: "slack-alert-policy" + description: "An alert policy to send a Slack notification to sales on job failure or success." + tags: + - key: "team" + value: "sales" + event_types: + - "JOB_SUCCESS" + - "JOB_FAILURE" + notification_service: + slack: + slack_workspace_name: "hooli" + slack_channel_name: "sales-notifications" +``` + +Then, use the `dagster-cloud` CLI to set the alert policies for the deployment: + +```bash file=/dagster_cloud/alerts/cli_commands.yaml startafter=start_set_alerts endbefore=end_set_alerts +dagster-cloud deployment alert-policies sync -a /path/to/alert_policies.yaml +``` + +Refer to the [Managing alerts using the `dagster-cloud` CLI](/dagster-plus/managing-deployments/alerts/managing-alerts-cli) guide for more information. + + + + +--- + +## Disconnecting Slack + +To disconnect Dagster+ from Slack, remove the Dagster+ app from your Slack workspace. Refer to [Slack's documentation](https://slack.com/help/articles/360003125231-Remove-apps-and-custom-integrations-from-your-workspace#remove-an-app) for more info and instructions. + +Once the app is removed, refresh the **Alerts** page in Dagster+ and the **Connect to Slack** option will be displayed. 
+ +--- + +## Related + + + + + + + + + diff --git a/docs/content/dagster-plus/managing-deployments/branch-deployments.mdx b/docs/content/dagster-plus/managing-deployments/branch-deployments.mdx new file mode 100644 index 0000000000000..735b62447eae7 --- /dev/null +++ b/docs/content/dagster-plus/managing-deployments/branch-deployments.mdx @@ -0,0 +1,237 @@ +--- +title: Branch Deployments in Dagster+ | Dagster Docs +description: Develop and test in the cloud. +--- + +# Branch Deployments in Dagster+ + +Dagster+ provides out-of-the-box support for Continuous Integration (CI) with **Branch Deployments**. + +Branch Deployments automatically create staging environments of your Dagster code, right in Dagster+. For every push to a branch in your git repository, Dagster+ will create a unique deployment, allowing you to preview the changes in the branch in real-time. + +--- + +## Understanding Branch Deployments + +- [Overview](#overview) +- [Benefits](#benefits) +- [Requirements](#requirements) +- [Supported platforms](#supported-platforms) +- [Limitations](#limitations) +- [Output handling](#output-handling) +- [Change Tracking](#change-tracking) + +### Overview + +Think of a branch deployment as a branch of your data platform, one where you can preview changes without impacting production or overwriting a testing environment. + + + +Let's take a closer look: + +1. In your git repository, a new branch is created off of `main`. In the example above, this branch is named `feature-1`. + +2. Dagster+ is notified of the push and creates a branch deployment named `feature-1`. The branch deployment functions just like your `production` deployment of Dagster+, but contains the Dagster code changes from the `feature-1` branch. + + In this example, the `feature-1` branch deployment 'talks' to a `cloned schema` in a database. This is completely separate from the `prod schema` associated with the `production` deployment. + +3. For every push to the `feature-1` branch, the `feature-1` branch deployment in Dagster+ is rebuilt and redeployed. + +### Benefits + +Now that you know how Branch Deployments work, **why should you use them**? + +- **Improved collaboration.** Branch Deployments make it easy for everyone on your team to stay in the loop on the latest Dagster changes. +- **Reduced development cycle.** Quickly test and iterate on your changes without impacting production or overwriting a testing environment. + +### Requirements + +To use Branch Deployments, you'll need a [Dagster+ account](https://dagster.cloud/). + +### Supported platforms + +Branch Deployments can be used with any git or CI provider. However, setup is easiest with the [Dagster GitHub app](/dagster-plus/managing-deployments/branch-deployments/using-branch-deployments-with-github) or [Dagster Gitlab app](/dagster-plus/managing-deployments/branch-deployments/using-branch-deployments-with-gitlab) as parts of the process are automated. Refer to the [Setting up Branch Deployments section](#setting-up-branch-deployments) for more info. + +### Limitations + +Branch Deployments aren't currently supported for use in Dagster Open Source. + +### Output handling + +Output created from a branch deployment - such as a database, table, etc. - won't be automatically removed from storage once a branch is merged or closed. Refer to the [Best practices section](#best-practices) for info on how to handle this. 
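+
+For example, here's a minimal sketch of one way to make branch output easy to locate and clean up later. It assumes the reserved `DAGSTER_CLOUD_IS_BRANCH_DEPLOYMENT` and `DAGSTER_CLOUD_DEPLOYMENT_NAME` environment variables described in the [Best practices section](#best-practices) below; the schema-naming scheme itself is hypothetical:
+
+```python
+import os
+
+
+def target_schema() -> str:
+    # Dagster+ sets DAGSTER_CLOUD_IS_BRANCH_DEPLOYMENT to "1" in branch
+    # deployments (see the reserved environment variables reference).
+    if os.getenv("DAGSTER_CLOUD_IS_BRANCH_DEPLOYMENT") == "1":
+        # Prefix branch output with the deployment name so a cleanup job
+        # can find and drop it once the branch is merged or closed.
+        return f"branch_{os.getenv('DAGSTER_CLOUD_DEPLOYMENT_NAME', 'unknown')}"
+    return "prod"
+```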
+
+### Change Tracking
+
+When a Branch Deployment is deployed, it compares the asset definitions in the branch deployment with the asset definitions in the main deployment. The Dagster UI will then mark the changed assets, making it easy to identify changes. Refer to the [Branch Deployments Change Tracking](/dagster-plus/managing-deployments/branch-deployments/change-tracking) guide for more information and examples.
+
+---
+
+## Managing Branch Deployments
+
+- [Setting up Branch Deployments](#setting-up-branch-deployments)
+- [Accessing a branch deployment](#accessing-a-branch-deployment)
+
+### Setting up Branch Deployments
+
+There are currently two ways to set up Branch Deployments for Dagster+. In the table below:
+
+- **Platform** - The name of the git/CI platform, which is also a link to a setup guide
+- **How it works** - Summary of how Branch Deployments work with the platform
+- **May be a good fit if...** - A high-level summary of when the platform may be a good fit
+
+| Platform | How it works | May be a good fit if... |
+| --- | --- | --- |
+| [GitHub](/dagster-plus/managing-deployments/branch-deployments/using-branch-deployments-with-github) | GitHub Actions | You use GitHub for version control and want Dagster to fully automate Branch Deployments |
+| [Gitlab](/dagster-plus/managing-deployments/branch-deployments/using-branch-deployments-with-gitlab) | Gitlab CI/CD | You use Gitlab for version control and want Dagster to fully automate Branch Deployments |
+| [Other git/CI platform](/dagster-plus/managing-deployments/branch-deployments/using-branch-deployments) | `dagster-cloud` CLI | You don't use GitHub or Gitlab for version control, use an alternate CI platform, or want full control over Branch Deployment configuration |
+
+### Accessing a branch deployment
+
+Once configured, branch deployments can be accessed:
+
+Every pull request in the repository contains a **View in Cloud** link:
+
+Clicking the link will open a branch deployment - or a preview of the changes - in Dagster+.
+
+To access a Branch Deployment in Dagster+, you need permissions that grant you access to Branch Deployments and the code location associated with the Branch Deployment.
+
+You can also access branch deployments directly in Dagster+ from the **deployment switcher**:
+
+---
+
+## Best practices
+
+To ensure the best experience when using Branch Deployments, we recommend:
+
+- **Configuring jobs based on environment**. Dagster automatically sets [environment variables](/dagster-plus/managing-deployments/reserved-environment-variables) containing deployment metadata, allowing you to parameterize jobs based on the executing environment. Use these variables in your jobs to configure things like connection credentials, databases, and so on. This practice will allow you to use Branch Deployments without impacting production data.
+
+- **Creating jobs to automate output cleanup.** As Branch Deployments don't automatically remove [the output they create](#output-handling), you may want to create an additional Dagster job to perform the cleanup.
+
+**Want some help with implementation?** Check out the [Testing against production with Dagster+ Branch Deployments guide](/guides/dagster/branch_deployments) for a step-by-step look at implementing these best practices in your data pipelines.
diff --git a/docs/content/dagster-plus/managing-deployments/branch-deployments/change-tracking.mdx b/docs/content/dagster-plus/managing-deployments/branch-deployments/change-tracking.mdx
new file mode 100644
index 0000000000000..26145700051bf
--- /dev/null
+++ b/docs/content/dagster-plus/managing-deployments/branch-deployments/change-tracking.mdx
@@ -0,0 +1,264 @@
+---
+title: "Branch Deployment Change Tracking in Dagster+ | Dagster Docs"
+description: "Change tracking in Branch Deployments makes it easier for you and your team to identify how changes in a pull request will impact data assets."
+---
+
+# Branch Deployment Change Tracking in Dagster+
+
+Branch Deployments Change Tracking makes it easier for you and your team to identify how changes in a pull request will impact data assets. By the end of this guide, you'll understand how Change Tracking works and what types of asset changes can be detected.
+
+---
+
+## How it works
+
+Branch Deployments compare asset definitions in the branch deployment against the asset definitions in the main deployment. The UI will then mark changed assets accordingly. For example, if the pull request associated with the branch deployment adds a new asset, the UI will display a label indicating the addition.
+
+You can also apply filters to show only new and changed assets in the UI. This makes it easy to understand which assets will be impacted by the changes in the pull request associated with the branch deployment.
+
+**Note:** The default main deployment is `prod`. To configure a different deployment as the main deployment, [create a branch deployment using the dagster-cloud CLI](/dagster-plus/managing-deployments/branch-deployments/using-branch-deployments) and specify it using the optional `--base-deployment-name` parameter.
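+
+As a sketch, a hypothetical `create-or-update` invocation that compares the branch against a `staging` deployment instead of `prod` might look like the following (the required branch and commit flags are elided; see the [CLI guide](/dagster-plus/managing-deployments/branch-deployments/using-branch-deployments) for the full command):
+
+```shell
+dagster-cloud branch-deployment create-or-update \
+  --organization "$ORGANIZATION_NAME" \
+  --api-token "$DAGSTER_CLOUD_API_TOKEN" \
+  --base-deployment-name staging
+# ...plus the branch and commit metadata flags shown in the CLI guide
+```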
+
+---
+
+## Supported change types
+
+Change Tracking can detect the following changes to assets:
+
+- [New assets](#new-assets)
+- [Code versions](#code-versions)
+- [Upstream dependencies](#upstream-dependencies)
+- [Partitions definitions](#partitions-definitions)
+- [Tags](#tags)
+- [Metadata](#metadata)
+
+### New assets
+
+If an asset is new in the branch deployment, the asset will have a **New in branch** label in the asset graph:
+
+### Code versions
+
+If using the `code_version` argument on the asset decorator, Change Tracking can detect when this value changes.
+
+Some Dagster integrations, like `dagster-dbt`, automatically compute code versions for you. For more information on code versions, refer to the [Code versioning guide](/guides/dagster/asset-versioning-and-caching).
+
+In this example, the `customers` asset has a **Changed in branch** label indicating its `code_version` has been changed.
+
+Click the **Asset definition** tab to see the code change that created this label.
+
+**In the main branch**, we have a `customers` asset with a code version of `v1`:
+
+```python file=/dagster_cloud/branch_deployments/change_tracking_code_version.py startafter=start_main_deployment endbefore=end_main_deployment dedent=4
+@asset(code_version="v1")
+def customers(): ...
+```
+
+**In the pull request**, `customers` is modified to change the code version to `v2`:
+
+```python file=/dagster_cloud/branch_deployments/change_tracking_code_version.py startafter=start_branch_deployment endbefore=end_branch_deployment dedent=4
+@asset(code_version="v2")
+def customers(): ...
+```
+
+### Upstream dependencies
+
+Change Tracking can detect when an asset's upstream dependencies have changed, whether they've been added or removed.
+
+**Note**: If an asset is marked as having changed dependencies, it means that the code defining its upstream dependencies has changed. It doesn't mean that an upstream dependency has new data.
+
+In this example, the `returns` asset has a **Changed in branch** label indicating it has changed dependencies.
+
+Click the **Asset definition** tab to see the code change that created this label.
+
+**In the main branch**, we have a `returns` asset:
+
+```python file=/dagster_cloud/branch_deployments/change_tracking_dependencies.py startafter=start_main_deployment endbefore=end_main_deployment dedent=4
+@asset(deps=[orders])
+def returns(): ...
+```
+
+**In the pull request**, we added a new upstream dependency `customers`:
+
+```python file=/dagster_cloud/branch_deployments/change_tracking_dependencies.py startafter=start_branch_deployment endbefore=end_branch_deployment dedent=4
+@asset(deps=[orders, customers])
+def returns(): ...
+```
+
+### Partitions definitions
+
+Change Tracking can detect if an asset's partitions definition has been changed, whether it's been added, removed, or updated.
+
+In this example, the `weekly_orders` asset has a **Changed in branch** label indicating a changed partitions definition.
+
+Click the **Asset definition** tab to see the code change that created this label.
+
+**In the main branch**, we have a `weekly_orders` asset:
+
+```python file=/dagster_cloud/branch_deployments/change_tracking_partitions_definition.py startafter=start_main_deployment endbefore=end_main_deployment dedent=4
+@asset(partitions_def=WeeklyPartitionsDefinition(start_date="2024-01-01"))
+def weekly_orders(): ...
+```
+
+**In the pull request**, we updated the partitions definition to start one year earlier:
+
+```python file=/dagster_cloud/branch_deployments/change_tracking_partitions_definition.py startafter=start_branch_deployment endbefore=end_branch_deployment dedent=4
+@asset(partitions_def=WeeklyPartitionsDefinition(start_date="2023-01-01"))
+def weekly_orders(): ...
+```
+
+### Tags
+
+Change Tracking can detect when an [asset's tags](/concepts/metadata-tags/tags) have changed, whether they've been added, modified, or removed.
+
+In this example, the `fruits_in_stock` asset has a **Changed in branch** label indicating it has changed tags.
+
+Click the **Asset definition** tab to see the code change that created this label.
+
+**In the main branch**, we have a `fruits_in_stock` asset:
+
+```python file=/dagster_cloud/branch_deployments/change_tracking_tags.py startafter=start_main_deployment endbefore=end_main_deployment dedent=4
+@asset(tags={"section": "produce"})
+def fruits_in_stock(): ...
+```
+
+**In the pull request**, we added the `type: perishable` tag to `fruits_in_stock`:
+
+```python file=/dagster_cloud/branch_deployments/change_tracking_tags.py startafter=start_branch_deployment endbefore=end_branch_deployment dedent=4
+@asset(tags={"section": "produce", "type": "perishable"})
+def fruits_in_stock(): ...
+```
+
+### Metadata
+
+Change Tracking can detect when an [asset's definition metadata](/concepts/metadata-tags/asset-metadata#attaching-definition-metadata) has changed, whether it's been added, modified, or removed.
+
+In this example, the `products` asset has a **Changed in branch** label indicating it has changed metadata.
+
+Click the **Asset definition** tab to see the code change that created this label.
+
+**In the main branch**, we have a `products` asset:
+
+```python file=/dagster_cloud/branch_deployments/change_tracking_metadata.py startafter=start_main_deployment endbefore=end_main_deployment dedent=4
+@asset(metadata={"expected_columns": ["sku", "price", "supplier"]})
+def products(): ...
+```
+
+**In the pull request**, we update the value of the `expected_columns` metadata on `products`:
+
+```python file=/dagster_cloud/branch_deployments/change_tracking_metadata.py startafter=start_branch_deployment endbefore=end_branch_deployment dedent=4
+@asset(metadata={"expected_columns": ["sku", "price", "supplier", "backstock"]})
+def products(): ...
+```
+
+---
+
+## Related
+
diff --git a/docs/content/dagster-plus/managing-deployments/branch-deployments/using-branch-deployments-with-github.mdx b/docs/content/dagster-plus/managing-deployments/branch-deployments/using-branch-deployments-with-github.mdx
new file mode 100644
index 0000000000000..1a85e20c6ba2e
--- /dev/null
+++ b/docs/content/dagster-plus/managing-deployments/branch-deployments/using-branch-deployments-with-github.mdx
@@ -0,0 +1,231 @@
+---
+title: Using Branch Deployments (CI) with GitHub Actions | Dagster+
+description: Develop and test in the cloud.
+---
+
+# Using Branch Deployments (CI) with GitHub Actions
+
+This guide is applicable to Dagster+.
+
+In this guide, we'll walk you through setting up Continuous Integration (CI) using [Branch Deployments](/dagster-plus/managing-deployments/branch-deployments) with GitHub Actions.
+ +Using this approach to branch deployments may be a good fit if: + +- You use GitHub for version control +- You want Dagster to fully automate Branch Deployments + +**If you don't use GitHub for version control or want full control over your setup**, check out the [Branch deployments with the `dagster-cloud CLI` guide](/dagster-plus/managing-deployments/branch-deployments/using-branch-deployments). + +--- + +## Prerequisites + +To complete the steps in this guide, you'll need: + +- **Organization Admin** permissions in Dagster+ +- **The ability to run a new agent in your infrastructure**. This isn't required if you're using [Serverless deployment](/dagster-plus/deployment/serverless). +- **The ability to configure GitHub Actions for your repository**. This isn't required if you used the Dagster+ GitHub app to connect your repository as a [code location](/dagster-plus/managing-deployments/code-locations). + +--- + +## Step 1: Generate a Dagster+ agent token + + + +--- + +## Step 2: Create and configure an agent + + + + If using{" "} + Serverless deployment + + , this step isn't required. + + + + +--- + +## Step 3: Add GitHub workflow files to your repository + + + If you used the GitHub app to add the repository as a code location, this step + isn't required.{" "} + Skip to the next step. + + +In this step, you'll add the required GitHub workflow files to your GitHub repository. The files can be found in our [Hybrid quickstart repository](https://github.com/dagster-io/dagster-cloud-hybrid-quickstart) or [Serverless quickstart repository](https://github.com/dagster-io/dagster-cloud-serverless-quickstart), depending on the agent you are using. + +Copy the following files to your repository (the linked files are shown in the Hybrid repo, there are equivalents in the Serverless repo). + +- [`dagster_cloud.yaml`](https://github.com/dagster-io/dagster-cloud-hybrid-quickstart/blob/main/dagster_cloud.yaml) +- [`.github/workflows/dagster-cloud-deploy.yml`](https://github.com/dagster-io/dagster-cloud-hybrid-quickstart/blob/main/.github/workflows/dagster-cloud-deploy.yml) + +In the next step, you'll modify these files to work with your Dagster+ setup. + +--- + +## Step 4: Configure the GitHub repository + +In this section, you'll: + +1. [Add the agent registry to `dagster_cloud.yaml`](#step-41-add-the-agent-registry-to-dagster_cloudyaml) +2. [Configure GitHub Action secrets](#step-42-configure-github-action-secrets) +3. [Update GitHub Workflow files](#step-43-update-github-workflow-files) +4. [Verify Action runs](#step-44-verify-action-runs) + +### Step 4.1: Add the agent registry to dagster_cloud.yaml + + + If you're using Serverless deployment, this step isn't required.{" "} + Skip to the next step. + + +In the [`dagster_cloud.yaml` file](/dagster-plus/managing-deployments/dagster-cloud-yaml), replace `build.registry` with the registry used by the [agent you created in Step 2](#step-2-create-and-configure-an-agent). + +For example: + +```yaml +# dagster_cloud.yaml + +locations: + - location_name: example_location + code_source: + python_file: repo.py + build: + directory: ./example_location + registry: 764506304434.dkr.ecr.us-east-1.amazonaws.com/branch-deployment-agent +``` + +### Step 4.2: Configure GitHub Action secrets + + + If you used the GitHub app to add the repository as a code location, this step{" "} + isn't required. +

    +

+  Want to use secrets in your Dagster code? Check out the [Dagster+ environment variables and secrets guide](/dagster-plus/managing-deployments/environment-variables-and-secrets) for more info.
    + + + +**For Hybrid deployments,** repeat steps 3-6 for each of the secrets required for your registry type: + + + +Select the tab for your registry type to view instructions. + + + +- `DAGSTER_CLOUD_API_TOKEN` - The Dagster+ agent token you created in [Step 1](#step-1-generate-a-dagster-agent-token) +- `ORGANIZATION_ID` - Your Dagster+ organization ID +- `AWS_ACCESS_KEY` - The **Access key ID** of the AWS IAM user you created in [Step 2](#step-2-create-and-configure-an-agent) +- `AWS_SECRET_ACCESS_KEY` - The **Secret access key** of the AWS IAM user you created in [Step 2](#step-2-create-and-configure-an-agent) +- `AWS_REGION` - The AWS region where your ECR registry is located + +The **Actions secrets** page should look like the following: + + + + + + +- `DAGSTER_CLOUD_API_TOKEN` - The Dagster+ agent token you created in [Step 1](#step-1-generate-a-dagster-agent-token) +- `ORGANIZATION_ID` - Your Dagster+ organization ID +- `DOCKERHUB_USERNAME` - Your DockerHub username +- `DOCKERHUB_TOKEN` - A DockerHub [access token](https://docs.docker.com/docker-hub/access-tokens/#create-an-access-token) + +The **Actions secrets** page should look like the following: + + + + + + +### Step 4.3: Update GitHub Workflow files + + + If you're using Serverless deployment, this step isn't required.{" "} + Skip to the next step. + + +In this step, you'll update the GitHub Workflow files in the repository to set up Docker registry access. + +In the `.github/workflows/dagster-cloud-deploy.yml` file, uncomment the `step` associated with your registry. For example, for an Amazon ECR registry, you'd uncomment the following portion of the workflow file: + +```yaml +jobs: + dagster-cloud-deploy: + steps: + - name: Configure AWS credentials + uses: aws-actions/configure-aws-credentials@v1 + with: + aws-access-key-id: ${{ secrets.AWS_ACCESS_KEY_ID }} + aws-secret-access-key: ${{ secrets.AWS_SECRET_ACCESS_KEY }} + aws-region: ${{ secrets.AWS_REGION }} +``` + +Save and commit the file to the repository. + +### Step 4.4: Verify Action runs + +The last step is to verify that the Action runs successfully. + +1. In the repository, click the **Actions** tab. +2. In the list of workflows, locate the latest branch deployment run. For example: + + A successful run for a Branch Deployment Action + +If the run finished successfully, that's it! + +--- + +## Step 5: Access the branch deployment + +Now that Branch Deployments are configured, you can check out the preview in Dagster+. There are two ways to do this: + +- [From a pull request](#from-a-pull-request) +- [Directly in Dagster+](#directly-in-dagster) + +### From a pull request + +Every pull request in the repository contains a **View in Cloud** link: + + + +Clicking the link will open a branch deployment - or a preview of the changes - in Dagster+. + +### Directly in Dagster+ + +You can also access branch deployments directly in Dagster+ from the **deployment switcher**: + + diff --git a/docs/content/dagster-plus/managing-deployments/branch-deployments/using-branch-deployments-with-gitlab.mdx b/docs/content/dagster-plus/managing-deployments/branch-deployments/using-branch-deployments-with-gitlab.mdx new file mode 100644 index 0000000000000..1db16b2c3247b --- /dev/null +++ b/docs/content/dagster-plus/managing-deployments/branch-deployments/using-branch-deployments-with-gitlab.mdx @@ -0,0 +1,204 @@ +--- +title: Using Branch Deployments with Gitlab CI/CD | Dagster+ +description: Develop and test in the cloud. 
+--- + +# Using Branch Deployments with Gitlab CI/CD + +This guide is applicable to Dagster+. + +In this guide, we'll walk you through setting up [Branch Deployments](/dagster-plus/managing-deployments/branch-deployments) with Gitlab CI/CD. + +Using this approach to branch deployments may be a good fit if: + +- You use Gitlab for version control +- You want Dagster to fully automate Branch Deployments + +**If you don't use Gitlab for version control or want full control over your setup**, check out the [Branch deployments with the `dagster-cloud CLI` guide](/dagster-plus/managing-deployments/branch-deployments/using-branch-deployments). + +--- + +## Prerequisites + +To complete the steps in this guide, you'll need: + +- **Organization Admin** permissions in Dagster+ +- **The ability to run a new agent in your infrastructure**. This isn't required if you're using [Serverless deployment](/dagster-plus/deployment/serverless). +- **The ability to configure Gitlab CI/CD for your project**. This isn't required if you used the Dagster+ Gitlab app to connect your project as a [code location](/dagster-plus/managing-deployments/code-locations). + +--- + +## Step 1: Generate a Dagster+ agent token + + + +--- + +## Step 2: Create and configure an agent + + + + If using{" "} + Serverless deployment + + , this step isn't required. + + + + +--- + +## Step 3: Add Gitlab CI/CD script to your project + + + If you used the Gitlab app to add the project as a code location, this step + isn't required.{" "} + Skip to the next step. + + +In this step, you'll add the required Gitlab CI config file to your Gitlab project. The file can be found in our [CI/CD workflow repository](https://github.com/dagster-io/dagster-cloud-action) or [Serverless quickstart repository](https://github.com/dagster-io/dagster-cloud-serverless-quickstart), depending on the agent you are using. + +Copy the following files to your project (the linked files are shown for Hybrid repos, there are equivalents for Serverless repos). + +- [`dagster_cloud.yaml`](https://github.com/dagster-io/dagster-cloud-hybrid-quickstart/blob/main/dagster_cloud.yaml) +- [`.gitlab-ci.yml`](https://github.com/dagster-io/dagster-cloud-action/blob/main/gitlab/hybrid-ci.yml) + +In the next step, you'll modify these files to work with your Dagster+ setup. + +--- + +## Step 4: Configure the Gitlab project + +In this section, you'll: + +1. [Add the agent registry to `dagster_cloud.yaml`](#step-41-add-the-agent-registry-to-dagster_cloudyaml) +2. [Configure Gitlab CI/CD variables](#step-42-configure-gitlab-cicd-variables) +3. [Update Gitlab CI/CD script](#step-43-update-gitlab-cicd-script) +4. [Verify CI/CD pipeline runs](#step-44-verify-pipeline-runs) + +### Step 4.1: Add the agent registry to dagster_cloud.yaml + + + If you're using Serverless deployment, this step isn't required.{" "} + Skip to the next step. + + +In the [`dagster_cloud.yaml` file](/dagster-plus/managing-deployments/dagster-cloud-yaml), replace `build.registry` with the registry used by the [agent you created in Step 2](#step-2-create-and-configure-an-agent). + +For example: + +```yaml +# dagster_cloud.yaml + +locations: + - location_name: example_location + code_source: + python_file: repo.py + build: + directory: ./example_location + registry: 764506304434.dkr.ecr.us-east-1.amazonaws.com/branch-deployment-agent +``` + +### Step 4.2: Configure Gitlab CI/CD variables + + + If you used the Gitlab app to add the project as a code location, this step + isn't required. +

    +

+  Want to use secrets in your Dagster code? Check out the [Dagster+ environment variables and secrets guide](/dagster-plus/managing-deployments/environment-variables-and-secrets) for more info.
    + + + +**For Hybrid deployments,** repeat steps 3-6 for each of the secrets required for your registry type: + + + + + +- `DAGSTER_CLOUD_API_TOKEN` - The Dagster+ agent token you created in [Step 1](#step-1-generate-a-dagster-agent-token) +- `DAGSTER_CLOUD_URL` - Your Dagster+ base URL (e.g. `https://my_org.dagster.cloud`) + + + + + +- `DAGSTER_CLOUD_API_TOKEN` - The Dagster+ agent token you created in [Step 1](#step-1-generate-a-dagster-agent-token) +- `DAGSTER_CLOUD_URL` - Your Dagster+ base URL (e.g. `https://my_org.dagster.cloud`) +- `DOCKERHUB_USERNAME` - Your DockerHub username +- `DOCKERHUB_TOKEN` - A DockerHub [access token](https://docs.docker.com/docker-hub/access-tokens/#create-an-access-token) + + + + + +- `DAGSTER_CLOUD_API_TOKEN` - The Dagster+ agent token you created in [Step 1](#step-1-generate-a-dagster-agent-token) +- `DAGSTER_CLOUD_URL` - Your Dagster+ base URL (e.g. `https://my_org.dagster.cloud`) +- `AWS_ACCESS_KEY` - The **Access key ID** of the AWS IAM user you created in [Step 2](#step-2-create-and-configure-an-agent) +- `AWS_SECRET_ACCESS_KEY` - The **Secret access key** of the AWS IAM user you created in [Step 2](#step-2-create-and-configure-an-agent) +- `AWS_REGION` - The AWS region where your ECR registry is located + + + + + +- `DAGSTER_CLOUD_API_TOKEN` - The Dagster+ agent token you created in [Step 1](#step-1-generate-a-dagster-agent-token) +- `DAGSTER_CLOUD_URL` - Your Dagster+ base URL (e.g. `https://my_org.dagster.cloud`) +- `GCR_JSON_KEY` - Your GCR JSON credentials + + + + + +### Step 4.3: Update Gitlab CI/CD script + + + If you're using Serverless deployment, this step isn't required.{" "} + Skip to the next step. + + +In this step, you'll update the Gitlab CI/CD config to set up Docker registry access. + +In the `.gitlab-ci.yml` file, uncomment the `step` associated with your registry. For example, for the Gitlab container registry, you'd uncomment the following portion of the `.gitlab-ci.yml` file: + +```yaml +build-image: + ... + before_script: + # For Gitlab Container Registry + - echo $CI_JOB_TOKEN | docker login --username $CI_REGISTRY_USER --password-stdin $REGISTRY_URL +``` + +Save and commit the files to the project. + +### Step 4.4: Verify pipeline runs + +The last step is to verify that the Gitlab pipeline runs successfully. + +1. On the project page, click the **CI/CD** tab. +2. In the list of pipelines, locate the latest branch deployment run. For example: + + A successful run for a Branch Deployment Action + +If the run finished successfully, that's it! 
+ +--- + +## Step 5: Access the branch deployment + +Now that Branch Deployments are configured, you can check out the preview in Dagster+, by accessing the branch deployment from the **deployment switcher**: + + diff --git a/docs/content/dagster-cloud/managing-deployments/branch-deployments/using-branch-deployments.mdx b/docs/content/dagster-plus/managing-deployments/branch-deployments/using-branch-deployments.mdx similarity index 75% rename from docs/content/dagster-cloud/managing-deployments/branch-deployments/using-branch-deployments.mdx rename to docs/content/dagster-plus/managing-deployments/branch-deployments/using-branch-deployments.mdx index bd7b83686419e..7fcf0b877eee8 100644 --- a/docs/content/dagster-cloud/managing-deployments/branch-deployments/using-branch-deployments.mdx +++ b/docs/content/dagster-plus/managing-deployments/branch-deployments/using-branch-deployments.mdx @@ -1,13 +1,13 @@ --- -title: Using Branch Deployments (CI) with the dagster-cloud CLI | Dagster Cloud +title: Using Branch Deployments (CI) with the dagster-cloud CLI | Dagster+ description: Develop and test in the cloud. --- # Using Branch Deployments (CI) with the dagster-cloud CLI -This guide is applicable to Dagster Cloud. +This guide is applicable to Dagster+. -In this guide, we'll walk you through setting up Continuous Integration (CI) [Branch Deployments](/dagster-cloud/managing-deployments/branch-deployments) with a general continuous integration (CI) or git platform, using the [`dagster-cloud` CLI](/dagster-cloud/managing-deployments/dagster-cloud-cli). +In this guide, we'll walk you through setting up Continuous Integration (CI) [Branch Deployments](/dagster-plus/managing-deployments/branch-deployments) with a general continuous integration (CI) or git platform, using the [`dagster-cloud` CLI](/dagster-plus/managing-deployments/dagster-plus-cli). Using this approach to branch deployments may be a good fit if: @@ -15,7 +15,7 @@ Using this approach to branch deployments may be a good fit if: - You use an alternate CI platform - You want full control over Branch Deployment configuration -**If you use GitHub for version control or want Dagster to automate branch deployments**, check out the dedicated [Branch deployments with GitHub guide](/dagster-cloud/managing-deployments/branch-deployments/using-branch-deployments-with-github). +**If you use GitHub for version control or want Dagster to automate branch deployments**, check out the dedicated [Branch deployments with GitHub guide](/dagster-plus/managing-deployments/branch-deployments/using-branch-deployments-with-github). --- @@ -23,14 +23,14 @@ Using this approach to branch deployments may be a good fit if: Utilizing Branch Deployments requires setting up two components: the Branch Deployment agent and CI platform. You'll need: -- **Organization Admin** permissions in Dagster Cloud -- To install the [`dagster-cloud` CLI](/dagster-cloud/managing-deployments/dagster-cloud-cli) -- The ability to run a new agent in your infrastructure +- **Organization Admin** permissions in Dagster+ +- To install the [`dagster-cloud` CLI](/dagster-plus/managing-deployments/dagster-plus-cli) - The ability to configure your CI platform +- The ability to run a new agent in your infrastructure. This isn't required if you're using [Serverless deployment](/dagster-plus/deployment/serverless). 
--- -## Step 1: Generate a Dagster Cloud agent token +## Step 1: Generate a Dagster+ agent token @@ -38,15 +38,23 @@ Utilizing Branch Deployments requires setting up two components: the Branch Depl ## Step 2: Create and configure an agent + + If you're using Serverless deployment, this step isn't required.{" "} + + Skip to Step 3 + + . + + --- ## Step 3: Create a branch deployment using the dagster-cloud CLI -Next, you'll create a branch deployment using the `dagster-cloud` CLI. When the state of a branch or merge request is updated, Dagster Cloud first expects these steps to occur: +Next, you'll create a branch deployment using the `dagster-cloud` CLI. When the state of a branch or merge request is updated, Dagster+ first expects these steps to occur: -1. A new image containing your code and requirements is built on the branch. Refer to the [Dagster code requirements guide](/dagster-cloud/managing-deployments/code-locations). +1. A new image containing your code and requirements is built on the branch. Refer to the [Dagster code requirements guide](/dagster-plus/managing-deployments/code-locations). 2. The new image is pushed to a Docker registry accessible to your agent. **Note**: The following examples assume the registry URL and image tag are stored in the `LOCATION_REGISTRY_URL` and `IMAGE_TAG` environment variables, respectively. @@ -66,7 +74,7 @@ After the above has occurred: ) ``` - One or more additional parameters can optionally be supplied to the `create-or-update` command to enhance the Branch Deployments UI in Dagster Cloud: + One or more additional parameters can optionally be supplied to the `create-or-update` command to enhance the Branch Deployments UI in Dagster+: ```shell BRANCH_DEPLOYMENT_NAME=$( @@ -86,6 +94,7 @@ After the above has occurred: --author-name $NAME \ # A display name for the latest commit's author --author-email $EMAIL \ # An email for the latest commit's author --author-avatar-url $AVATAR_URL # An avatar URL for the latest commit's author + --base-deployment-name $BASE_DEPLOYMENT_NAME # The main deployment that will be compared against. Default is 'prod' ) ``` @@ -106,7 +115,7 @@ After the above has occurred: 2. Deploy the code to the branch deployment: ```shell - dagster-cloud workspace add-location \ + dagster-cloud deployment add-location \ --organization $ORGANIZATION_NAME \ --deployment $BRANCH_DEPLOYMENT_NAME \ --api-token $DAGSTER_CLOUD_API_TOKEN \ @@ -117,18 +126,18 @@ After the above has occurred: --git-url "${GIT_URL}" ``` - Refer to the [Code location guide](/dagster-cloud/managing-deployments/code-locations) for more info on how a location's details are specified. + Refer to the [Code location guide](/dagster-plus/managing-deployments/code-locations) for more info on how a location's details are specified. --- ## Step 4: Access the branch deployment -Now that Branch Deployments are configured, you can check out the preview in Dagster Cloud. +Now that Branch Deployments are configured, you can check out the preview in Dagster+. Click the **deployment switcher** to view your workspace's branch deployments: This guide is applicable to Dagster+. + +A code location specifies a single Python package or file that defines your Dagster code. When you add a code location in Dagster+, you're instructing your deployment's agent where to find your code. This applies to both Hybrid and Serverless deployments. + +Dagster+ agents use this location configuration to load your code and upload metadata about your jobs. 
Each full deployment - for example, `prod` - can include code from one or more code locations. + +By the end of this guide, you'll understand: + +- The requirements for Dagster code to work with Dagster+ +- How to interact with code locations using the Dagster UI +- How to interact with code locations using the dagster-cloud CLI + +--- + +## Dagster+ code requirements + + + Learn by example? Check out the{" "} + + example repo + + , which is set up to run in Dagster+. + + +To work with Dagster+, your Dagster code: + +- **Must be loaded from a single entry point, either a Python file or package.** This entry point can load repositories from other files or packages. + +- **Must run in an environment where the `dagster` and [`dagster-cloud`](/dagster-plus/managing-deployments/dagster-plus-cli) 0.13.2 or later Python packages are installed.** + +- **If using [Hybrid Deployment](/dagster-plus/deployment/hybrid)**: + + - **And you're using an Amazon Elastic Container Service (ECS), Kubernetes, or Docker agent**, your code must be packaged into a Docker image and pushed to a registry your agent can access. Dagster+ doesn't need access to your image - your agent only needs to be able to pull it. + + Additionally, the Dockerfile for your image doesn't need to specify an entry point or command. These will be supplied by the agent when it runs your code using your supplied image. + + - **And you're using a local agent**, your code must be in a Python environment that can be accessed on the same machine as your agent. + +Additionally, note that: + +- Your code doesn't need to use the same version of Dagster as your agent +- Different code locations can use different versions of Dagster +- Dagster+ doesn't require a [`workspace.yaml` file](/concepts/code-locations/workspace-files). You can still create a `workspace.yaml` file to load your code in an open source Dagster webserver instance, but doing so won't affect how your code is loaded in Dagster+. + +--- + +## Interacting with code locations + + + Editor, Admin, or{" "} + Organization Admin permissions are required to manage code + locations in Dagster+. +
    +
    + If you're an Editor or Admin, you can only manage + code locations in deployments where you're an Editor or + Admin + . +
+
+Interacting with code locations can be done using the Dagster+ UI or the [dagster-cloud CLI](/dagster-plus/managing-deployments/dagster-plus-cli). The CLI commands perform the same underlying operations as those in the **Deployment** tab of the UI.
+
+### Adding code locations
+
+For **Serverless deployments**, there are two ways to deploy your code to Dagster+:
+
+- **Start from a template** - Use one of our quickstart templates to get up and running. All templates come with CI/CD already configured and will be cloned to a new git repository.
+
+- **Import an existing project** - Import an existing git repository using our GitHub or GitLab integrations or the [dagster-cloud CLI](/dagster-plus/managing-deployments/dagster-plus-cli). **Note**: If using the GitHub or GitLab integrations, Dagster+ will:
+
+  - Add a [`dagster_cloud.yaml`](/dagster-plus/managing-deployments/dagster-cloud-yaml) to the repository if one doesn't already exist, and
+  - Set up [CI/CD](/dagster-plus/references/ci-cd-file-reference)
+
+Select an option in the UI and follow the prompts.
+
+1. Sign in to your Dagster+ account.
+
+2. Click **Deployment**.
+
+3. Click **+ Add code location**. This will open a YAML editor with a schema describing the acceptable fields:
+
+   Add Code Location Config Editor
+
+4. In the editor, define the code location's configuration. Refer to the [`dagster_cloud.yaml` reference](/dagster-plus/managing-deployments/dagster-cloud-yaml) for information about the properties in the configuration.
+
+   For example, the following config specifies that a code location should include a secret named `my_secret` and run in a k8s namespace (`my_namespace`) whenever the Kubernetes agent creates a pod for the location:
+
+   ```yaml
+   location_name: cloud-examples
+   image: dagster/dagster-cloud-examples:latest
+   code_source:
+     package_name: dagster_cloud_examples
+   container_context:
+     k8s:
+       namespace: my_namespace
+       env_secrets:
+         - my_secret
+   ```
+
+5. When finished, click **Add code location**.
+
+The agent will attempt to load your code and send its metadata to Dagster+. **Note**: This may take some time.
+
+Once your code has loaded, the location will show a green **Loaded** status and jobs will appear in Dagster+. If the agent is unable to load your code, the location will show an error with more information.
+
+Code locations can be added using the `deployment add-location` command. For example, to add Dagster+'s public example image, run:
+
+```shell
+# Set up YAML file for example location
+cat > example_location.yaml <<EOL
+location_name: cloud-examples
+image: dagster/dagster-cloud-examples:latest
+code_source:
+  package_name: dagster_cloud_examples
+EOL
+
+dagster-cloud deployment add-location --from example_location.yaml
+```
+
+---
+
+### Modifying code locations
+
+To modify a code location, click the **dropdown menu** to the right of the location. In the menu, click **Modify**:
+
+After a code location is updated, the agent will perform a rolling update of your code and jobs will update in Dagster+. **Note**: Updating code won't interrupt any currently launched runs.
+
+Heads up! When using this command, the full set of information about the location must be specified even if only one piece of configuration is modified.
+ + +```shell +dagster-cloud deployment update-location [LOCATION_NAME] +``` + +To see all available options, run: + +```shell +dagster-cloud deployment update-location -h +``` + + + + +--- + +### Redeploying code locations + +To reload your code and upload job metadata to Dagster+ without modifying the code location, click the **Redeploy** button: + + + +For example, if the agent was unable to pull your image due to a permissions issue that's since been addressed, clicking **Redeploy** will tell the agent to try again. + +--- + +### Deleting code locations + + + + +To delete a code location, click the **dropdown menu** to the right of the location. In the menu, click **Remove**: + + + +When prompted, confirm the deletion. + + + + +```shell +dagster-cloud deployment delete-location [LOCATION_NAME] +``` + +To see all available options, run: + +```shell +dagster-cloud deployment delete-location -h +``` + + + + +--- + +### Syncing the workspace with the CLI + +The YAML configuration for all code locations in a workspace can be kept in a [`dagster_cloud.yaml`](/dagster-plus/managing-deployments/dagster-cloud-yaml) file and reconciled with the workspace config in Dagster+ using the `deployment sync-locations` command. + +For example, if you have the following `dagster_cloud.yaml` file: + +```yaml caption=dagster_cloud.yaml +locations: + - location_name: machine-learning + image: myregistry/dagster-machine-learning:mytag + code_source: + package_name: dagster_cloud_machine_learning + executable_path: /my/folder/python_executable + attribute: my_repo + - location_name: data-eng + image: myregistry/dagster-data-eng:myothertag + code_source: + python_file: repo.py + working_directory: /my/folder/working_dir/ +``` + +Run the following to reconcile the file with Dagster+'s remote workspace: + +```shell +dagster-cloud deployment sync-locations -w dagster_cloud.yaml +``` + +To see all available options, run: + +```shell +dagster-cloud deployment sync-locations -h +``` + +--- + +## Related + + + + + + diff --git a/docs/content/dagster-plus/managing-deployments/controlling-logs.mdx b/docs/content/dagster-plus/managing-deployments/controlling-logs.mdx new file mode 100644 index 0000000000000..d8d61bd95d2fe --- /dev/null +++ b/docs/content/dagster-plus/managing-deployments/controlling-logs.mdx @@ -0,0 +1,96 @@ +--- +title: "Managing Compute Logs and Error Messages in Dagster+ | Dagster Docs" +--- + +# Managing compute logs and error messages in Dagster+ + +This guide is applicable to Dagster+. + +In this guide, we'll cover how to adjust where Dagster+ compute logs are stored and manage masking of error messages in the Dagster+ UI. + +By default, Dagster+ ingests [structured event logs and compute logs](/concepts/logging#log-types) from runs and surfaces error messages from [code locations](/concepts/code-locations) in the UI. + +Depending on your organization's needs, you may want to retain these logs in your own infrastructure or mask error message contents. + +--- + +## Modifying compute log storage + +Dagster's compute logs are handled by the configured [`ComputeLogManager`](/\_apidocs/internals#dagster.\_core.storage.compute_log_manager.ComputeLogManager). By default, Dagster+ utilizes the `CloudComputeLogManager` which stores logs in a Dagster+-managed Amazon S3 bucket, but you can customize this behavior to store logs in a destination of your choice. 
+ +### Writing to your own S3 bucket + +If using the Kubernetes agent, you can instead forward logs to your own S3 bucket by using the [`S3ComputeLogManager`](/\_apidocs/libraries/dagster-aws#dagster_aws.s3.S3ComputeLogManager). + +You can configure the `S3ComputeLogManager` in your [`dagster.yaml` file](/dagster-plus/deployment/agents/customizing-configuration): + +```yaml +compute_logs: + module: dagster_aws.s3.compute_log_manager + class: S3ComputeLogManager + config: + show_url_only: true + bucket: your-compute-log-storage-bucket + region: your-bucket-region +``` + +If you are using Helm to deploy the Kubernetes agent, you can provide the following configuration in your `values.yaml` file: + +```yaml +computeLogs: + enabled: true + custom: + module: dagster_aws.s3.compute_log_manager + class: S3ComputeLogManager + config: + show_url_only: true + bucket: your-compute-log-storage-bucket + region: your-bucket-region +``` + +### Disabling compute log upload + +If your organization has its own logging solution which ingests `stdout` and `stderr` from your compute environment, you may want to disable compute log upload entirely. You can do this with the `NoOpComputeLogManager`. + +You can configure the `NoOpComputeLogManager` in your [`dagster.yaml` file](/dagster-plus/deployment/agents/customizing-configuration): + +```yaml +compute_logs: + module: dagster.core.storage.noop_compute_log_manager + class: NoOpComputeLogManager +``` + +If you are using Helm to deploy the Kubernetes agent, use the `enabled` flag to disable compute log upload: + +```yaml +computeLogs: + enabled: false +``` + +### Other compute log storage options + +For a full list of available compute log storage options, refer to the [Compute log storage docs](/deployment/dagster-instance#compute-log-storage). + +--- + +## Masking error messages + +By default, Dagster+ surfaces error messages from your code locations in the UI, including when runs fail, sensors or schedules throw an exception, or code locations fail to load. You can mask these error messages if their contents are sensitive. + +To mask error messages in a Dagster+ Deployment, set the environment variable `DAGSTER_REDACT_USER_CODE_ERRORS` equal to `1` using the [**Environment variables** page](/dagster-plus/managing-deployments/environment-variables-and-secrets) in the UI: + + + +Once set, error messages from your code locations will be masked in the UI. A unique error ID will be generated, which you can use to look up the error message in your own logs. This error ID will appear in place of the error message in UI dialogs or in a run's event logs. + + diff --git a/docs/content/dagster-cloud/managing-deployments/dagster-cloud-yaml.mdx b/docs/content/dagster-plus/managing-deployments/dagster-cloud-yaml.mdx similarity index 86% rename from docs/content/dagster-cloud/managing-deployments/dagster-cloud-yaml.mdx rename to docs/content/dagster-plus/managing-deployments/dagster-cloud-yaml.mdx index da3916819a762..9310a27495f24 100644 --- a/docs/content/dagster-cloud/managing-deployments/dagster-cloud-yaml.mdx +++ b/docs/content/dagster-plus/managing-deployments/dagster-cloud-yaml.mdx @@ -1,11 +1,11 @@ --- title: dagster_cloud.yaml reference | Dagster Docs -description: Use dagster_cloud.yaml to configure code locations for Dagster Cloud +description: Use dagster_cloud.yaml to configure code locations for Dagster+ --- # dagster_cloud.yaml -This reference is applicable to Dagster Cloud. +This reference is applicable to Dagster+.
Required - + @@ -68,8 +68,8 @@ description: Use dagster_cloud.yaml to configure code locations for Dagster Clou @@ -132,9 +132,9 @@ These methods of limiting concurrency can be used individually or together. For ### Limiting overall runs -How you limit the overall number of concurrent runs in a deployment depends on whether you're using Dagster Cloud or Dagster Open Source: +How you limit the overall number of concurrent runs in a deployment depends on whether you're using Dagster+ or Dagster Open Source: -- **Dagster Cloud**: Use the [Dagster Cloud UI or the dagster-cloud CLI][cloud-deployment-settings] +- **Dagster+**: Use the [Dagster+ UI or the `dagster-cloud` CLI][cloud-deployment-settings] - **Dagster Open Source**: Use your instance's `dagster.yaml` To enable this limit, use `run_queue.max_concurrent_runs`. For example, the following would limit the number of concurrent runs for the deployment to 15: @@ -147,7 +147,7 @@ run_queue: When defining a value for `max_concurrent_runs`, keep the following in mind: - This setting defaults to `10` -- Disable the setting with a `-1` value, which means no limit will be enforced. **Note:** All other negative numbers are invalid, and disabling this setting isn't supported for Dagster Cloud. +- Disable the setting with a `-1` value, which means no limit will be enforced. **Note:** All other negative numbers are invalid, and disabling this setting isn't supported for Dagster+. - A value of `0` prevents any runs from launching @@ -155,9 +155,9 @@ When defining a value for `max_concurrent_runs`, keep the following in mind: ### Limiting specific runs using tags -How you limit specific runs based on tags depends on whether you're using Dagster Cloud or Dagster Open Source: +How you limit specific runs based on tags depends on whether you're using Dagster+ or Dagster Open Source: -- **Dagster Cloud**: Use the [Dagster Cloud UI or the dagster-cloud CLI][cloud-deployment-settings] +- **Dagster+**: Use the [Dagster+ UI or the `dagster-cloud` CLI][cloud-deployment-settings] - **Dagster Open Source**: Use your instance's `dagster.yaml` To enable this limit, use `run_queue.tag_concurrency_limits`. This key accepts a list of tags and their corresponding concurrency limits. @@ -277,8 +277,7 @@ Op-based jobs are defined using the decorato } } ) -def tag_concurrency_job(): - ... +def tag_concurrency_job(): ... ``` @@ -370,8 +369,7 @@ In this example, using the - + -If runs aren't being dequeued in Dagster Cloud, the root causes could be: +If runs aren't being dequeued in Dagster+, the root causes could be: -- **If using a [Hybrid deployment](/dagster-cloud/deployment/hybrid)**, the agent serving the deployment may be down. In this situation, runs will be paused. -- **Dagster Cloud is experiencing downtime**. Check the [status page](http://dagstercloud.statuspage.io/) for the latest on potential outages. +- **If using a [Hybrid deployment](/dagster-plus/deployment/hybrid)**, the agent serving the deployment may be down. In this situation, runs will be paused. +- **Dagster+ is experiencing downtime**. Check the [status page](http://dagstercloud.statuspage.io/) for the latest on potential outages. @@ -517,8 +563,8 @@ If runs aren’t being dequeued in Dagster Open Source, the root cause is likely href="/concepts/assets/software-defined-assets" > decorator. 
For example, given the Airflow task below: + +```python file=guides/migrations/migrating_airflow_to_dagster.py startafter=start_simple_airflow_task endbefore=end_simple_airflow_task dedent=4 +from airflow.decorators import task + +@task() +def extract(): + data_string = '{"1001": 301.27, "1002": 433.21, "1003": 502.22}' + order_data_dict = json.loads(data_string) + return order_data_dict +``` + +This can be directly translated to a Dagster asset like so. + +```python file=guides/migrations/migrating_airflow_to_dagster.py startafter=start_simple_dagster_asset endbefore=end_simple_dagster_asset dedent=4 +from dagster import asset + +@asset +def extract(): + data_string = '{"1001": 301.27, "1002": 433.21, "1003": 502.22}' + order_data_dict = json.loads(data_string) + return order_data_dict +``` + +Now, let’s walk through the full `tutorial_taskflow_api.py` example DAG, and how it would be translated to Dagster assets. + +```python file=guides/migrations/migrating_airflow_to_dagster.py startafter=start_full_airflow_example endbefore=end_full_airflow_example dedent=4 +import json + +import pendulum + +from airflow.decorators import dag, task + +@dag( + schedule=None, + start_date=pendulum.datetime(2021, 1, 1, tz="UTC"), + catchup=False, + tags=["example"], +) +def tutorial_taskflow_api(): + """### TaskFlow API Tutorial Documentation + This is a simple data pipeline example which demonstrates the use of + the TaskFlow API using three simple tasks for Extract, Transform, and Load. + Documentation that goes along with the Airflow TaskFlow API tutorial is + located + [here](https://airflow.apache.org/docs/apache-airflow/stable/tutorial_taskflow_api.html) + """ + + @task + def extract(): + """#### Extract task + A simple Extract task to get data ready for the rest of the data + pipeline. In this case, getting data is simulated by reading from a + hardcoded JSON string. + """ + data_string = '{"1001": 301.27, "1002": 433.21, "1003": 502.22}' + + order_data_dict = json.loads(data_string) + return order_data_dict + + @task(multiple_outputs=True) + def transform(order_data_dict: dict): + """#### Transform task + A simple Transform task which takes in the collection of order data and + computes the total order value. + """ + total_order_value = 0 + + for value in order_data_dict.values(): + total_order_value += value + + return {"total_order_value": total_order_value} + + @task + def load(total_order_value: float): + """#### Load task + A simple Load task which takes in the result of the Transform task and + instead of saving it to end user review, just prints it out. + """ + print(f"Total order value is: {total_order_value:.2f}") + + order_data = extract() + order_summary = transform(order_data) + load(order_summary["total_order_value"]) + +tutorial_taskflow_api() +``` + +By converting the Airflow `task` to a Dagster , and our Airflow `dag` to a Dagster , the resulting code will look like the following. + +```python file=guides/migrations/migrating_airflow_to_dagster.py startafter=start_full_dagster_example endbefore=end_full_dagster_example dedent=4 +import json + +from dagster import AssetExecutionContext, Definitions, define_asset_job, asset + +@asset +def extract(): + """Extract task + + A simple Extract task to get data ready for the rest of the data pipeline. In this case, getting + data is simulated by reading from a hardcoded JSON string. 
+ """ + data_string = '{"1001": 301.27, "1002": 433.21, "1003": 502.22}' + + order_data_dict = json.loads(data_string) + + return order_data_dict + +@asset +def transform(extract): + """Transform task + + A simple Transform task which takes in the collection of order data and computes the total order + value. + """ + total_order_value = 0 + + for value in extract.values(): + total_order_value += value + + return total_order_value + +@asset +def load(context: AssetExecutionContext, transform): + """Load task + + A simple Load task which takes in the result of the Transform task and instead of saving it to + end user review, just prints it out. + """ + context.log.info(f"Total order value is: {transform:.2f}") + +airflow_taskflow_example = define_asset_job( + name="airflow_taskflow_example", selection=[extract, transform, load] +) + +defs = Definitions( + assets=[extract, transform, load], jobs=[airflow_taskflow_example] +) +``` + +In this example, we are using to define a job in which the selected assets are materialized. Using the `selection` parameter of the function, we specify that we want our `extract`, `transform`, and `load` assets to be materialized. The lineage of dependencies between the assets are automatically determined through the passing of one asset as a parameter to another. + +Finally, we create a object to register our assets and job and load them by the Dagster tool. + +--- + +## Migrating containerized pipelines + +If you've elected to containerize your Airflow pipelines by using technologies like Kubernetes using the `KubernetesPodOperator`, or Elastic Container Service using the `EcsRunTaskOperator`, you'll need a different approach to migration. + +In these cases, we recommend leveraging Dagster Pipes for running these external execution environments from Dagster. Refer to the [Dagster Pipes documentation](/concepts/dagster-pipes) for more information. + +Some benefits of containerizing your pipelines are as follows: + +- Dependencies are isolated between execution environments +- Compute requirements can be easily modified per pipeline (computer, memory, GPU requirements, and so on) +- Pipelines can be language agnostic, allowing you to use R, Rust, Go, and so on +- Vendor lock-in is limited, and pipelines can be easily migrated to other platforms +- Pipelines can be versioned using tags on the image repository + +Let’s walk through an example of how a containerized pipeline can be run from Airflow, and then let’s walk through how the same would be done in Dagster. Imagine you have a Dockerized pipeline deployed to your registry of choice with an image named `example-data-pipeline`. In Apache Airflow, you would be able to run the image of that image by using the `KubernetesPodOperator`. + +```python file=guides/migrations/migrating_airflow_to_dagster.py startafter=start_run_docker_image_with_airflow endbefore=end_run_docker_image_with_airflow dedent=4 +from airflow import DAG +from airflow.providers.cncf.kubernetes.operators.kubernetes_pod import ( + KubernetesPodOperator, +) +from pendulum import datetime + +with DAG( + dag_id="example_kubernetes_dag", schedule_interval=None, catchup=False +) as dag: + KubernetesPodOperator( + image="example-data-pipeline:latest", + name="example-kubernetes-task", + task_id="example-kubernetes-task", + get_logs=True, + ) +``` + +Now, let's look at how the same image would be run on Kubernetes using Dagster Pipes and the `dagster_k8s` wrapper. 
+ +```python file=guides/migrations/migrating_airflow_to_dagster.py startafter=start_run_docker_image_with_dagster_pipes endbefore=end_run_docker_image_with_dagster_pipes dedent=4 +from dagster import AssetExecutionContext, asset +from dagster_k8s import PipesK8sClient + +@asset +def k8s_pipes_asset( + context: AssetExecutionContext, k8s_pipes_client: PipesK8sClient +): + return k8s_pipes_client.run( + context=context, + image="example-data-pipeline:latest", + base_pod_spec={ + "containers": [ + { + "name": "data-processing-rs", + "image": "data-processing-rs", + } + ] + }, + ).get_materialize_result() +``` + +The primary difference between Airflow and Dagster is how the k8s pod specifications are exposed. In Airflow, they are passed as parameters to the `KubernetesPodOperator`, whereas in Dagster they are passed as a `base_pod_spec` dictionary to the `k8s_pipes_client.run` method. Another difference is that in Airflow, `get_logs` is required to capture `stdout`; in Dagster, logs are automatically captured on the `stdout` tab of the step output. + +In the above example, we demonstrated how to run images on Kubernetes using the `dagster_k8s` library. One of the biggest benefits of Dagster Pipes, however, is that you can leverage the `dagster_pipes` library from within your containerized pipeline to access the full Dagster context, and emit events back to the Dagster UI. + +A common pattern when building containerized pipelines is to accept a large number of command-line arguments using libraries like `argparse`. However, with Dagster you can pass a dictionary of parameters to the Dagster context using the `extras` parameter. Then, in your pipeline code, you can access the context with `PipesContext.get()` if you are using Python. + +For a step-by-step walkthrough of using Dagster Pipes, refer to the [Dagster Pipes tutorial](https://docs.dagster.io/concepts/dagster-pipes/subprocess). diff --git a/docs/content/guides/migrations/observe-your-airflow-pipelines-with-dagster.mdx b/docs/content/guides/migrations/observe-your-airflow-pipelines-with-dagster.mdx new file mode 100644 index 0000000000000..ed2f0fe3cbfc6 --- /dev/null +++ b/docs/content/guides/migrations/observe-your-airflow-pipelines-with-dagster.mdx @@ -0,0 +1,105 @@ +--- +title: "Observe your Airflow pipelines with Dagster | Dagster Docs" +description: "Learn how to leverage the features of Dagster and Airflow together." +--- + +# Observe your Airflow pipelines with Dagster + +Dagster can act as a single entry point to all orchestration platforms in use at your organization. By injecting a small amount of code into your existing pipelines, you can report events to Dagster, where you can then visualize the full lineage of pipelines.
This can be particularly useful if you have multiple Apache Airflow environments, and hope to build a catalog and observation platform through Dagster. + +## Emitting materialization events from Airflow to Dagster + +Imagine you have a large number of pipelines written in Apache Airflow and wish to introduce Dagster into your stack. By using custom Airflow operators, you can continue to run your existing pipelines while you work toward migrating them off Airflow, or while building new pipelines in Dagster that are tightly integrated with your legacy systems. + +To do this, we will define a `DagsterAssetOperator` downstream of your Airflow DAG to indicate that the pipeline's processing has concluded. The HTTP endpoint of the Dagster server, the `asset_key`, and additional metadata and descriptions must be specified to inform Dagster of the materialization. + +```python +from typing import Dict, Optional + +import requests +from airflow.models import BaseOperator +from airflow.utils.decorators import apply_defaults + +class DagsterAssetOperator(BaseOperator): + @apply_defaults + def __init__( + self, + dagster_webserver_host: str, + dagster_webserver_port: str, + asset_key: str, + metadata: Optional[Dict] = None, + description: Optional[str] = None, + *args, + **kwargs, + ): + super().__init__(*args, **kwargs) + self.dagster_webserver_host = dagster_webserver_host + self.dagster_webserver_port = dagster_webserver_port + self.asset_key = asset_key + self.metadata = metadata or {} + self.description = description + + def execute(self, context): + # POST to the webserver's report_asset_materialization endpoint + url = f"http://{self.dagster_webserver_host}:{self.dagster_webserver_port}/report_asset_materialization/{self.asset_key}" + payload = {"metadata": self.metadata, "description": self.description} + headers = {"Content-Type": "application/json"} + + response = requests.post(url, json=payload, headers=headers) + response.raise_for_status() + + self.log.info( + f"Reported asset materialization to Dagster. Response: {response.text}" + ) +``` + +Then, we can append this to our Airflow DAG to indicate that a pipeline has run successfully. + +```python +import os + +dagster_webserver_host = os.environ.get("DAGSTER_WEBSERVER_HOST", "localhost") +dagster_webserver_port = os.environ.get("DAGSTER_WEBSERVER_PORT", "3000") + +dagster_op = DagsterAssetOperator( + task_id="report_dagster_asset_materialization", + dagster_webserver_host=dagster_webserver_host, + dagster_webserver_port=dagster_webserver_port, + asset_key="example_external_airflow_asset", + metadata={"airflow/tag": "example", "source": "external"}, +) +``` + +Once the events are emitted from Airflow, there are two options for scheduling Dagster materializations in response to the external Airflow materialization event: asset sensors and auto-materialization policies. + +An external asset is created in Dagster, and an `asset_sensor` is used to identify the materialization events that are being sent from Airflow.
+ +```python +from dagster import AssetSpec, external_asset_from_spec + +example_external_airflow_asset = external_asset_from_spec( + AssetSpec("example_external_airflow_asset", group_name="External") +) +``` + +```python +from dagster import ( + AssetKey, + EventLogEntry, + RunRequest, + SensorEvaluationContext, + asset_sensor, +) + +# `example_external_airflow_asset_job` is assumed to be a job defined +# elsewhere in your project (for example, with `define_asset_job`). +@asset_sensor( + asset_key=AssetKey("example_external_airflow_asset"), + job=example_external_airflow_asset_job +) +def example_external_airflow_asset_sensor( + context: SensorEvaluationContext, asset_event: EventLogEntry +): + assert asset_event.dagster_event and asset_event.dagster_event.asset_key + yield RunRequest(run_key=context.cursor) +``` + +Now, when a materialization event occurs on the external `example_external_airflow_asset` asset, the `example_external_airflow_asset_job` job will be triggered. Here, you can define logic that can build upon the DAG from your Airflow environment. diff --git a/docs/content/guides/running-dagster-locally.mdx b/docs/content/guides/running-dagster-locally.mdx index 4628d5073c2a2..03d15b0ce222e 100644 --- a/docs/content/guides/running-dagster-locally.mdx +++ b/docs/content/guides/running-dagster-locally.mdx @@ -58,9 +58,9 @@ For the full list of options that can be set in the `dagster.yaml` file, refer t ## Moving to production -`dagster dev` is primarily useful for running Dagster for local development and testing, but isn't suitable for the demands of most production deployments. For example, in a production deployment, you might want to run multiple webserver replicas, have zero downtime continuous deployment of your code, or set up your Dagster daemon to automatically restart if it crashes. +`dagster dev` is primarily useful for running Dagster for local development and testing. It isn't suitable for the demands of most production deployments. Most importantly, `dagster dev` does not include authentication or web security. Additionally, in a production deployment, you might want to run multiple webserver replicas, have zero downtime continuous deployment of your code, or set up your Dagster daemon to automatically restart if it crashes. -For information about deploying Dagster in production, refer to the [Deploying Dagster guides](/deployment/open-source#deploying-dagster). +For information about deploying Dagster in production, refer to the [Open Source Deployment guides](/deployment#deploying-dagster). --- @@ -76,8 +76,8 @@ For information about deploying D + href="/deployment/guides" > + + + +
- Required for Cloud + Required for Dagster+
    {" "} Similar to the workspace.yaml in open source to define code - locations for Dagster Cloud. + locations for Dagster+.
    {" "} - Defines multiple code locations for Dagster Cloud. For Hybrid deployments, - this file can be used + Defines multiple code locations for Dagster+. For Hybrid deployments, this + file can be used to manage environment variables/secrets.
      @@ -82,7 +82,7 @@ description: Use dagster_cloud.yaml to configure code locations for Dagster Clou ## File location -The `dagster_cloud.yaml` file should be placed in the root of your Dagster project. Below is an example of a file structure modified from the [Dagster Cloud ETL quickstart](https://github.com/dagster-io/dagster/tree/master/examples/quickstart_etl). +The `dagster_cloud.yaml` file should be placed in the root of your Dagster project. Below is an example of a file structure modified from the [Dagster+ ETL quickstart](https://github.com/dagster-io/dagster/tree/master/examples/quickstart_etl). ```shell quickstart_etl @@ -332,9 +332,9 @@ locations: ### Python executable -For Dagster Cloud Hybrid deployments, the Python executable that is installed globally in the image, or the default Python executable on the local system if you use the local agent, will be used. To use a different Python executable, specify it using the `executable_path` setting. It can be useful to have different Python executables for different code locations. +For Dagster+ Hybrid deployments, the Python executable that is installed globally in the image, or the default Python executable on the local system if you use the local agent, will be used. To use a different Python executable, specify it using the `executable_path` setting. It can be useful to have different Python executables for different code locations. -For Dagster Cloud Serverless deployments, you can specify a different Python version by [following these instructions](/dagster-cloud/deployment/serverless#using-a-different-python-version). +For Dagster+ Serverless deployments, you can specify a different Python version by [following these instructions](/dagster-plus/deployment/serverless#using-a-different-python-version). ```yaml # dagster_cloud.yaml @@ -367,6 +367,6 @@ If using Hybrid deployment, you can define additional configuration options for Refer to the configuration reference for your agent for more info: -- [Docker agent configuration reference](/dagster-cloud/deployment/agents/docker/configuration-reference) -- [Amazon ECS agent configuration reference](/dagster-cloud/deployment/agents/amazon-ecs/configuration-reference) -- [Kubernetes agent configuration reference](/dagster-cloud/deployment/agents/kubernetes/configuration-reference) +- [Docker agent configuration reference](/dagster-plus/deployment/agents/docker/configuration-reference) +- [Amazon ECS agent configuration reference](/dagster-plus/deployment/agents/amazon-ecs/configuration-reference) +- [Kubernetes agent configuration reference](/dagster-plus/deployment/agents/kubernetes/configuration-reference) diff --git a/docs/content/dagster-plus/managing-deployments/dagster-plus-cli.mdx b/docs/content/dagster-plus/managing-deployments/dagster-plus-cli.mdx new file mode 100644 index 0000000000000..db9110234861a --- /dev/null +++ b/docs/content/dagster-plus/managing-deployments/dagster-plus-cli.mdx @@ -0,0 +1,165 @@ +--- +title: "Using the dagster-cloud CLI | Dagster Docs" +--- + +# Using the dagster-cloud CLI + +This guide is applicable to Dagster+. + +The `dagster-cloud` CLI is a command-line toolkit designed to work with Dagster+. + +In this guide, we'll cover how to install and configure the `dagster-cloud` CLI, get help, and use some helpful environment variables and CLI options. + +--- + +## Installing the CLI + +The Dagster+ Agent library is available in PyPi. 
To install, run: + +```shell +pip install dagster-cloud +``` + +Refer to the [configuration section](#configuring-the-cli) for next steps. + +### Completions + +Optionally, you can install command-line completions to make using the `dagster-cloud` CLI easier. + +To have the CLI install these completions to your shell, run: + +```shell +dagster-cloud --install-completion +``` + +To print out the completion for copying or manual installation: + +```shell +dagster-cloud --show-completion +``` + +--- + +## Configuring the CLI + +The recommended way to set up your CLI's config for long-term use is through the configuration file, located by default at `~/.dagster_cloud_cli/config`. + +### Setting up the configuration file + +Set up the config file: + +```shell +dagster-cloud config setup +``` + +Select your authentication method. **Note**: Browser authentication is the easiest method to configure. + +
      +BROWSER AUTHENTICATION + +The easiest way to set up is to authenticate through the browser. + +```shell +$ dagster-cloud config setup +? How would you like to authenticate the CLI? (Use arrow keys) + » Authenticate in browser + Authenticate using token +Authorized for organization `hooli` + +? Default deployment: prod +``` + +When prompted, you can specify a default deployment. If specified, a deployment won't be required in subsequent `dagster-cloud` commands. The default deployment for a new Dagster+ organization is `prod`. + +
      + +
      +TOKEN AUTHENTICATION + +Alternatively, you may authenticate using a user token. Refer to the [Managing user and agent tokens guide](/dagster-plus/account/managing-user-agent-tokens) for more info. + +```shell +$ dagster-cloud config setup +? How would you like to authenticate the CLI? (Use arrow keys) + Authenticate in browser + » Authenticate using token + +? Dagster+ organization: hooli +? Dagster+ user token: ************************************* +? Default deployment: prod +``` + +When prompted, specify the following: + +- **Organization** - Your organization name as it appears in your Dagster+ URL. For example, if your Dagster+ instance is `https://hooli.dagster.cloud/`, this would be `hooli`. +- **User token** - The user token. +- **Default deployment** - **Optional**. A default deployment. If specified, a deployment won't be required in subsequent `dagster-cloud` commands. The default deployment for a new Dagster+ organization is `prod`. + +
      + +### Viewing and modifying the configuration file + +To view the contents of the CLI configuration file, run: + +```shell +$ dagster-cloud config view + +default_deployment: prod +organization: hooli +user_token: '*******************************8214fe' +``` + +Specify the `--show-token` flag to show the full user token. + +To modify the existing config, re-run: + +```shell +dagster-cloud config setup +``` + +--- + +## Toggling between deployments + +To quickly toggle between deployments, run: + +```shell +dagster-cloud config set-deployment +``` + +--- + +## Getting help + +To view help options in the CLI: + +```shell +dagster-cloud --help +``` + +--- + +## Reference + +- [Custom configuration file path](#custom-configuration-file-path) +- [Environment variables and CLI options](#environment-variables-and-cli-options) + +### Custom configuration file path + +Point the CLI at an alternate config location by specifying the `DAGSTER_CLOUD_CLI_CONFIG` environment variable. + +### Environment variables and CLI options + +Environment variables and CLI options can be used in place of or to override the CLI configuration file. + +The priority of these items is as follows: + +- **CLI options** - highest +- **Environment variables** +- **CLI configuration** - lowest + +| Setting | Environment variable | CLI flag | CLI config value | +| ------------ | ---------------------------- | ---------------------- | -------------------- | +| Organization | `DAGSTER_CLOUD_ORGANIZATION` | `--organization`, `-o` | `organization` | +| Deployment | `DAGSTER_CLOUD_DEPLOYMENT` | `--deployment`, `-d` | `default_deployment` | +| User Token | `DAGSTER_CLOUD_API_TOKEN` | `--user-token`, `-u` | `user_token` | diff --git a/docs/content/dagster-plus/managing-deployments/deployment-settings-reference.mdx b/docs/content/dagster-plus/managing-deployments/deployment-settings-reference.mdx new file mode 100644 index 0000000000000..6233d47a49be0 --- /dev/null +++ b/docs/content/dagster-plus/managing-deployments/deployment-settings-reference.mdx @@ -0,0 +1,251 @@ +--- +title: "Dagster+ deployment settings reference" +description: "Detailed info about configurable settings for Dagster+ deployments." +--- + +# Dagster+ deployment settings reference + +This guide is applicable to Dagster+. + +This reference describes the settings that can be configured for full deployments in [Dagster+](/dagster-plus). + +Refer to the [Managing deployments in Dagster+ guide](/dagster-plus/managing-deployments/managing-deployments#configuring-deployment-settings) for info about configuring settings in the Dagster+ interface or using the `dagster-cloud` CLI. + +--- + +## Settings schema + +Settings are formatted using YAML. For example: + +```yaml +run_queue: + max_concurrent_runs: 10 + tag_concurrency_limits: + - key: "database" + value: "redshift" + limit: 5 + +run_monitoring: + start_timeout_seconds: 1200 + cancel_timeout_seconds: 1200 + max_runtime_seconds: 7200 + +run_retries: + max_retries: 0 + +sso_default_role: EDITOR +``` + +--- + +## Settings + +For each deployment, you can configure settings for: + +- [Run queue](#run-queue-run_queue) +- [Run monitoring](#run-monitoring-run_monitoring) +- [Run retries](#run-retries-run_retries) +- [SSO default role](#sso-default-role) +- [Non-isolated runs](#non-isolated-runs) + +### Run queue (run_queue) + +The `run_queue` settings allow you to specify how many runs can execute concurrently in the deployment. 
+ +```yaml +run_queue: + max_concurrent_runs: 10 + tag_concurrency_limits: + - key: "database" + value: "redshift" + limit: 5 +``` + + + + The maximum number of runs that are allowed to be in progress at once. Set + to 0 to stop any runs from launching. Negative values aren't + permitted. + + + + A list of limits applied to runs with particular tags. +
        +
      • + Default - [] +
      • +
      + Each list item may have the following properties: +
        +
      • + key +
      • +
      • + value +
          +
        • + If defined, the limit is applied only to the{" "} + key-value pair. +
        • +
        • + If not defined, the limit is applied across all values + of the + key. +
        • +
        • + If set to a dict with applyLimitPerUniqueValue: true, + the limit is applied to the number of unique values for + the key. +
        • +
        +
      • +
      • + limit +
      • +
      +
      +
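+A run opts into these limits through its tags. As a minimal sketch (the job itself is hypothetical), a job whose runs should count against the `database`/`redshift` limit above can attach the tag at definition time:
+
+```python
+from dagster import define_asset_job
+
+# Hypothetical job: every run it launches carries the database=redshift
+# tag, so those runs count against the tag_concurrency_limits rule above.
+redshift_job = define_asset_job(
+    name="redshift_job",
+    selection="*",
+    tags={"database": "redshift"},
+)
+```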
+ +### Run monitoring (run_monitoring) + +The `run_monitoring` settings allow you to define how long Dagster+ should wait for runs to start before marking them as failed, or to terminate before marking them as canceled. + +```yaml +run_monitoring: + start_timeout_seconds: 1200 + cancel_timeout_seconds: 1200 + max_runtime_seconds: 7200 +``` + + + + The number of seconds that Dagster+ will wait after a run is launched for + the process or container to start executing. After the timeout, the run will + fail. This prevents runs from hanging in STARTING indefinitely + when the process or container doesn't start.
        +
      • + Default - 1200 (20 minutes) +
      • +
      +
      + + The number of seconds that Dagster+ will wait after a run termination is + initiated for the process or container to terminate. After the timeout, the + run will move into a CANCELED state. This prevents runs from hanging in{" "} + CANCELING indefinitely when the process or container doesn't + terminate cleanly. +
        +
      • + Default - 1200 (20 minutes) +
      • +
      +
      + + The number of seconds that Dagster+ will wait after a run is moved into a + STARTED state for the run to complete. After the timeout, the run will be + terminated and moved into a FAILURE state. This prevents runs from hanging + in STARTED indefinitely if the process is hanging. +
        +
      • + Default - No limit +
      • +
      +
      +
      + +### Run retries (run_retries) + +The `run_retries` settings allow you to define how Dagster+ handles retrying failed runs in the deployment. + +```yaml +run_retries: + max_retries: 0 +``` + + + + The maximum number of times Dagster+ should attempt to retry a failed run. + Dagster+ will use the default if this setting is undefined. +
        +
      • + Default - 0 +
      • +
      +
+ + Whether to retry runs that failed due to assets or ops in the run failing. + Set this to false if you only want to retry failures that occur due to the + run worker crashing or unexpectedly terminating, and instead rely on op or + asset-level retry policies to retry asset or op failures. Setting this + field to false will only change retry behavior for runs on Dagster version + 1.6.7 or greater.
        +
• + Default - true +
      • +
      +
      +
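+The deployment-wide default can also be overridden for individual jobs with the `dagster/max_retries` tag, as in this minimal sketch (the job itself is hypothetical):
+
+```python
+from dagster import define_asset_job
+
+# Hypothetical job: runs launched from it retry up to 3 times,
+# overriding the deployment-level run_retries.max_retries setting.
+nightly_job = define_asset_job(
+    name="nightly_job",
+    selection="*",
+    tags={"dagster/max_retries": 3},
+)
+```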
      + +### SSO default role + +The `sso_default_role` setting lets you configure the default role on the deployment which is granted to new users that log in via SSO. For more information on available roles, see the [Dagster+ permissions reference](/dagster-plus/account/managing-users/managing-user-roles-permissions#user-permissions-reference). + +```yaml +sso_default_role: EDITOR +``` + + + + If SAML SSO is enabled, this is the default role that will be assigned to + Dagster+ users for this deployment. If SAML SSO is not enabled, this setting + is ignored. +
        +
      • + Default - VIEWER +
      • +
      +
      +
      + +### Non-isolated runs + +Configure [non-isolated runs](/dagster-plus/deployment/serverless) on your deployment. + +```yaml +non_isolated_runs: + enabled: True + max_concurrent_non_isolated_runs: 1 +``` + + + + If enabled, the `Isolate run environment` checkbox will appear in the + Launchpad. +
        +
      • + Default - true +
      • +
      +
      + + A limit for how many non-isolated runs to launch at once. Once this limit is + reached, the checkbox will be greyed out and all runs will be isolated. This + helps to avoid running out of RAM on the code location server. +
        +
      • + Default - 1 +
      • +
      +
      +
      diff --git a/docs/content/dagster-plus/managing-deployments/environment-variables-and-secrets.mdx b/docs/content/dagster-plus/managing-deployments/environment-variables-and-secrets.mdx new file mode 100644 index 0000000000000..4de3b7593cc4c --- /dev/null +++ b/docs/content/dagster-plus/managing-deployments/environment-variables-and-secrets.mdx @@ -0,0 +1,474 @@ +--- +title: "Dagster+ environment variables and secrets | Dagster Docs" +--- + +# Dagster+ environment variables and secrets + +This guide is applicable to Dagster+. + + + +--- + +## Understanding environment variables and secrets + +- [Overview](#overview) +- [Storage and encryption](#storage-and-encryption) +- [Scope](#scope) +- [Reserved variables](#reserved-variables) + +### Overview + +There are two ways to declare and manage variables in Dagster+: + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
      + Dagster+ UI + + Agent configuration +
      + Deployment type support + + Serverless,{" "} + Hybrid + + Hybrid +
      + How it works + + Environment variables are managed in the Dagster+ UI. Values are pulled + from storage and decrypted when your code is executed. + + Environment variables are defined in the agent's configuration. + Variables set at the code location level will pass through Dagster+, + while those set at the deployment level bypass Dagster+ entirely. Refer + to the{" "} + + Setting environment variables for Dagster+ agents guide + {" "} + for more info. +
      + Requirements + +
        +
      • + Dagster code uses version 1.0.17 or later +
      • +
      • + If using Hybrid, agent + uses Dagster version 1.0.17 or later +
      • +
      • + + Editor, Admin, or Organization Admin permissions + {" "} + in Dagster+. Note: Editors and Admins can only set + environment variables for deployments where they're an Editor or + Admin. +
      • +
      +
      + Ability to modify your dagster.yaml and{" "} + + dagster_cloud.yaml + {" "} + files +
      + Limitations + +
        +
      • + Maximum of 1,000 variables per full deployment +
      • +
      • Variables must be less than or equal to 4KB in size
      • +
      • + Variable names: +
          +
        • Must be 512 characters or less in length
        • +
        • Must start with a letter or underscore
        • +
        • Must contain only letters, numbers, and underscores
        • +
        • + May not be the same as{" "} + built-in (system) variables +
        • +
        +
      • +
      +
      + Variable names: +
        +
      • Must start with a letter or underscore
      • +
      • Must contain only letters, numbers, and underscores
      • +
      +
      + Storage and encryption + + Uses Amazon Key Management Services (KMS) and envelope encryption. Refer + to the{" "} + Storage and encryption section for + more info. + Dependent on agent type (ex: Kubernetes)
      + Scope + + Scoped by deployment (full and branch) and optionally, code location + + Scoped by code location. Variables can be set for a full deployment (all + code locations) or on a per-code location basis. +
+ +### Storage and encryption + + + This section is applicable only if using the Dagster+ UI to manage environment + variables. + + +To securely store environment variables defined using the Dagster+ UI, Dagster+ uses [Amazon Key Management Services (KMS)](https://docs.aws.amazon.com/kms/index.html) and [envelope encryption](https://docs.aws.amazon.com/kms/latest/developerguide/concepts.html#enveloping). Envelope encryption is a multi-layered approach to key encryption. Plaintext data is encrypted using a data key, and the data key itself is then encrypted under another key. + +Here's a look at how it works in Dagster+: +
      + Dagster+ encryption key hierarchy diagram +
      + +In Dagster+, each customer account is assigned a unique key, which then encrypts the data associated with that account. All customer keys are encrypted using a non-exportable AWS KMS master key. + +This approach isolates each account's data and reduces the risk of exposure by limiting the amount of data a single key can access. + +### Scope + + + This section is applicable only if using the Dagster+ UI to manage environment + variables. + + +Environment variables can be scoped to specific deployments and/or code locations. When creating or modifying an environment variable, you'll be prompted to select the deployment(s) to scope the variable to: + +- **Full deployment** - Variables with this scope will be available to selected code locations in the full deployment + +- **Branch deployments** - Variables with this scope will be available to selected code locations in Branch Deployments. + + **Note**: While viewing a Branch Deployment in Dagster+, variables will be read-only. Environment variables must be managed in the Branch Deployment's parent full deployment, which will usually be `prod`. + +- **Local** - Variables with this scope will be included when [downloading variables to a local `.env` file](#exporting-variables-to-a-env-file) + +By default, new environment variables default to all deployments and all code locations. + +#### Same variable, different scopes and values + +You can create multiple instances of the same environment variable with different values for different scopes. Each instance of the variable can then be scoped to a deployment and code location (or locations). This approach can be useful for parameterizing behavior by environment without needing to modify your application code. + +For example, let's say we want to use different database passwords in production and testing (Branch Deployments). In our code, we use the `SNOWFLAKE_PASSWORD` environment variable to pass in the database password. To use different passwords between production and Branch Deployments, we can create two instances of `SNOWFLAKE_PASSWORD`. One instance is scoped to the `prod` deployment and the other only to Branch Deployments: + + + +In this example, the value of `SNOWFLAKE_PASSWORD` would be `production_password` in the `prod` deployment and `testing_password` in a Branch Deployment. + +### Reserved variables + + + This section is applicable only if using the Dagster+ UI to manage environment + variables. + + +[Built-in (system) Dagster+ environment variables](/dagster-plus/managing-deployments/reserved-environment-variables) are reserved and therefore unavailable for use. [An error will surface in Dagster+](#troubleshooting) if a built-in variable name is used. + +--- + +## Managing environment variables + +The simplest way to manage environment variables is to use Dagster+'s built-in manager which allows you to create and manage environment variables right in the UI. + +### Using the Dagster+ UI + + + To manage environment variables using the Dagster+ UI, you need: +
        +
• + Organization Admin, Admin, or{" "} + Editor permissions. If you're a Dagster+{" "} + + Editor or Admin + + , you can only set environment variables for full deployments where you're + an Editor or Admin. +
      • +
      • + Dagster code on version 1.0.17 or later. If using Hybrid, your agent must + also use 1.0.17 or later. +
      • +
      +
      + +- [Creating new variables](#creating-new-variables) +- [Editing, viewing, and deleting variables](#editing-viewing-and-deleting-variables) +- [Exporting variables to a `.env` file](#exporting-variables-to-a-env-file) + +#### Creating new variables + +1. Sign in to your Dagster+ account. + +2. Click **Deployment > Environment variables**. + +3. Click **+ Add Environment Variable** to add a new environment variable. + +4. In the window that displays, fill in the following: + + - **Name** - Enter a name for the environment variable. This is how the variable will be referenced in your code. + - **Value** - Enter a value for the environment variable. + - In **Deployment Scope**, select the deployment(s) where the variable should be accessible: + - **Full deployment** - The variable will be available to selected code locations in the full deployment + - **Branch deployments** - The variable will be available to selected code locations in Branch Deployments + - **Local** - If selected, the variable will be included when [exporting environment variables to a local `.env` file](#exporting-variables-to-a-env-file) + - In **Code Location Scope**, select the code location(s) where the variable should be accessible. At least one code location is required. Refer to the [Scope](#scope) section for more info. + + For example: + + Create new environment variable dialog window in Dagster+ + +5. When finished, click **Save**. Dagster+ will automatically re-deploy the workspace to apply the changes. + +#### Editing, viewing, and deleting variables + +After the environment variable is created: + +- **To edit an environment variable**, click the **Edit** button. +- **To view an environment variable's value**, click the **eye icon** in the variable's **Value** column. Click the icon again to conceal the value. +- **To delete an environment variable**, click the **Trash icon** and confirm the deletion when prompted. + +#### Exporting variables to a .env file + +All variables with the `local` deployment scope can be exported to an `.env` file and used locally. To create the file: + +1. In the **Environment variables** tab, click the menu next to **+ Add environment variable**: + + Highlighted Download local variables file in Environment Variables tab of Dagster+ + +2. Click **Download local environment variables** + +3. Save the file. **Note**: If saved to the same folder where you run `dagster-webserver`, Dagster will [automatically load the variables in the `.env` file](/guides/dagster/using-environment-variables-and-secrets#declaring-environment-variables) when the webserver is started. + +### Using agent configuration (Hybrid) + +Only supported for Hybrid deployments. + +For Dagster+ Hybrid deployments, making environment variables accessible is accomplished by adding them to your agent's configuration. How this is accomplished depends on the agent type. + +Refer to the [Setting environment variables for Dagster+ agents guide](/dagster-plus/managing-deployments/setting-environment-variables-agents) for more info. + +--- + +## Accessing environment variables in Dagster code + +Ready to start using environment variables in your Dagster code? Refer to the [Using environment variables and secrets in Dagster code guide](/guides/dagster/using-environment-variables-and-secrets) for more info and examples. 
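+As a minimal sketch (the resource and variable names here are hypothetical), code can read a variable directly with `os.getenv`, or defer reading a secret until runtime by passing Dagster's `EnvVar` to a resource:
+
+```python
+import os
+
+from dagster import ConfigurableResource, Definitions, EnvVar, asset
+
+
+class DatabaseResource(ConfigurableResource):
+    # Hypothetical resource, used only for illustration
+    password: str
+
+
+@asset
+def my_asset(database: DatabaseResource) -> str:
+    # database.password was resolved from SNOWFLAKE_PASSWORD at runtime;
+    # built-in variables can also be read directly with os.getenv
+    assert database.password
+    return os.getenv("DAGSTER_CLOUD_DEPLOYMENT_NAME", "local")
+
+
+defs = Definitions(
+    assets=[my_asset],
+    resources={
+        # EnvVar defers resolution until runtime, so the secret value is
+        # read from the environment rather than baked into the definition
+        "database": DatabaseResource(password=EnvVar("SNOWFLAKE_PASSWORD")),
+    },
+)
+```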
+ +--- + +## Reserved environment variables + +[Dagster+](/dagster-plus) provides a set of built-in, automatically populated environment variables, such as the name of a deployment or details about a branch deployment commit, that can be used to modify behavior based on environment. Refer to the [Reserved Dagster+ environment variables reference](/dagster-plus/managing-deployments/reserved-environment-variables) for more information. + +--- + +## Troubleshooting + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
      + Error + Description and resolution
      + [variable] is a reserved environment variable name. + + Dagster reserves the names of{" "} + built-in (system) variables. To + resolve, change the variable's name to a name that isn't currently{" "} + reserved and meets the other naming + requirements. +
      + + Environment variables must be no greater than 4KB in size. + + + To resolve, reduce the size of the environment variable's value to less + than the maximum of 4KB. +
      + + Environment variable names must be no longer than 512 characters. + + + To resolve, reduce the number of characters in the variable's name. +
      + Invalid environment variable name [variable] + + The name of the environment variable doesn't meet one or several of + Dagster's naming requirements. To resolve, change the variable's name + to: +
        +
      • Start with a letter or underscore
      • +
      • Contain only letters, numbers, and underscores
      • +
      +
      + + Deployment [deployment_name] has reached the maximum of 1,000 + environment variables. + + + The maximum number of environment variables for the full deployment has + been reached. New variables cannot be added. Remove any unneeded + variables to reduce the total below the maximum, then add new variables. +
+ +--- + +## Related + + + + + + + + + diff --git a/docs/content/dagster-plus/managing-deployments/managing-deployments.mdx b/docs/content/dagster-plus/managing-deployments/managing-deployments.mdx new file mode 100644 index 0000000000000..650b81f513c37 --- /dev/null +++ b/docs/content/dagster-plus/managing-deployments/managing-deployments.mdx @@ -0,0 +1,198 @@ +--- +title: Managing deployments in Dagster+ | Dagster Docs +description: Learn to manage and configure your Dagster+ deployments. +--- + +# Managing deployments in Dagster+ + +This guide is applicable to Dagster+. + +In Dagster+, there are two types of deployments: + +- [**Branch deployments**](/dagster-plus/managing-deployments/branch-deployments), which are temporary deployments built for testing purposes +- **Full deployments**, which are persistent, fully-featured deployments intended to perform actions on a recurring basis + +This guide will focus on **full deployments**, hereafter referred to simply as deployments. + +--- + +## Understanding deployments + +Deployments are standalone environments, allowing you to operate independent instances of Dagster with separately managed permissions. + +When a Dagster+ organization is created, a single deployment named `prod` will also be created. To create additional deployments, a [Pro plan](https://dagster.io/pricing) is required. + +Each deployment can have one or multiple [code locations](/dagster-plus/managing-deployments/code-locations). + +**Concerned about testing environments?** We recommend using Branch Deployments to test your changes, even if you're able to create additional deployments. Branch deployments are available for all Dagster+ users, regardless of plan. + +Refer to the [Branch Deployment docs](/dagster-plus/managing-deployments/branch-deployments) for more info, or the [Testing against production using Branch Deployments guide](/guides/dagster/branch_deployments) for a real-world example. + +--- + +## Viewing and switching deployments + +In Dagster+, you can view and switch between deployments using the **deployment switcher**: + + + +To view all deployments, click **View all deployments**. + +--- + +## Creating deployments + + + + Organization Admin permissions + {" "} + are required to create deployments. Additionally, note that creating multiple + deployments requires a{" "} + + Pro plan + + . + + +To create a deployment: + +1. Sign in to your Dagster+ account. +2. Access the **Deployments** page using one of the following options: + - Click the **deployment switcher > View all deployments**. + - Click **your user icon > Organization Settings > Deployments**. +3. Click the **+ New deployment** button. +4. In the modal that displays, fill in the following: + - **Name** - Enter a name for the deployment. + - **Initial deployment permissions** - Select the permissions you want to use to create the deployment: + - **Empty permissions** - Creates the deployment with an empty set of permissions. **Note**: Only Organization Admins will be able to manage the deployment until other users are granted Admin or Editor permissions. + - **Copy from** - Creates the deployment using permissions duplicated from an existing deployment. +5. When finished, click **Create deployment**. + +--- + +## Deleting deployments + + + + Organization Admin permissions + {" "} + are required to delete deployments. Additionally, note that deleting a + deployment also deletes all its associated data, including code locations, + jobs, schedules, and sensors.
+ + +To delete a deployment: + +1. Sign in to your Dagster+ account. +2. Access the **Deployments** page using one of the following options: + - Click the **deployment switcher > View all deployments**. + - Click the **deployment switcher**, then the **gear icon** next to the deployment. + - Click **your user icon > Organization Settings > Deployments**. +3. Click the **Delete** button next to the deployment you want to delete. +4. When prompted, confirm the deletion. + +--- + +## Configuring deployment settings + + + + Editor permissions + {" "} + are required to modify deployment settings. + + +Deployment settings can be configured in the Dagster+ interface or using the `dagster-cloud` CLI. Refer to the [deployment settings reference](/dagster-plus/managing-deployments/deployment-settings-reference) for more info about individual settings. + + + + To configure deployment settings in the Dagster+ UI: + +
1. Sign in to your Dagster+ account.

2. Access the **Deployments** page using one of the following options:

   - Click the **deployment switcher > View all deployments**.
   - Click the **deployment switcher**, then the **gear icon** next to the deployment.
   - Click **your user icon > Organization Settings > Deployments**.

3. Click the **Settings** button next to the deployment you want to configure.

4. In the window that displays, configure settings for the deployment.

5. When finished, click **Save deployment settings**.
To configure deployment settings using the `dagster-cloud` CLI:

**Note**: `dagster-cloud` 0.13.14 or later must be installed to run the CLI. Agent and/or job code doesn't need to be upgraded.

Create a file with the settings you'd like to configure. For example:

```yaml
# my-settings.yaml

run_queue:
  max_concurrent_runs: 10
  tag_concurrency_limits:
    - key: "special-runs"
      limit: 5

run_monitoring:
  start_timeout_seconds: 1200
  cancel_timeout_seconds: 1200

run_retries:
  max_retries: 0
```

Use the CLI to upload the settings file:

```shell
dagster-cloud deployment settings set-from-file my-settings.yaml
```

This will replace all of your configured settings; any that aren't specified will revert to their default values. You can also use the CLI to read your current settings, including the default values:

```shell
dagster-cloud deployment settings get
```
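Because `set-from-file` replaces the full settings object, one low-risk workflow is to start from your current settings. The following is a minimal sketch using only the two commands above, assuming `settings get` writes the YAML to standard output; the filename is arbitrary:

```shell
# Save the current settings (including defaults) to a file...
dagster-cloud deployment settings get > my-settings.yaml

# ...edit my-settings.yaml, then upload the complete settings back:
dagster-cloud deployment settings set-from-file my-settings.yaml
```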
      + +--- + +## Related + + + + + + + + + diff --git a/docs/content/dagster-plus/managing-deployments/reserved-environment-variables.mdx b/docs/content/dagster-plus/managing-deployments/reserved-environment-variables.mdx new file mode 100644 index 0000000000000..10891282c9ed8 --- /dev/null +++ b/docs/content/dagster-plus/managing-deployments/reserved-environment-variables.mdx @@ -0,0 +1,86 @@ +--- +title: "Reserved Dagster+ environment variables | Dagster Docs" +--- + +# Reserved Dagster+ environment variables + +[Dagster+](/dagster-plus) provides a set of built-in, automatically populated environment variables, such as the name of a deployment or details about a branch deployment commit, that can be used to modify behavior based on environment. + +--- + +## Reserved deployment variables + +The following variables are available in every deployment of your Dagster+ instance, including full (e.g., `prod`) and branch deployments. + + + + The name of the Dagster+ deployment. For example, prod. + + + If 1, the deployment is a{" "} + + branch deployment + + . Refer to the + Branch Deployment variables section + for a list of variables available in branch deployments. + + + +--- + +## Reserved Branch Deployment variables + +The following environment variables are currently available only in a [branch deployment](/dagster-plus/managing-deployments/branch-deployments). + +For every commit made to a branch, the following metadata is available: + + + + The SHA of the commit. + + + The time the commit occurred. + + + The email of the git user who authored the commit. + + + The name of the git user who authored the commit. + + + The message associated with the commit. + + + The name of the branch associated with the commit. + + + The name of the repository associated with the commit. + + + The ID of the pull request associated with the commit. + + + The status of the pull request at the time of the commit. Possible values + are OPEN, CLOSED, and MERGED. + + + +--- + +## Related + + + + + + diff --git a/docs/content/dagster-plus/managing-deployments/setting-environment-variables-agents.mdx b/docs/content/dagster-plus/managing-deployments/setting-environment-variables-agents.mdx new file mode 100644 index 0000000000000..9053a96bc96e3 --- /dev/null +++ b/docs/content/dagster-plus/managing-deployments/setting-environment-variables-agents.mdx @@ -0,0 +1,246 @@ +--- +title: Setting Dagster+ environment variables using agent configuration | Dagster Docs +description: Set environment variables in your Dagster+ agent. +--- + +# Setting Dagster+ environment variables using agent configuration + +This guide is applicable to Dagster+. + +In this guide, we'll walk you through setting environment variables for a Dagster+ [Hybrid deployment](/dagster-plus/deployment/hybrid) using the Hybrid agent's configuration. + +There are two ways to set environment variables: + +- **On a per-code location basis**, which involves modifying the `dagster_cloud.yaml` file. **Note**: This approach is functionally the same as [setting environment variables using the Dagster+ UI](/dagster-plus/managing-deployments/environment-variables-and-secrets). Values will pass through Dagster+. +- **For a full deployment and all the code locations it contains**. This approach makes variables available for all code locations in a full Dagster+ deployment. As values are pulled from the user cluster, values will bypass Dagster+ entirely. 
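As a preview of the first approach (per-agent details follow below), here's a minimal, hypothetical `dagster_cloud.yaml` sketch for a Kubernetes agent. The location name and package are placeholders, and the key under `container_context` depends on your agent type:

```yaml
# dagster_cloud.yaml - hypothetical sketch for a Kubernetes agent
locations:
  - location_name: my-location # placeholder
    code_source:
      package_name: my_package # placeholder
    container_context:
      k8s: # use the key matching your agent type
        env_vars:
          - SNOWFLAKE_USERNAME=dev # key-value pair sets an explicit value
          - SNOWFLAKE_PASSWORD # key only: value pulled from the agent's environment
```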
+ +--- + +## Prerequisites + +To complete the steps in this guide, you'll need: + +- A Dagster+ account using [Hybrid deployment](/dagster-plus/deployment/hybrid) +- An existing [Hybrid agent](/dagster-plus/deployment/agents) +- **Editor**, **Admin**, or **Organization Admin** permissions in Dagster+ + +--- + +## Setting environment variables for a code location + + + To set environment variables, you need{" "} + + one of the following user roles + {" "} + in Dagster+: +
- **Organization Admin**, or
- **Editor** or **Admin**. **Note**: Editors and Admins can only set environment variables in full deployments where you're an Editor or Admin.
      + +Setting environment variables for specific code locations is accomplished by adding them to your agent's configuration in your project's [`dagster_cloud.yaml` file](/dagster-plus/managing-deployments/dagster-cloud-yaml). The `container_context` property in this file sets the variables in the agent's environment. + +**Note**: This approach is functionally the same as [setting environment variables using the Dagster+ UI](/dagster-plus/managing-deployments/environment-variables-and-secrets). + +How `container_context` is configured depends on the agent type. Click the tab for your agent type to view instructions. + + + + +### Amazon ECS agents + + + +After you've modified `dagster_cloud.yaml`, redeploy the code location in Dagster+ to apply the changes: + + + + + + +### Docker agents + + + +After you've modified `dagster_cloud.yaml`, redeploy the code location in Dagster+ to apply the changes: + + + + + + +### Kubernetes agents + + + +After you've modified `dagster_cloud.yaml`, redeploy the code location in Dagster+ to apply the changes: + + + + + + +--- + +## Setting environment variables for full deployments + + + If you're a Dagster+{" "} + + Editor or Admin + + , you can only set environment variables for full deployments where you're an + Editor + or Admin. + + +Setting environment variables for a full deployment will make the variables available for all code locations in the full deployment. Using this approach will pull variable values from your user cluster, bypassing Dagster+ entirely. + +Click the tab for your agent type to view instructions. + + + + +### Amazon ECS agents + +To make environment variables accessible to a full deployment with an Amazon ECS agent, you'll need to modify the agent's CloudFormation template as follows: + +1. Sign in to your AWS account. + +2. Navigate to **CloudFormation** and open the stack for the agent. + +3. Click **Update**. + +4. Click **Edit template in designer**. + +5. In the section that displays, click **View in Designer**. The AWS template designer will display. + +6. In the section displaying the template YAML, locate the `AgentTaskDefinition` section: + + Highlighted AgentTaskDefinition section of the AWS ECS agent CloudFormation template in the AWS Console + +7. In the `user_code_launcher.config` portion of the `AgentTaskDefinition` section, add the environment variables as follows: + + ```yaml + user_code_launcher: + module: dagster_cloud.workspace.ecs + class: EcsUserCodeLauncher + config: + cluster: ${ConfigCluster} + subnets: [${ConfigSubnet}] + service_discovery_namespace_id: ${ServiceDiscoveryNamespace} + execution_role_arn: ${TaskExecutionRole.Arn} + task_role_arn: ${AgentRole} + log_group: ${AgentLogGroup} + env_vars: + - SNOWFLAKE_USERNAME=dev + - SNOWFLAKE_PASSWORD ## pulled from agent environment + ' > $DAGSTER_HOME/dagster.yaml && cat $DAGSTER_HOME/dagster.yaml && dagster-cloud agent run" + ``` + +8. When finished, click the **Create Stack** button: + + Highlighted Create Stack button in the AWS Console + +9. You'll be redirected back to the **Update stack** wizard, where the new template will be populated. Click **Next**. + +10. Continue to click **Next** until you reach the **Review** page. + +11. Click **Submit** to update the stack. + + + + +### Docker agents + +To make environment variables accessible to a full deployment with a Docker agent, you'll need to modify your project's `dagster.yaml` file. 
+ +In the `user_code_launcher` section, add an `env_vars` property as follows: + +```yaml +# dagster.yaml + +user_code_launcher: + module: dagster_cloud.workspace.docker + class: DockerUserCodeLauncher + config: + networks: + - dagster_cloud_agent + env_vars: + - SNOWFLAKE_PASSWORD # value pulled from agent's environment + - SNOWFLAKE_USERNAME=dev +``` + +In `env_vars`, specify the environment variables as keys (`SNOWFLAKE_PASSWORD`) or key-value pairs (`SNOWFLAKE_USERNAME=dev`). If only `KEY` is provided, the value will be pulled from the agent's environment. + + + + +### Kubernetes agents + +To make environment variables to a full deployment with a Kubernetes agent, you'll need to modify and upgrade the Helm chart's `values.yaml`. + +1. In `values.yaml`, add or locate the `workspace` value. + +2. Add an `envVars` property as follows: + + ```yaml + # values.yaml + + workspace: + envVars: + - SNOWFLAKE_PASSWORD # value pulled from agent's environment + - SNOWFLAKE_USERNAME=dev + ``` + +3. In `envVars`, specify the environment variables as keys (`SNOWFLAKE_PASSWORD`) or key-value pairs (`SNOWFLAKE_USERNAME=dev`). If only `KEY` is provided, the value will be pulled from the local (agent's) environment. + +4. Upgrade the Helm chart. + + + + +--- + +## Related + + + + + diff --git a/docs/content/dagster-plus/references/ci-cd-file-reference.mdx b/docs/content/dagster-plus/references/ci-cd-file-reference.mdx new file mode 100644 index 0000000000000..5a87beb091e6c --- /dev/null +++ b/docs/content/dagster-plus/references/ci-cd-file-reference.mdx @@ -0,0 +1,181 @@ +--- +title: "Dagster+ CI/CD file reference | Dagster Docs" +description: "Learn about the files Dagster+ uses to manage your deployments." +--- + +# Dagster+ CI/CD file reference + +This reference is applicable to Dagster+. + +When you import a project into Dagster+ from GitHub or Gitlab, a few `.yml` files will be added to the repository. These files are essential as they manage the deployments in Dagster+. + +--- + +## branch_deployments.yml + + + + + + + + + + + + + + + + + + + + +
| Property | Value |
| --- | --- |
| **Name** | `branch_deployments.yml` |
| **Status** | Active |
| **Required** | Required to use [Branch Deployments](/dagster-plus/managing-deployments/branch-deployments) |
| **Description** | Defines the steps required to use Branch Deployments. **Note**: This file must be manually added to the repository if using a [Hybrid deployment](/dagster-plus/deployment/hybrid). |
---

## deploy.yml

| Property | Value |
| --- | --- |
| **Name** | `deploy.yml` |
| **Status** | Active |
| **Required** | Required for Dagster+ |
| **Description** | Defines the steps required to deploy a project in Dagster+, including running checks, checking out the project directory, and deploying the project. Additionally, note the following: if using a [Hybrid deployment](/dagster-plus/deployment/hybrid), this file must be manually added to the repository; if using dbt, some steps may need to be added to successfully deploy your project. Refer to the Using dbt with Dagster+ guide for more information. |
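For orientation, the sketch below shows the general shape such a workflow can take on GitHub. It is a hypothetical outline, not the actual file — `deploy.yml` is generated for you when you import a project, and every name here is a placeholder:

```yaml
# deploy.yml - hypothetical outline; the real file is generated by Dagster+
name: Dagster Cloud Deploy
on:
  push:
    branches: [main] # placeholder branch
jobs:
  deploy:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v4 # check out the project directory
      # ...run checks, then build and deploy the project to Dagster+...
```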
      + +--- + +## Related + + + + + + + + + diff --git a/docs/content/deployment.mdx b/docs/content/deployment.mdx index e9046cc5b22be..4e8d180247710 100644 --- a/docs/content/deployment.mdx +++ b/docs/content/deployment.mdx @@ -1,29 +1,125 @@ --- -title: Deployment | Dagster +title: Deploying Dagster with your infrastructure | Dagster Docs --- -# Deployment options +# Deploying Dagster with your infrastructure -Explore your options for deploying Dagster to your infrastructure or using Dagster Cloud. + + This section is applicable to Dagster Open Source. If you're looking for + Dagster+, click here. + + +Learn how to deploy and execute Dagster with these hands-on guides. --- -## Running Dagster locally +## Overview -Want to get Dagster up and running on your local machine? Check out the [Running Dagster locally guide](/guides/running-dagster-locally) to learn more. +Get started with a look at Dagster's architecture, including a primer on deployment concepts. [Learn more](/deployment/overview). --- -## Deploying to your infrastructure +## Concepts -Ready to deploy Dagster to your infrastructure? Use these resources to learn more: +Learn about the concepts relevant to deploying Dagster. -- [Dagster architecture](/deployment/overview) - An overview of Dagster's architecture -- [Concepts](/deployment/concepts) - Understand the concepts relevant to deploying Dagster -- [Guides](/deployment/guides) - Step-by-step guides for deploying and executing Dagster on services like Amazon Web Services, Docker, Google Cloud Platform, and Kubernetes + + + + + + + + + --- -## Deploying with Dagster Cloud +## Guides + +- [Deploying Dagster](#deploying-dagster) +- [Executing Dagster](#executing-dagster) + +### Deploying Dagster + +Check out these guides to learn the basics of Dagster deployment, including setting up Dagster and running the Dagster webserver. + + + + + + + + + +**Using Helm?** Check out the following guides to walk you through Dagster deployment: + + + + + + + + +### Executing Dagster + +For more advanced job execution, you can submit to different compute substrates. These can be mixed and matched. For example, you can deploy the Dagster webserver on Amazon Web Services (AWS) EC2 and execute jobs on a Dask cluster. -If using Dagster Cloud, some or all of the deployment infrastructure is managed by Dagster on your behalf. Check out the [Dagster Cloud docs](/dagster-cloud) to learn more. + + + + diff --git a/docs/content/deployment/concepts.mdx b/docs/content/deployment/concepts.mdx index ef1e6802b6363..d1869c938dd97 100644 --- a/docs/content/deployment/concepts.mdx +++ b/docs/content/deployment/concepts.mdx @@ -5,8 +5,8 @@ title: Dagster Deployment Concepts | Dagster Docs # Dagster Deployment Concepts - This is applicable to Dagster Open Source. If you're looking for Dagster - Cloud, click here. + This is applicable to Dagster Open Source. If you're looking for Dagster+,{" "} + click here. Learn about the concepts relevant to deploying Dagster. Refer to the [Core concepts section](/concepts) for info on developing with Dagster. 
diff --git a/docs/content/deployment/dagster-daemon.mdx b/docs/content/deployment/dagster-daemon.mdx index ceaf8374d170f..c62f97b56e63e 100644 --- a/docs/content/deployment/dagster-daemon.mdx +++ b/docs/content/deployment/dagster-daemon.mdx @@ -5,7 +5,7 @@ description: Several Dagster features require a long-running daemon process with # Dagster daemon -Several Dagster features, like [schedules](/concepts/partitions-schedules-sensors/schedules), [sensors](/concepts/partitions-schedules-sensors/sensors), and [run queueing](/guides/customizing-run-queue-priority), require a long-running `dagster-daemon` process to be included with your deployment. +Several Dagster features, like [schedules](/concepts/automation/schedules), [sensors](/concepts/partitions-schedules-sensors/sensors), and [run queueing](/guides/customizing-run-queue-priority), require a long-running `dagster-daemon` process to be included with your deployment. --- @@ -77,7 +77,7 @@ The following daemons are currently available:
      Scheduler daemon Creates runs from active{" "} - schedules + schedules Enabled / runs as long as the default{" "} diff --git a/docs/content/deployment/dagster-instance.mdx b/docs/content/deployment/dagster-instance.mdx index 6e1da6498ba92..c6c4471c1d3e5 100644 --- a/docs/content/deployment/dagster-instance.mdx +++ b/docs/content/deployment/dagster-instance.mdx @@ -6,8 +6,8 @@ description: "Define configuration options for your Dagster instance." # Dagster instance - This guide is applicable to Dagster Open Source (OSS) deployments. For Dagster - Cloud, refer to the Dagster Cloud documentation. + This guide is applicable to Dagster Open Source (OSS) deployments. For + Dagster+, refer to the Dagster+ documentation. The Dagster instance defines the configuration that Dagster needs for a single deployment - for example, where to store the history of past runs and their associated logs, where to stream the raw logs from op compute functions, and how to launch new runs. @@ -323,7 +323,7 @@ storage: To use MySQL storage, you'll need to install the{" "} - dagster-postgres library. + dagster-mysql library. To use a MySQL database () for storage, configure `storage.mysql` in `dagster.yaml`: @@ -732,15 +732,15 @@ Refer to the [Telemetry documentation](/about/telemetry) for more info. The `code_servers` key allows you to configure how Dagster loads the code in a [code location](/concepts/code-locations/workspace-files). -When you aren't [running your own gRPC server](/concepts/code-locations/workspace-files#running-your-own-grpc-server), the webserver and the Dagster daemon load your code from a gRPC server running in a subprocess. By default, if your code takes more than 60 seconds to load, Dagster assumes that it's hanging and stops waiting for it to load. +When you aren't [running your own gRPC server](/concepts/code-locations/workspace-files#running-your-own-grpc-server), the webserver and the Dagster daemon load your code from a gRPC server running in a subprocess. By default, if your code takes more than 180 seconds to load, Dagster assumes that it's hanging and stops waiting for it to load. -If you expect that your code will take longer than 60 seconds to load, set the `code_servers.local_startup_timeout` key. The value should be an integer that indicates the maximum timeout, in seconds. +If you expect that your code will take longer than 180 seconds to load, set the `code_servers.local_startup_timeout` key. The value should be an integer that indicates the maximum timeout, in seconds. ```yaml file=/deploying/dagster_instance/dagster.yaml startafter=start_marker_code_servers endbefore=end_marker_code_servers # Configures how long Dagster waits for code locations # to load before timing out. code_servers: - local_startup_timeout: 120 + local_startup_timeout: 360 ``` ### Data retention @@ -791,13 +791,3 @@ schedules: ``` You can also set the optional `num_submit_workers` key to evaluate multiple run requests from the same schedule tick in parallel, which can help decrease latency when a single schedule tick returns many run requests. - -### Auto-materialize - -The `auto_materialize` key allows you to adjust configuration related to [auto-materializing assets](/concepts/assets/asset-auto-execution). 
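For example, a `dagster.yaml` sketch enabling parallel submission might look like the following — a sketch assuming the threaded schedule-evaluation settings from this section, with illustrative worker counts:

```yaml
# dagster.yaml - illustrative sketch
schedules:
  use_threads: true # assumed setting: evaluate schedule ticks in threads
  num_workers: 8 # threads used to evaluate ticks in parallel
  num_submit_workers: 4 # threads used to submit run requests from a single tick
```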
- -```yaml file=/deploying/dagster_instance/dagster.yaml startafter=start_marker_auto_materialize endbefore=end_marker_auto_materialize -auto_materialize: - run_tags: - key: value -``` diff --git a/docs/content/deployment/executors.mdx b/docs/content/deployment/executors.mdx index 1365ab45db772..14e175dbacd10 100644 --- a/docs/content/deployment/executors.mdx +++ b/docs/content/deployment/executors.mdx @@ -37,13 +37,11 @@ from dagster import graph, job, multiprocess_executor # Providing an executor using the job decorator @job(executor_def=multiprocess_executor) -def the_job(): - ... +def the_job(): ... @graph -def the_graph(): - ... +def the_graph(): ... # Providing an executor using graph_def.to_job(...) @@ -69,8 +67,7 @@ asset_job = define_asset_job("the_job", selection="*") @job -def op_job(): - ... +def op_job(): ... # op_job and asset_job will both use the multiprocess_executor, diff --git a/docs/content/deployment/guides.mdx b/docs/content/deployment/guides.mdx index 97f8fcfa15e72..65d72e916638f 100644 --- a/docs/content/deployment/guides.mdx +++ b/docs/content/deployment/guides.mdx @@ -6,7 +6,7 @@ title: Dagster Deployment Guides | Dagster Docs These guides are applicable to Dagster Open Source. If you're looking for - Dagster Cloud, click here. + Dagster+, click here. Learn how to deploy and execute Dagster with these hands-on guides. diff --git a/docs/content/deployment/guides/aws.mdx b/docs/content/deployment/guides/aws.mdx index 8f2c13c5e80fb..6b3da54e7aef1 100644 --- a/docs/content/deployment/guides/aws.mdx +++ b/docs/content/deployment/guides/aws.mdx @@ -83,8 +83,8 @@ run_launcher: class: "EcsRunLauncher" config: run_resources: - cpu: 256 - memory: 512 # In MiB + cpu: "256" + memory: "512" # In MiB ephemeral_storage: 128 # In GiB ``` diff --git a/docs/content/deployment/guides/gcp.mdx b/docs/content/deployment/guides/gcp.mdx index 10224fdd4aec8..913cf836d6e3f 100644 --- a/docs/content/deployment/guides/gcp.mdx +++ b/docs/content/deployment/guides/gcp.mdx @@ -61,8 +61,7 @@ from dagster import job } }, ) -def gcs_job(): - ... +def gcs_job(): ... ``` With this in place, your job runs will store outputs on GCS in the location `gs:///dagster/storage//files/.compute`. diff --git a/docs/content/deployment/guides/kubernetes.mdx b/docs/content/deployment/guides/kubernetes.mdx index 7cab980aa37d3..6248b61ae6a81 100644 --- a/docs/content/deployment/guides/kubernetes.mdx +++ b/docs/content/deployment/guides/kubernetes.mdx @@ -1,9 +1,9 @@ --- -title: "Dagster + Helm deployment guides | Dagster Docs" +title: "Dagster & Helm deployment guides | Dagster Docs" description: "Guides for deploying Dagster with Helm." --- -# Dagster + Helm deployment guides +# Dagster & Helm deployment guides Check out these guides to learn the basics of Dagster deployment using Helm. [Go back to all deployment guides](/deployment/guides). diff --git a/docs/content/deployment/guides/kubernetes/customizing-your-deployment.mdx b/docs/content/deployment/guides/kubernetes/customizing-your-deployment.mdx index e3df087ef0c6b..76047c56c4ac7 100644 --- a/docs/content/deployment/guides/kubernetes/customizing-your-deployment.mdx +++ b/docs/content/deployment/guides/kubernetes/customizing-your-deployment.mdx @@ -116,11 +116,13 @@ def my_job(): Other run launchers will ignore the `dagster-k8s/config` tag. -If your Dagster job is configured with the that runs each step in its own pod, configuration that you set on a job using the `dagster-k8s/config` tag will _not_ be propagated to any of those step pods. 
+The default [executor](/deployment/executors) - - will run each job in its own pod, executing each step in an individual process. If your Dagster job produces assets which have a short compute time (compared to the step overhead time), consider avoiding the step process creation cost by using the executor, which runs each step serially in a single process. This can be especially useful where parallelism is obtained through a and is unnecessary within the job. + +For this use-case, the is more efficient than running each step in its own process. The is contraindicated, as the delay of scheduling and starting up a new Dagster pod to execute every step would significantly slow down overall execution time. ### Kubernetes configuration on every step in a run -If your Dagster job is configured with the that runs each step in its own pod, you can use the `step_k8s_config` field on the executor to control the Kubernetes configuration for every step pod. +If your Dagster job is configured with the that runs each step in its own pod, configuration that you set on a job using the `dagster-k8s/config` tag will _not_ be propagated to any of those step pods. Use the `step_k8s_config` field on the executor to control the Kubernetes configuration for every step pod. `step_k8s_config` is a dictionary with the following keys: @@ -214,41 +216,88 @@ Other executors will ignore the `dagster-k8s/config` tag when it is set on an op ### Precedence rules -By default, if a Kubernetes configuration dictionary (like `container_config`) is specified at both the instance level in the Helm chart and in a specific Dagster job or op, the dictionaries will be shallowly merged. The more specific configuration takes precedence if the same key is set in both dictionaries. +Kubernetes configuration can be applied at several different scopes: + +- At the deployment level, applying to every run in the deployment +- At the code location, applying to every run launched from the code location +- At the job level, applying to every run launched for that job +- At the step level, if using the + +By default, if Kubernetes configuration is specified in multiple places, the configuration is merged recursively. Scalar values will be replaced by the configuration for the more specific scope, dictionary fields will be combined, and list fields will be appended to each other, discarding duplicate values. List fields that cannot be meaningfully appended, like `command` or `args`, are replaced. Consider the following example: - **In the Helm chart**, `k8sRunLauncher.runK8sConfig.podSpecConfig` is set to: ```json - { "node_selector": { "disktype": "ssd" }, "dns_policy": "ClusterFirst" } + { + "node_selector": { "disktype": "ssd" }, + "dns_policy": "ClusterFirst", + "image_pull_secrets": [{ "name": "my-secret" }] + } ``` -- **But a specific job** has the `pod_spec_config` key in the `dagster-k8s/config` tag set to: +- **But a specific job** has the `dagster-k8s/config` tag set to: ```json - { "node_selector": { "region": "east" } } + { + "pod_spec_config": { + "node_selector": { "region": "east" }, + "dns_policy": "Default", + "image_pull_secrets": [{ "name": "another-secret" }] + } + } ``` -Then the node selector from the job and the DNS policy from the Helm chart will be applied, since only the node selector is overridden in the job. +The job will merge the two `node_selector` dictionaries, append the two `image_pull_secrets` lists, and replace the `dns_policy` scalar value. 
The resulting `pod_spec_config` will be: + +```json +{ + "node_selector": { "disktype": "ssd", "region": "east" }, + "dns_policy": "Default", + "image_pull_secrets": [{ "name": "my-secret" }, { "name": "another-secret" }] +} +``` -To customize this behavior, you can also set the `merge_behavior` key in the `dagster-k8s/config` tag to `DEEP` instead of `SHALLOW`. When `merge_behavior` is set to `DEEP`, the config dictionaries are merged recursively. Scalar values will still be replaced by the more specific configuration, but any dictionary fields will be combined, and list fields will be appended to each other. +To customize this behavior, you can also set the `merge_behavior` key in the `dagster-k8s/config` tag to `SHALLOW` instead of `DEEP`. When `merge_behavior` is set to `SHALLOW`, the dictionaries will be shallowly merged. The configuration for the more specific scope takes precedence if the same key is set in both dictionaries or if scalar values need to be replaced - for example, configuration at the code location level will replace configuration at the deployment level. To modify the previous example: -- **In the Helm chart**, `k8sRunLauncher.runK8sConfig.podSpecConfig` is again set to: +- **In the Helm chart**, `k8sRunLauncher.runK8sConfig.podSpecConfig` is set to: ```json - { "node_selector": { "disktype": "ssd" }, "dns_policy": "ClusterFirst" } + { + "node_selector": { "disktype": "ssd" }, + "dns_policy": "ClusterFirst", + "image_pull_secrets": [{ "name": "my-secret" }] + } ``` -- **But a specific job** has the `pod_spec_config` key in the `dagster-k8s/config` tag set to: +- **But a specific job** has the `dagster-k8s/config` tag set to: ```json - { "node_selector": { "region": "east" }, "merge_behavior": "DEEP" } + { + "pod_spec_config": { + "node_selector": { "region": "east" }, + "image_pull_secrets": [{ "name": "another-secret" }] + }, + "merge_behavior": "SHALLOW" + } ``` -Then the job will merge the two `node_selector` dictionaries and use `{ "disktype": "ssd", "region": "east" }` as its `node_selector` configuration. +Since the `merge_behavior` is set to `SHALLOW`, the `node_selector` and `image_pull_secrets` from the job and the DNS policy from the Helm chart will be applied, since only the `node_selector` and `image_pull_secrets` are overridden in the job. + +The resulting `pod_spec_config` will be: + +```json +{ + "node_selector": { "region": "east" }, + "dns_policy": "ClusterFirst", + "image_pull_secrets": [{ "name": "another-secret" }] +} +``` + +**Note**: In Dagster code before version 1.7.0, the default merge behavior was `SHALLOW` instead of `DEEP`. --- diff --git a/docs/content/deployment/guides/kubernetes/deploying-with-helm.mdx b/docs/content/deployment/guides/kubernetes/deploying-with-helm.mdx index c51978c634973..b57b14a798019 100644 --- a/docs/content/deployment/guides/kubernetes/deploying-with-helm.mdx +++ b/docs/content/deployment/guides/kubernetes/deploying-with-helm.mdx @@ -158,7 +158,7 @@ By default, the webserver launches runs via the -The [daemon](/deployment/dagster-daemon) periodically checks the runs table in PostgreSQL for runs that are ready to be launched. The daemon also submits runs from [schedules](/concepts/partitions-schedules-sensors/schedules) and [sensors](/concepts/partitions-schedules-sensors/sensors). +The [daemon](/deployment/dagster-daemon) periodically checks the runs table in PostgreSQL for runs that are ready to be launched. 
The daemon also submits runs from [schedules](/concepts/automation/schedules) and [sensors](/concepts/partitions-schedules-sensors/sensors). The daemon launches runs via the , creating a run worker [job](https://kubernetes.io/docs/concepts/workloads/controllers/job/) with the image specified in the user code deployment. @@ -227,12 +227,12 @@ kubectl config use-context dagster In this step, you'll build a Docker image containing your Dagster definitions and any dependencies needed to execute the business logic in your code. For reference, here is an example [Dockerfile](https://github.com/dagster-io/dagster/blob/master/python_modules/automation/automation/docker/images/user-code-example/Dockerfile) and the corresponding [user code directory](https://github.com/dagster-io/dagster/tree/master/examples/deploy_k8s/example_project). -Here, we install all the Dagster-related dependencies in the Dockerfile and then copy the directory with the implementation of the Dagster repository into the root folder. We'll need to remember the path of this repository in a [later step](/deployment/guides/kubernetes/deploying-with-helm#step-6-configure-your-user-deployment) to setup the gRPC server as a deployment. +This example installs all of the Dagster-related dependencies in the Dockerfile and then copies the directory with the implementation of the Dagster repository into the root folder. We'll need to remember the path of this repository in a [later step](/deployment/guides/kubernetes/deploying-with-helm#step-6-configure-your-user-deployment) to set up the gRPC server as a deployment. The example user code repository includes: - An `example_job` job that runs all ops in a single pod -- A `pod_per_op_job` job that uses the [`k8s_job_executor`](/\_apidocs/libraries/dagster-k8s#dagster_k8s.k8s_job_executor) to run each op in its own pod. **NOTE:** this job uses the [`s3_pickle_io_manager`](/\_apidocs/libraries/dagster-aws#dagster_aws.s3.s3\_pickle_io_manager), which requires [setting AWS credentials](/deployment/guides/aws#using-s3-for-io-management). +- A `pod_per_op_job` job that uses the to run each op in its own pod. **NOTE:** this job uses the , which requires [setting AWS credentials](/deployment/guides/aws#using-s3-for-io-management). - A `pod_per_op_celery_job` that is only useful for [Celery deployments](/deployment/guides/kubernetes/deploying-with-helm-advanced). For projects with many dependencies, we recommend publishing your Python project as a package and installing it in your Dockerfile. @@ -364,9 +364,9 @@ dagster-user-deployments: - "/example_project/example_repo/repo.py" port: 3030 envConfigMaps: - - my-config-map + - name: my-config-map envSecrets: - - my-secret + - name: my-secret labels: foo_label: bar_value volumes: diff --git a/docs/content/deployment/guides/service.mdx b/docs/content/deployment/guides/service.mdx index b7c441fd8718f..dab906ece55da 100644 --- a/docs/content/deployment/guides/service.mdx +++ b/docs/content/deployment/guides/service.mdx @@ -25,7 +25,7 @@ In this configuration, the webserver will write execution logs to `$DAGSTER_HOME ## Running the Dagster daemon -If you're using [schedules](/concepts/partitions-schedules-sensors/schedules), [sensors](/concepts/partitions-schedules-sensors/sensors), or [backfills](/concepts/partitions-schedules-sensors/backfills), or want to set limits on the number of runs that can be executed at once, you'll want to also run a [dagster-daemon service](/deployment/dagster-daemon) as part of your deployment. 
To run this service locally, run the following command: +If you're using [schedules](/concepts/automation/schedules), [sensors](/concepts/partitions-schedules-sensors/sensors), or [backfills](/concepts/partitions-schedules-sensors/backfills), or want to set limits on the number of runs that can be executed at once, you'll want to also run a [dagster-daemon service](/deployment/dagster-daemon) as part of your deployment. To run this service locally, run the following command: ```shell pip install dagster diff --git a/docs/content/deployment/open-source.mdx b/docs/content/deployment/open-source.mdx deleted file mode 100644 index fc8dfa9c45a85..0000000000000 --- a/docs/content/deployment/open-source.mdx +++ /dev/null @@ -1,125 +0,0 @@ ---- -title: Deploying Dagster with your infrastructure | Dagster Docs ---- - -# Deploying Dagster with your infrastructure - - - This section is applicable to Dagster Open Source. If you're looking for - Dagster Cloud, click here. - - -Learn how to deploy and execute Dagster with these hands-on guides. - ---- - -## Overview - -Get started with a look at Dagster's architecture, including a primer on deployment concepts. [Learn more](/deployment/overview). - ---- - -## Concepts - -Learn about the concepts relevant to deploying Dagster. - - - - - - - - - - - ---- - -## Guides - -- [Deploying Dagster](#deploying-dagster) -- [Executing Dagster](#executing-dagster) - -### Deploying Dagster - -Check out these guides to learn the basics of Dagster deployment, including setting up Dagster and running the Dagster webserver. - - - - - - - - - -**Using Helm?** Check out the following guides to walk you through Dagster deployment: - - - - - - - - -### Executing Dagster - -For more advanced job execution, you can submit to different compute substrates. These can be mixed and matched. For example, you can deploy the Dagster webserver on Amazon Web Services (AWS) EC2 and execute jobs on a Dask cluster. - - - - - diff --git a/docs/content/deployment/overview.mdx b/docs/content/deployment/overview.mdx index 554ec9a1ddf0d..77fb508b1ebf1 100644 --- a/docs/content/deployment/overview.mdx +++ b/docs/content/deployment/overview.mdx @@ -6,9 +6,9 @@ description: "Learn about the architecture of an Open Source Dagster deployment. # Open Source deployment architecture - This guide is applicable to Dagster Open Source (OSS) deployments. For Cloud - deployments, refer to the{" "} - Dagster Cloud documentation. + This guide is applicable to Dagster Open Source (OSS) deployments. For + Dagster+ deployments, refer to the{" "} + Dagster+ documentation. This page covers general information about deploying Dagster on your own infrastructure. For guides on specific platforms, refer to the [Deployment guides](/deployment/guides). diff --git a/docs/content/deployment/run-coordinator.mdx b/docs/content/deployment/run-coordinator.mdx index c4402b72127ce..2fc0c4707fecd 100644 --- a/docs/content/deployment/run-coordinator.mdx +++ b/docs/content/deployment/run-coordinator.mdx @@ -113,8 +113,8 @@ However, if using the `QueuedRunCoordinator` or building a custom implementation href="/deployment/run-launcher" > - This guide is applicable to Dagster Open Source (OSS) deployments. For Dagster - Cloud, refer to the Dagster Cloud documentation. + This guide is applicable to Dagster Open Source (OSS) deployments. For + Dagster+, refer to the Dagster+ documentation. Runs instigated from the Dagster UI, the scheduler, or the `dagster job launch` CLI command are launched in Dagster. 
This is a distinct operation from executing a job using the `execute_job` Python API or the CLI `execute` command. A launch operation allocates computational resources (e.g. a process, a container, a Kubernetes pod, etc) to carry out a run execution and then instigates the execution. diff --git a/docs/content/deployment/run-monitoring.mdx b/docs/content/deployment/run-monitoring.mdx index 1848ac829affe..5bafc1acdd697 100644 --- a/docs/content/deployment/run-monitoring.mdx +++ b/docs/content/deployment/run-monitoring.mdx @@ -21,6 +21,14 @@ run_monitoring: poll_interval_seconds: 120 ``` + + In Dagster+ Run Monitoring is always enabled and can be configured in{" "} + + deployment settings + + . + + ## Run start timeouts When Dagster launches a run, the run stays in STARTING status until the run worker spins up and marks the run as STARTED. In the event that some failure causes the run worker to not spin up, the run might be stuck in STARTING status. The `start_timeout_seconds` offers a time limit for how long runs can hang in this state before being marked as failed. @@ -31,21 +39,35 @@ When Dagster terminates a run, the run moves into CANCELING status and sends a t ## General run timeouts -After a run is marked as STARTED, it may hang indefinitely for various reasons (user API errors, network issues, etc.). `MAX_RUNTIME_SECONDS_TAG` can be used to set a timeout on a per-run basis. If the run exceeds this timeout, and run monitoring is enabled, it will be marked as failed. +After a run is marked as STARTED, it may hang indefinitely for various reasons (user API errors, network issues, etc.). You can configure a maximum runtime for every run in a deployment by setting the `run_monitoring.max_runtime_seconds` field in your dagster.yaml or (Dagster+ deployment settings)\[dagster-plus/managing-deployments/deployment-settings-reference] to the maximum runtime in seconds. If a run exceeds this timeout and run monitoring is enabled, it will be marked as failed. The `dagster/max_runtime` tag can also be used to set a timeout in seconds on a per-run basis. + +For example, to configure a maximum of 2 hours for every run in your deployment: + +```yaml +run_monitoring: + enabled: true + max_runtime_seconds: 7200 +``` + +or in Dagster+, add the following to your [deployment settings](/dagster-plus/managing-deployments/deployment-settings-reference): + +```yaml +run_monitoring: + max_runtime_seconds: 7200 +``` The below code example shows how to set a run timeout of 10 seconds on a per-job basis: ```python file=/deploying/monitoring_daemon/run_timeouts.py startafter=start_timeout -from dagster import MAX_RUNTIME_SECONDS_TAG, define_asset_job, job +from dagster import define_asset_job, job -@job(tags={MAX_RUNTIME_SECONDS_TAG: 10}) -def my_job(): - ... +@job(tags={"dagster/max_runtime": 10}) +def my_job(): ... asset_job = define_asset_job( - name="some_job", selection="*", tags={MAX_RUNTIME_SECONDS_TAG: 10} + name="some_job", selection="*", tags={"dagster/max_runtime": 10} ) # end_timeout ``` diff --git a/docs/content/deployment/run-retries.mdx b/docs/content/deployment/run-retries.mdx index 957c719de7658..00f0012e24e70 100644 --- a/docs/content/deployment/run-retries.mdx +++ b/docs/content/deployment/run-retries.mdx @@ -5,19 +5,24 @@ description: Automatically retry Dagster runs # Run Retries -If you configure retries at the [Job](/\_apidocs/jobs#jobs) level, a new run will be kicked off when a run for that job fails. 
Compared to [Op retries](/concepts/ops-jobs-graphs/op-retries), the max retry limit for run retries applies to the whole run instead of each individual Op. Run retries also handle the cases where a run worker crashed. +If you configure run retries, a new run will be kicked off whenever a run fails for any reason. Compared to [op retries](/concepts/ops-jobs-graphs/op-retries), the maximum retry limit for run retries applies to the whole run instead of each individual op. Run retries also handle the case where the run process crashes or is unexpectedly terminated. ## Configuration -To enable run retries, add the following to your `dagster.yaml`. This will start a new daemon which polls to the event log for run failure events. +How to configure run retries depends on whether you're using Dagster+ or Dagster Open Source: -```yaml file=/deploying/dagster_instance/dagster.yaml startafter=start_run_retries endbefore=end_run_retries +- **Dagster+**: Use the [Dagster+ UI or the dagster-cloud CLI](/dagster-plus/managing-deployments/managing-deployments#configuring-deployment-settings) to set a default maximum number of retries. Run retries do not need to be explicitly enabled. +- **Dagster Open Source**: Use your instance's `dagster.yaml` to enable run retries. + +For example, the following will set a default maximum number of retries of `3` for all runs: + +```yaml run_retries: - enabled: true - max_retries: 3 # Sets a default for all jobs. 0 if not set + enabled: true # Omit this key if using Dagster+, since run retries are enabled by default + max_retries: 3 ``` -You can also configure retries using tags either on Job definitions or in the Dagster UI [Launchpad](/concepts/webserver/ui#launchpad-tab). +In both Dagster+ and Dagster Open Source, you can also configure retries using tags either on Job definitions or in the Dagster UI [Launchpad](/concepts/webserver/ui#launchpad-tab). ```python file=/deploying/job_retries.py from dagster import job @@ -35,8 +40,36 @@ def other_sample_sample_job(): ### Retry Strategy -The `dagster/retry_strategy` tag controls which Ops the retry will run. +The `dagster/retry_strategy` tag controls which ops the retry will run. + +By default, retries will re-execute from failure (tag value `FROM_FAILURE`). This means that any successful ops will be skipped, but their output will be used for downstream ops. If the `dagster/retry_strategy` tag is set to `ALL_STEPS`, all the ops will run again. + +**Note:** `FROM_FAILURE` requires an I/O manager that can access outputs from other runs. For example, on Kubernetes the would work but the would not, since the new run is in a new Kubernetes job with a separate filesystem. + +### Combining op and run retries + +By default, if a run fails due to an op failure and both op and run retries are enabled, the overlapping retries might cause the op to be retried more times than desired. This is because the op retry count will reset for each retried run. + +To prevent this, you can configure run retries to only retry when the failure is for a reason other than an op failure, like a crash or an unexpected termination of the run worker. This behavior is controlled by the `run_retries.retry_on_asset_or_op_failure` setting, which defaults to `true` but can be overridden to `false`. 
+ +For example, the following configures run retries so that they ignore runs that failed due to a step failure: + +```yaml +run_retries: + enabled: true # Omit this key if using Dagster+, since run retries are enabled by default + max_retries: 3 + retry_on_asset_or_op_failure: false +``` + +You can also apply the `dagster/retry_on_asset_or_op_failure` tag on specific jobs using tags to override the default value for runs of that job: + +```python +from dagster import job -By default, retries will re-execute from failure (tag value `FROM_FAILURE`). This means that any successful Ops will be skipped, but their output will be used for downstream Ops. If the `dagster/retry_strategy` tag is set to `ALL_STEPS`, all the Ops will run again. -NOTE: `FROM_FAILURE` requires an IOManager that can access outputs from other runs. For example, on Kubernetes the [s3\_pickle_io_manager](/\_apidocs/libraries/dagster-aws#dagster_aws.s3.s3\_pickle_io_manager) would work but the [`FilesytemIOManager`](https://docs.dagster.io/\_apidocs/io-managers#dagster.FilesytemIOManager) would not, since the new run is in a new Kubernetes Job with a separate filesystem. +@job(tags={"dagster/max_retries": 3, "dagster/retry_on_asset_or_op_failure": False}) +def sample_job(): + pass +``` + +**Note:** Setting `retry_on_asset_or_op_failure` to `false` will only change retry behavior for runs on Dagster version 1.6.7 or greater. diff --git a/docs/content/getting-started.mdx b/docs/content/getting-started.mdx index 4c85370483113..231bb752c046a 100644 --- a/docs/content/getting-started.mdx +++ b/docs/content/getting-started.mdx @@ -8,12 +8,12 @@ Dagster is an orchestrator that's designed for developing and maintaining data a You declare functions that you want to run and the data assets that those functions produce or update. Dagster then helps you run your functions at the right time and keep your assets up-to-date. -Dagster is built to be used at every stage of the data development lifecycle - local development, unit tests, integration tests, staging environments, all the way up to production. +Dagster is designed to be used at every stage of the data development lifecycle, including local development, unit tests, integration tests, staging environments, and production. -**New to Dagster**? Check out the **Hello Dagster example**, learn with some hands-on **Tutorials**, or dive into **Concepts**. For an in-depth learning experience, enroll in **Dagster University**. +**New to Dagster**? Check out the **Quickstart**, learn with some hands-on **Tutorials**, or dive into **Concepts**. For an in-depth learning experience, enroll in **Dagster University**. -
      - +
      + @@ -70,7 +70,7 @@ Use one of our examples to explore Dagster concepts, integrations, and realistic href="https://github.com/dagster-io/dagster/blob/master/examples" > @@ -93,8 +93,8 @@ Use one of our examples to explore Dagster concepts, integrations, and realistic href="/deployment/guides/kubernetes/deploying-with-helm" > diff --git a/docs/content/getting-started/create-new-project.mdx b/docs/content/getting-started/create-new-project.mdx index b0bc243a346d9..0d632583a881f 100644 --- a/docs/content/getting-started/create-new-project.mdx +++ b/docs/content/getting-started/create-new-project.mdx @@ -125,7 +125,7 @@ Once your project is ready to move to production, check out our recommendations Check out the following resources to learn more about deployment options: -- [Dagster Cloud](/dagster-cloud) - Deploy using Dagster-managed infrastructure +- [Dagster+](/dagster-plus) - Deploy using Dagster-managed infrastructure - [Your own infrastructure](/deployment) - Deploy to your infrastructure, such as Docker, Kubernetes, Amazon Web Services, etc. --- diff --git a/docs/content/getting-started/getting-help.mdx b/docs/content/getting-started/getting-help.mdx index bc82d601a0c24..91adea55eec00 100644 --- a/docs/content/getting-started/getting-help.mdx +++ b/docs/content/getting-started/getting-help.mdx @@ -1,82 +1,197 @@ --- -title: Getting Help | Dagster +title: Getting help | Dagster description: Have questions about how to use Dagster? Hit a bug? Have a feature request? This page includes tips on what to do. --- -# Getting Help +# Getting help -Have questions about how to use Dagster? Hit a bug? Have a feature request? The Dagster Labs team and the Dagster community make a best-effort attempt to respond, on a few different platforms. +Have questions about how to use Dagster? Hit a bug? Have a feature request? The Dagster Labs team and the Dagster community make a best-effort attempt to respond on a few different platforms. --- -## Searching for answers +## Available resources + +Where you go to get support can vary depending on what you want to accomplish. Refer to the following table for details about the resources available to you and when they should be used. + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
| I want to... | You should use... |
| --- | --- |
| Generally search for an answer on the internet | A **search engine**. Search engines like Google generally do a good job at surfacing relevant Dagster docs, GitHub Discussions, and GitHub Issues, so we recommend starting there. Unfortunately, ChatGPT is not currently a great resource for answering questions about Dagster because its training data doesn't include Dagster's latest APIs. |
| Use AI to self-serve | **Dagster's trained large language model (LLM)**, accessible in the Dagster Slack's [#ask-ai](https://dagster.slack.com/archives/C066HKS7EG1) channel or by clicking the **Ask AI** button on the docs site. |
| File a bug or feature request | [**GitHub Issues**](https://github.com/dagster-io/dagster/issues). We use GitHub Issues to track all community requests, whether they're created by Dagster team members or community members. If you find an issue that seems related to your situation but you're unsure, don't be shy! We can redirect it to a different issue if necessary. Check out the **Tips** section for tips on including code snippets and filing bugs. |
| Ask a question that may be helpful to others, or make a question visible to the Dagster team | [**GitHub Discussions**](https://github.com/dagster-io/dagster/discussions). This is the main Q&A site for Dagster. We recommend posting most questions here, as they'll show up in search engines and can then benefit others. Check out the **Tips** section for tips on including code snippets and how to ask questions that get answered. |
| Participate in real-time discussion with other Dagster users | The [Dagster community Slack](https://dagster.io/slack). We strongly encourage you to join, as it's the main real-time gathering place for the community. Check out the **Tips** section for tips on including code snippets and how to ask questions that get answered. |
      -Search engines like Google generally do a good job at surfacing relevant Dagster docs, Github Discussions and Github Issues, so we recommend starting there. - -Unfortunately, ChatGPT is not currently a great resource for answering questions about Dagster, because the corpus it was trained on doesn’t include Dagster’s latest APIs. +--- -However, one can interact with a fine-tuned Language Learning Model (LLM) trained on Dagster docs in the [#ask-ai](https://dagster.slack.com/archives/C066HKS7EG1) channel of the Dagster Slack. +## Tips ---- +### Formatting code snippets -## Bugs and feature requests +#### Creating code blocks with Markdown -We use [Github Issues](https://github.com/dagster-io/dagster/issues) to track all bugs and feature requests from the community, and we welcome issues submitted by users. +When posting in GitHub or Slack, including a code snippet can help others understand your question. Using Markdown to format the content as code improves readability. For example: -If you find a Github issue that seems like it’s related to the issue you’re experiencing, but you're not sure, don’t be shy about posting on that Github issue to ask. We can redirect it to a different issue if necessary. + ``` # backticks create a code block + from dagster import ... + ... + ``` -### Tips for filing issues +#### Adding syntax highlighting -- For bugs, include the minimum-sized working code snippet that reproduces your issue. -- If your Github issue includes a code snippet, you can add syntax highlighting by specifying the language at the top of the block: +On GitHub - whether in issues or discussions - you can add syntax highlighting to code snippets to further improve readability. This is done by specifying the language at the top of the Markdown code block: - ```python + ```python ## specifies language from dagster import ... ... ``` ---- +### Reporting bugs + +When filing a [GitHub issue](https://github.com/dagster-io/dagster/issues) for a bug: -## Questions +- **DO** include the minimum-sized working code snippet that reproduces the issue +- **DO** include the steps you took to create the issue, if the issue occurred in the Dagster UI +- **DO** include the Dagster version being used, if applicable +- **DON'T** include any sensitive information -If you're trying to find out the best way to use Dagster for your use case, there are a couple places to get help. +### Asking questions that get answers -### Github Discussions: the preferred place to ask questions +When posting questions in [GitHub Discussions](https://github.com/dagster-io/dagster/discussions) or [Slack](https://dagster.io/slack), we recommend: -[Github Discussions](https://github.com/dagster-io/dagster/discussions) is the main Q & A site for Dagster. For questions whose answers might be useful to others (which is most questions), Github Discussions is the best place to ask them, because they show up when others Google the same questions. +- Being clear, specific, and as brief as possible +- Not including lots of irrelevant details -**Tip:** if your Github discussion includes a code snippet, add syntax highlighting by specifying the language at the top of the block: +Generally, the less time and effort it takes for someone to digest your question, the more likely it will get answered. Check out the following sections for examples of well-phrased questions and those that could use some improvement. - ```python - from dagster import ... +**Question 1** - ... 
- ``` +> **Is it possible to set up Dagster to automatically update an entire downstream table every time that a specific partition of an upstream table is updated?** We have a table that's partitioned by customer, and we have a specific analysis that we want to do on a specific customer. -### Dagster Slack: the real-time gathering place for the community +✅ **Good question!** -We strongly encourage you to join [Dagster's Slack](https://dagster.io/slack), as it's the main real-time gathering place for the community. However, if you want a question or issue to be seen by the Dagster team, the best place to post it is in Github. +This question is brief enough to be quickly digested, but specific enough to be complete. -Check out the #faq-read-me-before-posting channel for more info. +--- -### Asking digestible questions +**Question 2** -The less time and effort it takes for someone to digest your question, the more likely it will get answered. The easiest questions to answer include enough information to be clear and specific, but don't require the reader to understand details that aren't relevant. +> **Is it possible to set up Dagster to automatically update an entire downstream table every time that a specific partition of an upstream table is updated?** We have a table that's partitioned by customer, and we have a specific analysis that we want to do on a specific customer. +> +> The background here is that that we've set up Fivetran to pulls in our per-customer Salesforce tables into one big Snowflake table. We have about 500 customers, and they get pulled in usually daily, but it depends on the customer. For about 20 of these customers, we have custom logic, implemented in SQL with dbt, that performs analyses that are specific to those customers. We want those analyses to run automatically when customer data is updated, so that our sales representatives can discover red flags when they check their Looker dashboards. -Below is an example of a well-phrased question. It's brief enough to be quickly digestible, but specific enough to be complete: +✅ **Good question!** -> Is it possible to set up Dagster to automatically update an entire downstream table every time that a specific partition of an upstream table is updated? We have a table that's partitioned by customer, and we have a specific analysis that we want to do on a specific customer. +This version of the first question provides additional background information, but doesn't bury the core question. This can be helpful because it allows readers to more deeply understand the situation and provide broader suggestions. -Below is another example of a well-phrased question. It's the same as the question above, but also includes background context. As long as the background context doesn't obscure the core question, it's really helpful to include, because it allows the reader to understand the question more deeply and make broader suggestions: +--- -> Is it possible to set up Dagster to automatically update an entire downstream table every time that a specific partition of an upstream table is updated? We have a table that's partitioned by customer, and we have a specific analysis that we want to do on a specific customer. +**Question 3** -> The background here is that that we've set up Fivetran to pulls in our per-customer Salesforce tables into one big Snowflake table. We have about 500 customers, and they get pulled in usually daily, but it depends on the customer. 
For about 20 of these customers, we have custom logic, implemented in SQL with dbt, that performs analyses that are specific to those customers. We want those analyses to run automatically when customer data is updated, so that our sales representatives can discover red flags when they check their Looker dashboards. +> **How do I use tables with specific upstream partitions?** + +❌ **Needs improvement** + +This question needs more detail to allow the reader to provide appropriate suggestions. + +--- 

-Below is an example of a question that doesn't provide enough detail for the reader to answer it: +**Question 4** 

-> How do I use tables with specific upstream partitions? +> We've set up Fivetran to pull in our per-customer Salesforce tables into one big Snowflake table. We have about 500 customers, and they get pulled in usually daily, but it depends on the customer. For about 20 of these customers, we have custom logic, implemented in SQL with dbt, that performs analyses that are specific to those customers. We want those analyses to run automatically when customer data is updated, so that our sales representatives can discover red flags when they check their Looker dashboards. **How can I get this working with Dagster?** 

-Below is an example a question that's difficult to digest. It requires the reader to understand an entire data pipeline to be able to answer the specific question: +❌ **Needs improvement** 

-> We've set up Fivetran to pulls in our per-customer Salesforce tables into one big Snowflake table. We have about 500 customers, and they get pulled in usually daily, but it depends on the customer. For about 20 of these customers, we have custom logic, implemented in SQL with dbt, that performs analyses that are specific to those customers. We want those analyses to run automatically when customer data is updated, so that our sales representatives can discover red flags when they check their Looker dashboards. How can I get this working with Dagster? +This question requires understanding an entire data pipeline to answer the core question. diff --git a/docs/content/getting-started/hello-dagster.mdx b/docs/content/getting-started/hello-dagster.mdx deleted file mode 100644 index d6a16aa1fa120..0000000000000 --- a/docs/content/getting-started/hello-dagster.mdx +++ /dev/null @@ -1,144 +0,0 @@ ---- -title: Hello Dagster | Dagster Docs -description: Run dagster for the first time ---- - -# Hello Dagster - -Welcome to Dagster! In this guide, you'll build a simple data pipeline in Dagster that downloads the top 10 HackerNews stories. In three quick steps, you'll have functional code and begin exploring Dagster's user interface. - - - - -Make sure you have one of these versions of Python installed before continuing. - -Let's get started! - ---- - -## Step 1: Create hello-dagster.py - -Create a file named `hello-dagster.py` that contains the following code: - -```python file=/getting-started/hello-dagster/hello-dagster.py -import json - -import pandas as pd -import requests - -from dagster import AssetExecutionContext, MetadataValue, asset - - -@asset -def hackernews_top_story_ids(): - """Get top stories from the HackerNews top stories endpoint. - - API Docs: https://github.com/HackerNews/API#new-top-and-best-stories. 
- """ - top_story_ids = requests.get( - "https://hacker-news.firebaseio.com/v0/topstories.json" - ).json() - - with open("hackernews_top_story_ids.json", "w") as f: - json.dump(top_story_ids[:10], f) - - -# asset dependencies can be inferred from parameter names -@asset(deps=[hackernews_top_story_ids]) -def hackernews_top_stories(context: AssetExecutionContext): - """Get items based on story ids from the HackerNews items endpoint.""" - with open("hackernews_top_story_ids.json", "r") as f: - hackernews_top_story_ids = json.load(f) - - results = [] - for item_id in hackernews_top_story_ids: - item = requests.get( - f"https://hacker-news.firebaseio.com/v0/item/{item_id}.json" - ).json() - results.append(item) - - df = pd.DataFrame(results) - df.to_csv("hackernews_top_stories.csv") - - # recorded metadata can be customized - metadata = { - "num_records": len(df), - "preview": MetadataValue.md(df[["title", "by", "url"]].to_markdown()), - } - - context.add_output_metadata(metadata=metadata) -``` - ---- - -## Step 2: Install Python packages - -Next, install the Python packages you'll need to run your code in your favorite Python environment: - -```bash -# run in a terminal in your favorite python environment -pip install dagster dagster-webserver pandas -``` - -Unsure? Check out the [installation guide](/getting-started/install). - ---- - -## Step 3: Start the Dagster UI and materialize assets - -1. In the same directory as `hello-dagster.py`, run `dagster dev`. This command starts a web server to host Dagster's user interface: - - ```bash - # run in a terminal in your favorite python environment - dagster dev -f hello-dagster.py - ``` - -2. In your browser, navigate to [http://localhost:3000/](http://localhost:3000). - -3. Click **Materialize All** to run the pipeline and create your assets. Materializing an asset runs the asset function and saves the result. This pipeline uses the Dagster defaults to save the result to a pickle file on disk. - - HackerNews assets in Dagster's Asset Graph, unmaterialized - -That's it! You now have two materialized Dagster assets: - - - -But wait - there's more. Because the `hackernews_top_stories` asset specified `metadata`, you can view the metadata right in the UI: - -1. Click the asset. -2. In the sidebar that displays, click the **Show Markdown** link in the **Materialization in Last Run** section. This opens a preview of the pipeline result, allowing you to view the top 10 HackerNews stories: - - Markdown preview of HackerNews top 10 stories - ---- - -## Next steps - -Congrats on your first Dagster pipeline! This example used [assets](/tutorial), which most Dagster projects utilize because they let data engineers: - -- Think in the same terms as stakeholders -- Answer questions about data quality and lineage -- Work with the modern data stack (dbt, Airbyte/Fivetran, Spark) -- Create declarative freshness policies instead of task-driven cron schedules - -Dagster also offers [ops and jobs](/guides/dagster/intro-to-ops-jobs), but we recommend starting with assets. - -While this example used a single file, most Dagster projects are organized as Python packages. From here, you can: - -- Start with a scaffolded blank project. Check out the [new project guide](/getting-started/create-new-project) for more info. -- Start with an official example, such as the [dbt + Dagster project](/integrations/dbt/using-dbt-with-dagster). Check out [all the examples in GitHub](https://github.com/dagster-io/dagster/tree/master/examples). 
diff --git a/docs/content/getting-started/quickstart.mdx b/docs/content/getting-started/quickstart.mdx new file mode 100644 index 0000000000000..d76a9b8359030 --- /dev/null +++ b/docs/content/getting-started/quickstart.mdx @@ -0,0 +1,196 @@ +--- +title: Quickstart | Dagster Docs +description: Run dagster for the first time +--- + +# Quickstart + + + Looking to scaffold a new project? Check out the{" "} + Creating a new project{" "} + guide! + + +Welcome to Dagster! This guide will help you quickly run the [Dagster Quickstart](https://github.com/dagster-io/dagster-quickstart) project, showcasing Dagster's capabilities and serving as a foundation for exploring its features. + +The [Dagster Quickstart](https://github.com/dagster-io/dagster-quickstart) project can be used without installing anything on your machine by using the pre-configured [GitHub Codespace](https://github.com/features/codespaces). If you prefer to run things on your own machine, however, we've got you covered. + + + + +### Option 1: Running Locally + + + +Ensure you have one of the supported Python versions installed before proceeding. + +Refer to Python's official getting started guide, or our recommendation of using pyenv for installing Python. + +1. Clone the Dagster Quickstart repository by executing: + + ```bash + git clone https://github.com/dagster-io/dagster-quickstart && cd dagster-quickstart + ``` + +2. Install the necessary dependencies using the following command: + + We use `-e` to install dependencies in ["editable mode"](https://pip.pypa.io/en/latest/topics/local-project-installs/#editable-installs). This allows changes to be automatically applied when we modify code. + + ```bash + pip install -e ".[dev]" + ``` + +3. Run the project! + + ```bash + dagster dev + ``` + +4. Navigate to localhost:3000 in your web browser. + +5. **Success!** + + + + +### Option 2: Using GitHub Codespaces + +1. Fork the [Dagster Quickstart](https://github.com/dagster-io/dagster-quickstart) repository. + +2. Select **Create codespace on main** from the **Code** dropdown menu. + + + +3. After the codespace loads, start Dagster by running `dagster dev` in the terminal: + + ```bash + dagster dev + ``` + +4. Click **Open in Browser** when prompted. + + + +5. **Success!** + + + + +## Navigating the User Interface + +You should now have a running instance of Dagster! From here, we can run our data pipeline. + +To run the pipeline, click the **Materialize All** button in the top right. In Dagster, _materialization_ refers to executing the code associated with an asset to produce an output. + + + +Congratulations! You have successfully materialized two Dagster assets: + + + +But wait - there's more. Because the `hackernews_top_stories` asset returned some `metadata`, you can view the metadata right in the UI: + +1. Click the asset. +2. In the sidebar, click the **Show Markdown** link in the **Materialization in Last Run** section. This opens a preview of the pipeline result, allowing you to view the top 10 HackerNews stories: + + + +## Understanding the Code + +The Quickstart project defines two **Assets** using the `@asset` decorator: + +- `hackernews_top_story_ids` retrieves the top stories from the Hacker News API and saves them as a JSON file. +- `hackernews_top_stories` builds on the first asset, retrieving the data for each story, saving it as a CSV file, and returning a `MaterializeResult` with a markdown preview of the top stories. 
+ +```python file=/getting-started/quickstart/assets.py +import json + +import pandas as pd +import requests + +from dagster import Config, MaterializeResult, MetadataValue, asset + + +class HNStoriesConfig(Config): + top_stories_limit: int = 10 + hn_top_story_ids_path: str = "hackernews_top_story_ids.json" + hn_top_stories_path: str = "hackernews_top_stories.csv" + + +@asset +def hackernews_top_story_ids(config: HNStoriesConfig): + """Get top stories from the HackerNews top stories endpoint.""" + top_story_ids = requests.get( + "https://hacker-news.firebaseio.com/v0/topstories.json" + ).json() + + with open(config.hn_top_story_ids_path, "w") as f: + json.dump(top_story_ids[: config.top_stories_limit], f) + + +@asset(deps=[hackernews_top_story_ids]) +def hackernews_top_stories(config: HNStoriesConfig) -> MaterializeResult: + """Get items based on story ids from the HackerNews items endpoint.""" + with open(config.hn_top_story_ids_path, "r") as f: + hackernews_top_story_ids = json.load(f) + + results = [] + for item_id in hackernews_top_story_ids: + item = requests.get( + f"https://hacker-news.firebaseio.com/v0/item/{item_id}.json" + ).json() + results.append(item) + + df = pd.DataFrame(results) + df.to_csv(config.hn_top_stories_path) + + return MaterializeResult( + metadata={ + "num_records": len(df), + "preview": MetadataValue.md(str(df[["title", "by", "url"]].to_markdown())), + } + ) +``` + +--- + +## Next steps + +Congratulations on successfully running your first Dagster pipeline! In this example, we used [assets](/tutorial), which are a cornerstone of Dagster projects. They empower data engineers to: + +- Think in the same terms as stakeholders +- Answer questions about data quality and lineage +- Work with the modern data stack (dbt, Airbyte/Fivetran, Spark) +- Create declarative freshness policies instead of task-driven cron schedules + +Dagster also offers [ops and jobs](/guides/dagster/intro-to-ops-jobs), but we recommend starting with assets. + +To create your own project, consider the following options: + +- Scaffold a new project using our [new project guide](/getting-started/create-new-project). +- Begin with an official example, like the [dbt & Dagster project](/integrations/dbt/using-dbt-with-dagster), and explore [all examples on GitHub](https://github.com/dagster-io/dagster/tree/master/examples). diff --git a/docs/content/getting-started/what-why-dagster.mdx b/docs/content/getting-started/what-why-dagster.mdx index 48137c8138e2a..40190556a1d90 100644 --- a/docs/content/getting-started/what-why-dagster.mdx +++ b/docs/content/getting-started/what-why-dagster.mdx @@ -42,6 +42,7 @@ Dagster’s asset-centric approach to building data pipelines makes it easy to: - **Understand how an asset - like a database table or report - is produced.** Everyone in your organization can understand the data lineage and how data assets relate to each other, even if they didn’t build the pipeline themselves. - **Determine if an asset is up to date.** It’s easy to tell exactly why assets are out-of-date, whether it might be late upstream data or errors in code. +- **Diagnose data quality issues.** Building data quality checks into your pipelines is straightforward, and you can be notified automatically when data quality issues arise. - **Standardize best practices.** [Software-defined Assets (SDAs)](/concepts/assets/software-defined-assets), the Dagster concept that produces data assets, are a unifying abstraction across all data teams. 
SDAs enable easier collaboration and rapid adoption of best practices such as domain-specific languages, continuous integration, local development, and testing. - **Simplify debugging.** Every run and computation is tied to the goal of producing data, so debugging tools like logs are specific to the assets being produced. When something goes wrong, you can quickly identify the problematic asset, address it, and only need to re-execute that asset. @@ -53,7 +54,7 @@ Additionally, Dagster is accompanied by a sleek, modern, [web-based UI](/concept ## How does it work? -If you want to try running Dagster yourself, check out the [Hello, Dagster!](/getting-started/hello-dagster) quickstart. +If you want to try running Dagster yourself, check out the Dagster [Quickstart](/getting-started/quickstart). --- @@ -61,7 +62,7 @@ If you want to try running Dagster yourself, check out the [Hello, Dagster!](/ge Ready to dive in? Depending on your learning preferences, there are a few ways to get started: -- [Dagster University](https://courses.dagster.io) - An in-depth, step-by-step course with quizzes and practice problems focusing on the essentials of building in Dagster. Completing the course takes from six to 10 hours. +- [Dagster University](https://courses.dagster.io) - In-depth, step-by-step courses with quizzes and practice problems focusing on building in Dagster. - [Tutorial](/tutorial) - Not as lengthy or detailed as Dagster University, the tutorial focuses on building a small data pipeline that retrieves data from HackerNews. - [Concepts](/concepts) - Detailed information about Dagster’s core concepts. - [Guides](/guides) - Step-by-step guides for completing specific tasks with Dagster. diff --git a/docs/content/guides.mdx b/docs/content/guides.mdx index 77a57d514b704..c38ed02c9be5d 100644 --- a/docs/content/guides.mdx +++ b/docs/content/guides.mdx @@ -16,19 +16,21 @@ Learn to apply [Dagster concepts](/concepts) to your work, explore experimental - [Transitioning data pipelines from development to production](/guides/dagster/transitioning-data-pipelines-from-development-to-production) - Learn how to seamlessly transition your Dagster pipelines from local development to production -- [Testing against production with Dagster Cloud Branch Deployments](/guides/dagster/branch_deployments) - Use Dagster Cloud Branch Deployments to quickly iterate on your Dagster code without impacting production data +- [Testing against production with Dagster+ Branch Deployments](/guides/dagster/branch_deployments) - Use Dagster+ Branch Deployments to quickly iterate on your Dagster code without impacting production data --- ## Working with data assets -- [Understanding how assets relate to ops and graphs](/guides/dagster/how-assets-relate-to-ops-and-graphs) - Learn how software-defined assets relate to ops and graphs, and when to use one over the other +- [Understanding how assets relate to ops and graphs](/guides/dagster/how-assets-relate-to-ops-and-graphs) - Learn how asset definitions relate to ops and graphs, and when to use one over the other -- [Moving to Software-defined Assets](/guides/dagster/enriching-with-software-defined-assets) - Already using ops and graphs, but not Software-defined Assets? Learn why and how to use Software-defined Assets +- [Moving to asset definitions](/guides/dagster/enriching-with-software-defined-assets) - Already using ops and graphs, but not asset definitions? 
Learn why and how to use asset definitions -- [Using Software-defined assets with Pandas and PySpark](/guides/dagster/software-defined-assets) - A quick introduction to Software-defined Assets, featuring Pandas and PySpark +- [Using asset checks to check data freshness](/concepts/assets/asset-checks/checking-for-data-freshness) - Use freshness checks, a type of [asset check](/concepts/assets/asset-checks) to identify the data assets that are overdue for an update -- [Testing assets](/guides/dagster/testing-assets) - Learn to test your Software-defined Assets +- [Using asset definitions with Pandas and PySpark](/guides/dagster/software-defined-assets) - A quick introduction to asset definitions, featuring Pandas and PySpark + +- [Testing assets](/guides/dagster/testing-assets) - Learn to test your asset definitions - [Migrating to Pythonic resources and config](/guides/dagster/migrating-to-pythonic-resources-and-config) - Incrementally migrate existing Dagster codebases to Pythonic resources and config @@ -44,19 +46,9 @@ Learn to apply [Dagster concepts](/concepts) to your work, explore experimental --- -## Dagster Pipes (Experimental) - -- [Dagster Pipes](/guides/dagster-pipes) - A high-level look at Dagster Pipes, a toolkit for building integrations between Dagster and external execution environments - -- [Dagster Pipes tutorial](/guides/dagster-pipes/subprocess) - Get started using Dagster Pipes with this tutorial, where you'll use a subprocess to execute code in an external environment - -- [Dagster Pipes details and customization](/guides/dagster-pipes/dagster-pipes-details-and-customization) - Learn about Dagster Pipes APIs and how to compose them to create a custom solution for your data platform - ---- - ## Migrating to Dagster -- [Migrating from Airflow to Dagster](/integrations/airflow/migrating-to-dagster) - Perform a lift-and-shift migration from Airflow to Dagster +- [Migrating Airflow to Dagster](/guides/migrations/migrating-airflow-to-dagster) - Learn how to migrate your pipelines from Airflow to Dagster --- @@ -86,4 +78,6 @@ Learn to apply [Dagster concepts](/concepts) to your work, explore experimental - [Asset versioning and caching](/guides/dagster/asset-versioning-and-caching) - Memoize assets using Dagster's data versioning system +- [Linking to asset definition code with code references](/guides/dagster/code-references) - Attach code references to your Dagster assets to easily navigate to the code that backs the asset + [def]: /guides/dagster/ diff --git a/docs/content/guides/customizing-run-queue-priority.mdx b/docs/content/guides/customizing-run-queue-priority.mdx index 9fa8e8f15afe9..da40cf8368c95 100644 --- a/docs/content/guides/customizing-run-queue-priority.mdx +++ b/docs/content/guides/customizing-run-queue-priority.mdx @@ -51,8 +51,7 @@ In this example, the priority is set to `-1` with a `dagster/priority` tag value ```python startafter=start_marker_priority endbefore=end_marker_priority file=/deploying/concurrency_limits/concurrency_limits.py @job(tags={"dagster/priority": "3"}) -def important_job(): - ... +def important_job(): ... @schedule( @@ -61,8 +60,7 @@ def important_job(): execution_timezone="US/Central", tags={"dagster/priority": "-1"}, ) -def less_important_schedule(_): - ... +def less_important_schedule(_): ... ``` @@ -133,8 +131,8 @@ To summarize, due to the concurrency limit, this configuration will change the r href="/concepts/assets/software-defined-assets" >
      - export DATABRICKS_TOKEN - ``` - ---- - -## Step 1: Create an asset computed in Databricks - -In this step, you’ll create a Dagster asset that, when materialized, opens a Dagster pipes session and launches a Databricks job. - -### Step 1.1: Define the Dagster asset - -In your Dagster project, create a file named `dagster_databricks_pipes.py` and paste in the following code: - -```python file=/guides/dagster/dagster_pipes/databricks/databricks_asset_client.py startafter=start_databricks_asset endbefore=end_databricks_asset -### dagster_databricks_pipes.py - -import os -import sys - -from dagster_databricks import PipesDatabricksClient - -from dagster import AssetExecutionContext, Definitions, EnvVar, asset -from databricks.sdk import WorkspaceClient -from databricks.sdk.service import jobs - - -@asset -def databricks_asset( - context: AssetExecutionContext, pipes_databricks: PipesDatabricksClient -): - task = jobs.SubmitTask.from_dict( - { - # The cluster settings below are somewhat arbitrary. Dagster Pipes is - # not dependent on a specific spark version, node type, or number of - # workers. - "new_cluster": { - "spark_version": "12.2.x-scala2.12", - "node_type_id": "i3.xlarge", - "num_workers": 0, - "cluster_log_conf": { - "dbfs": {"destination": "dbfs:/cluster-logs-dir-noexist"}, - }, - }, - "libraries": [ - # Include the latest published version of dagster-pipes on PyPI - # in the task environment - {"pypi": {"package": "dagster-pipes"}}, - ], - "task_key": "some-key", - "spark_python_task": { - "python_file": "dbfs:/my_python_script.py", # location of target code file - "source": jobs.Source.WORKSPACE, - }, - } - ) - - print("This will be forwarded back to Dagster stdout") # noqa: T201 - print("This will be forwarded back to Dagster stderr", file=sys.stderr) # noqa: T201 - - extras = {"some_parameter": 100} - - return pipes_databricks.run( - task=task, - context=context, - extras=extras, - ).get_materialize_result() -``` - -Let's review what's happening in this code: - -- **Includes a number of imports from Dagster and the Databricks SDK.** There are a few that aren't used in this code block, but will be later in this guide. - -- **Creates an asset named `databricks_asset`.** We also: - - - Provided as the `context` argument to the asset. This object provides access to system APIs such as resources, config, and logging. We’ll come back to this a bit later in this section. - - Specified a resource for the asset to use. We’ll also come back to this later. - -- **Defines a Databricks `SubmitTask` object in the asset body.** Coverage of all the fields on this object is beyond the scope of this guide, but you can find further information in the [Databricks SDK API docs](https://databricks-sdk-py.readthedocs.io/en/latest/workspace/jobs.html) and [source code](https://github.com/databricks/databricks-sdk-py/blob/main/databricks/sdk/service/jobs.py) for the `SubmitTask` object. - - The submitted task must: - - - **Specify `dagster-pipes` as a PyPI dependency**. You can include a version pin (e.g. `dagster-pipes==1.5.4`) if desired. - - **Use `new_cluster` as opposed to an existing cluster**. This is because environment variables are injected into the specification used to create the cluster. - - Use a `spark_python_task`. - - **Optionally include `new_cluster.cluster_log_conf.dbfs`**. If set, the will automatically set up objects for `stdout` and `stderr` of the driver node. These will periodically forward the `stdout` and `stderr` logs written by Databricks back to Dagster. 
**Note**: Because Databricks only updates these log files every five minutes, that is the maximum frequency at which Dagster can forward the logs. - -- **Defines an `extras` dictionary containing some arbitrary data (`some_parameter`).** This is where you can put various data, e.g. from the Dagster run config, that you want to be available in Databricks. Anything added here must be JSON-serializable. - -- **Passes the `SubmitTask` object, `AssetExecutionContext`, and `extras` dictionary to the `run` method of **. This method synchronously executes the Databricks job specified by the `SubmitTask` object. It slightly modifies the object by injecting some environment variables under `new_cluster.spark_env_vars` before submitting the object to the Databricks API. - -- **Returns a object representing the result of execution**. This is obtained by calling `get_materialize_result` on the object returned by `run` after the Databricks job has finished. **Note**: Execution can take several minutes even for trivial scripts due to Databricks cluster provisioning times. - -### Step 1.2: Define the Databricks Pipes client and definitions - -The [`dagster-databricks`](/\_apidocs/libraries/dagster-databricks) library provides a , which is a pre-built Dagster resource that allows you to quickly get Pipes working with your Databricks workspace. - -Add the following to the bottom of `dagster_databricks_pipes.py` to define the resource and a object that binds it to the `databricks_asset`: - -```python file=/guides/dagster/dagster_pipes/databricks/databricks_asset_client.py startafter=start_definitions endbefore=end_definitions -pipes_databricks_resource = PipesDatabricksClient( - client=WorkspaceClient( - host=os.getenv("DATABRICKS_HOST"), - token=os.getenv("DATABRICKS_TOKEN"), - ) -) - -defs = Definitions( - assets=[databricks_asset], resources={"pipes_databricks": pipes_databricks_resource} -) -``` - ---- - -## Step 2: Write a script for execution on Databricks - -The next step is to write the code that will be executed on Databricks. In the Databricks task specification in [Step 1.1](#step-11-define-the-dagster-asset), we referenced a file `dbfs:/my_python_script.py` in the `spark_python_task`: - -```python -"spark_python_task": { - "python_file": "dbfs:/my_python_script.py", # location of target code file - "source": jobs.Source.WORKSPACE, -} -``` - -We'll create this script from scratch and upload it to DBFS. You can use the Databricks UI or run a command from a shell to do this. To use the shell method, run: - -```shell -dbfs cp my_python_script.py dbfs:/my_python_script.py -``` - -Let's look at the script itself: - -```python file=/guides/dagster/dagster_pipes/databricks/databricks_script.py -### dbfs:/my_python_script.py - -# `dagster_pipes` must be available in the databricks python environment -from dagster_pipes import ( - PipesDbfsContextLoader, - PipesDbfsMessageWriter, - open_dagster_pipes, -) - -# Sets up communication channels and downloads the context data sent from Dagster. -# Note that while other `context_loader` and `message_writer` settings are -# possible, it is recommended to use `PipesDbfsContextLoader` and -# `PipesDbfsMessageWriter` for Databricks. -with open_dagster_pipes( - context_loader=PipesDbfsContextLoader(), - message_writer=PipesDbfsMessageWriter(), -) as pipes: - # Access the `extras` dict passed when launching the job from Dagster. 
- some_parameter_value = pipes.get_extra("some_parameter") - - # Stream log message back to Dagster - pipes.log.info(f"Using some_parameter value: {some_parameter_value}") - - # ... your code that computes and persists the asset - - # Stream asset materialization metadata and data version back to Dagster. - # This should be called after you've computed and stored the asset value. We - # omit the asset key here because there is only one asset in scope, but for - # multi-assets you can pass an `asset_key` parameter. - pipes.report_asset_materialization( - metadata={ - "some_metric": {"raw_value": some_parameter_value + 1, "type": "int"} - }, - data_version="alpha", - ) -``` - -Before we go any futher, let's review what this script does: - -- **Imports `PipesDbfsContextLoader`, `PipesDbfsMessageWriter`, and `open_dagster_pipes` from `dagster_pipes`.** The and are DBFS-specific implementations of the and . Refer to the [Dagster Pipes details and customization Guide](/guides/dagster-pipes/dagster-pipes-details-and-customization) for protocol details. - - Both objects write temporary files on DBFS for communication between the orchestration and external process. The and match a corresponding `PipesDbfsContextInjector` and `PipesDbfsMessageReader` on the orchestration end, which are instantiated inside the . - -- **Passes the context loader and message writer to the context manager**, which yields an instance of called `pipes`. - - Inside the body of the context manager are various calls against `pipes` to retrieve an extra, log, and report an asset materialization. All of these calls will use the DBFS temporary file-based communications channels established by and . To see the full range of what you can do with the , see the API docs or the general [Pipes guide](/guides/dagster-pipes). - -At this point you can execute the rest of your Databricks code as normal, invoking various APIs as needed. - -#### Existing codebases - -For illustrative purposes, we've created a Python script from scratch. However, you may want to apply Pipes to an existing codebase. - -One approach that can be useful is to wrap the context manager around an existing `main` function or entry point. You can either pass the down through your business logic, or simply report an asset materialization after your business logic is done: - -```python file=/guides/dagster/dagster_pipes/databricks/databricks_script_existing.py -from dagster_pipes import ( - PipesDbfsContextLoader, - PipesDbfsMessageWriter, - open_dagster_pipes, -) - -# ... existing code - -if __name__ == "__main__": - with open_dagster_pipes( - context_loader=PipesDbfsContextLoader(), - message_writer=PipesDbfsMessageWriter(), - ) as pipes: - # ... existing logic - pipes.report_asset_materialization( - asset_key="foo", - metadata={"some_key": "some_value"}, - data_version="alpha", - ) -``` - ---- - -## Step 3: Run the Databricks job from the Dagster UI - -In this step, you’ll run the Databricks job you created in [Step 1.2](#step-12-define-the-databricks-pipes-client-and-definitions) from the Dagster UI. - -1. In a new command line session, run the following to start the UI: - - ```shell - dagster dev -f dagster_databricks_pipes.py - ``` - -2. Navigate to [localhost:3000](http://localhost:3000/), where you should see the UI: - - Databricks asset - -3. Click **Materialize** near the top right corner of the page, then click **View** on the **Launched Run** popup. 
Wait for the run to complete, and the event log should look like this: - - Event log for Databricks run - ---- - -## Advanced: Customization using open_pipes_session - -The is a high-level API that doesn't cover all use cases. If you have existing code to launch/poll the job you do not want to change, you want to stream back materializations as they occur, or you just want more control than is permitted by , you can use instead of . - -To use : - -1. Your Databricks job be launched within the scope of the context manager; and -2. Your job is launched on a cluster containing the environment variables available on the yielded `pipes_session` - -While your Databricks code is running, any calls to `report_asset_materialization` in the external script are streamed back to Dagster, causing a `MaterializationResult` object to be buffered on the `pipes_session`. You can either: - -- Leave these objects buffered until execution is complete (**Option 1** in below example code), or -- Stream them to Dagster machinery during execution by calling `yield pipes_session.get_results()` (**Option 2**) - -With either option, once the block closes, you must call `yield pipes_session.get_results()` to yield any remaining buffered results, since we cannot guarantee that all communications from Databricks have been processed until the `open_pipes_session` block closes. - -```python file=/guides/dagster/dagster_pipes/databricks/databricks_asset_open_pipes_session.py -import os -import sys - -from dagster_databricks import PipesDbfsContextInjector, PipesDbfsMessageReader -from dagster_databricks.pipes import PipesDbfsLogReader - -from dagster import AssetExecutionContext, asset, open_pipes_session -from databricks.sdk import WorkspaceClient - - -@asset -def databricks_asset(context: AssetExecutionContext): - client = WorkspaceClient( - host=os.environ["DATABRICKS_HOST"], - token=os.environ["DATABRICKS_TOKEN"], - ) - - # Arbitrary json-serializable data you want access to from the `PipesContext` - # in the Databricks runtime. Assume `sample_rate` is a parameter used by - # the target job's business logic. - extras = {"sample_rate": 1.0} - - # Sets up Pipes communications channels - with open_pipes_session( - context=context, - extras=extras, - context_injector=PipesDbfsContextInjector(client=client), - message_reader=PipesDbfsMessageReader( - client=client, - # These log readers are optional. If you provide them, then you must set the - # `new_cluster.cluster_log_conf.dbfs.destination` field in the job you submit to a valid - # DBFS path. This will configure Databricks to write stdout/stderr to the specified - # location every 5 minutes. Dagster will poll this location and forward the - # stdout/stderr logs every time they are updated to the orchestration process - # stdout/stderr. - log_readers=[ - PipesDbfsLogReader( - client=client, remote_log_name="stdout", target_stream=sys.stdout - ), - PipesDbfsLogReader( - client=client, remote_log_name="stderr", target_stream=sys.stderr - ), - ], - ), - ) as pipes_session: - ##### Option (1) - # NON-STREAMING. Just pass the necessary environment variables down. - # During execution, all reported materializations are buffered on the - # `pipes_session`. Yield them all after Databricks execution is finished. - - # Dict[str, str] with environment variables containing Pipes comms info. - env_vars = pipes_session.get_bootstrap_env_vars() - - # Some function that handles launching/monitoring of the Databricks job. 
- # It must ensure that the `env_vars` are set on the executing cluster. - custom_databricks_launch_code(env_vars) - - ##### Option (2) - # STREAMING. Pass `pipes_session` down. During execution, you can yield any - # asset materializations that have been reported by calling ` - # pipes_session.get_results()` as often as you like. `get_results` returns - # an iterator that your custom code can `yield from` to forward the - # results back to the materialize function. Note you will need to extract - # the env vars by calling `pipes_session.get_pipes_bootstrap_env_vars()`, - # and launch the Databricks job in the same way as with (1). - - # The function should return an `Iterator[MaterializeResult]`. - yield from custom_databricks_launch_code(pipes_session) - - # With either option (1) or (2), this is required to yield any remaining - # buffered results. - yield from pipes_session.get_results() -``` - ---- - -## Related - - - - - - diff --git a/docs/content/guides/dagster-pipes/kubernetes.mdx b/docs/content/guides/dagster-pipes/kubernetes.mdx deleted file mode 100644 index 7e095e21ea712..0000000000000 --- a/docs/content/guides/dagster-pipes/kubernetes.mdx +++ /dev/null @@ -1,248 +0,0 @@ ---- -title: "Integrating Kubernetes with Dagster Pipes | Dagster Docs" -description: "Learn to integrate Dagster Pipes with Kubernetes to launch external code from Dagster assets." ---- - -# Integrating Kubernetes with Dagster Pipes - - - Heads up! This guide focuses on using an out-of-the-box - Kubernetes resource. For further customization, use the{" "} - - open_pipes_session approach - {" "} - instead. - - -In this guide, we’ll show you how to use [Dagster Pipes](/guides/dagster-pipes) with Dagster’s Kubernetes integration to launch Kubernetes pods and execute external code. - -Pipes allows your code to interact with Dagster outside of a full Dagster environment. Instead, the environment only needs to contain `dagster-pipes`, a single-file Python package with no dependencies that can be installed from PyPI or easily vendored. `dagster-pipes` handles streaming `stdout`/`stderr` and Dagster events back to the orchestration process. - -**Note**: Dagster Pipes is currently **experimental**. - ---- - -## Prerequisites - -To use Dagster Pipes with Kubernetes, you’ll need: - -- **In the orchestration environment**, you'll need to install the following packages: - - ```shell - pip install dagster dagster-webserver dagster-k8s - ``` - - Refer to the [Dagster installation guide](/getting-started/install) for more info. - -- **A Kubernetes cluster**. This can be an existing cluster. Or, if working locally, you can use [kind](https://kind.sigs.k8s.io/) or [Docker Desktop](https://docs.docker.com/desktop/kubernetes/). - ---- - -## Step 1: Define the external Kubernetes code container - -In this step, you’ll create a Kubernetes container image that runs some code that uses `dagster-pipes`. - -### Step 1.1: Write a Python script - -First, you'll write a Python script that uses `dagster-pipes` and is executed in a container via Kubernetes: - -```python -# my_python_script.py - -from dagster_pipes import open_dagster_pipes - -with open_dagster_pipes() as pipes: - # Stream log message back to Dagster - pipes.log.info(f"Using some_parameter value: {some_parameter_value}") - - # ... your code that computes and persists the asset - - # Stream asset materialization metadata and data version back to Dagster. - # This should be called after you've computed and stored the asset value. 
We - # omit the asset key here because there is only one asset in scope, but for - # multi-assets you can pass an `asset_key` parameter. - pipes.report_asset_materialization( - metadata={ - "some_metric": {"raw_value": some_parameter_value + 1, "type": "int"} - }, - data_version="alpha", - ) -``` - -Let's review what this code does: - -- Imports from `dagster_pipes` - -- **Initializes the Dagster Pipes context (), which yields an instance of called `pipes`.** - - We're using the default context loader () and message writer () in this example. These objects establish communication between the orchestration and external process. On the orchestration end, these match a corresponding `PipesContextInjector` and `PipesMessageReader`, which are instantiated inside the . - -- **Inside the body of the context manager (), retrieve a log and report an asset materialization.** These calls use the temporary communications channels established by and . To see the full range of what you can do with the , see the API docs or the general [Pipes documentation](/guides/dagster-pipes). - -At this point you can execute the rest of your Kubernetes code as normal, invoking various APIs as needed. - -### Step 1.2: Define and build the container image - -Next, you'll package the script into a container image using a `Dockerfile`. For example: - -```dockerfile -FROM python:3.10-slim - -pip install dagster-pipes - -COPY my_python_script.py . -``` - -Then, build the image: - -```shell -docker build -t pipes-example:v1 -``` - -**Note**: Depending on the Kubernetes setup you're using, you may need to upload the container image to a registry or otherwise make it available to the cluster. For example: `kind load docker-image pipes-example:v1` - ---- - -## Step 2: Create the Dagster objects - -In this step, you’ll create a Dagster asset that, when materialized, opens a Dagster pipes session and spins up a Kubernetes pod to execute the container created in the previous step. - -### Step 2.1: Define the Dagster asset - -In your Dagster project, create a file named `dagster_k8s_pipes.py` and paste in the following code: - -```python -# dagster_k8s_pipes.py - -from dagster import AssetExecutionContext, Definitions, asset -from dagster_k8s import PipesK8sClient - - -@asset -def k8s_pipes_asset(context: AssetExecutionContext, k8s_pipes_client: PipesK8sClient): - return k8s_pipes_client.run( - context=context, - image="pipes-example:v1", - ).get_materialize_result() -``` - -Here’s what we did in this example: - -- Created an asset named `k8s_pipes_asset` - -- Provided as the `context` argument to the asset. This object provides access to system APIs such as resources, config, and logging. - -- Specified a resource for the asset to use, , which is a pre-built Dagster resource that allows you to quickly get Pipes working with Kubernetes. - - We also specified the following for the resource: - - - `context` - The asset's `context` () data - - `image` - The Kubernetes image we created in [Step 1](#step-1-define-the-external-kubernetes-code-container) - - These arguments are passed to the `run` method of , which submits the provided cluster information to the Kubernetes API and then runs the specified `image`. - -- Returned a object representing the result of execution. This is obtained by calling `get_materialize_result` on the object returned by `run` after the execution in Kubernetes has completed. - - - Heads up! Depending on your Kubernetes setup, there may be a - few additional things you need to do: -
-        • If the default behavior doesn't target the correct cluster, supply
-          the load_incluster_config, kubeconfig_file, and kube_context
-          arguments on the PipesK8sClient resource
-        • If you need to alter default spec behaviors, use arguments on
-          PipesK8sClient.run such as base_pod_spec
-
      - -### Step 2.2: Create Dagster Definitions - -Next, you’ll add the asset and Kubernetes resource to your project’s code location via the object. This makes the resource available to [other Dagster definitions in the project](/concepts/code-locations). - -Copy and paste the following to the bottom of `dagster_k8s_pipes.py`: - -```python -# dagster_k8s_pipes.py - -defs = Definitions( - assets=[k8s_pipes_asset], - resources={ - "k8s_pipes_client": PipesK8sClient(), - }, -) -``` - -At this point, `dagster_k8s_pipes.py` should look like the following: - -```python -# dagster_k8s_pipes.py - -from dagster import AssetExecutionContext, Definitions, asset -from dagster_k8s import PipesK8sClient - - -@asset -def k8s_pipes_asset(context: AssetExecutionContext, k8s_pipes_client: PipesK8sClient): - return k8s_pipes_client.run( - context=context, - image="pipes-materialize:v1", - ).get_materialize_result() - - -defs = Definitions( - assets=[k8s_pipes_asset], - resources={ - "k8s_pipes_client": PipesK8sClient(), - }, -) -``` - ---- - -## Step 3: Launch the Kubernetes container from the Dagster UI - -In this step, you’ll run the Kubernetes container you defined in [Step 1](#step-1-define-the-external-kubernetes-code-container) from the Dagster UI. - -1. In a new command line session, run the following to start the UI: - - ```python - dagster dev -f dagster_k8s_pipes.py - ``` - -2. Navigate to [localhost:3000](http://localhost:3000/), where you should see the UI. - -3. Click **Materialize** near the top right corner of the page, then click **View** on the **Launched Run** popup. Wait for the run to complete, and the event log should look like this: - - Event log for Kubernetes run - ---- - -## Related - - - - - - diff --git a/docs/content/guides/dagster-pipes/subprocess.mdx b/docs/content/guides/dagster-pipes/subprocess.mdx deleted file mode 100644 index 105b7f9951133..0000000000000 --- a/docs/content/guides/dagster-pipes/subprocess.mdx +++ /dev/null @@ -1,54 +0,0 @@ ---- -title: "Dagster Pipes tutorial | Dagster Docs" -description: "Learn how to use Dagster Pipes's built-in subprocess implementation to invoke a subprocess with a given command and environment" ---- - -# Dagster Pipes tutorial - -In this guide, we’ll show you how to use [Dagster Pipes](/guides/dagster-pipes) with Dagster’s built-in subprocess `PipesSubprocessClient` to run a local subprocess with a given command and environment. You can then send information such as structured metadata and logging back to Dagster from the subprocess, where it will be visible in the Dagster UI. - -To get there, you'll: - -- [Create a Dagster asset that invokes a subprocess](/guides/dagster-pipes/subprocess/create-subprocess-asset) -- [Modify existing code to work with Dagster Pipes to send information back to Dagster](/guides/dagster-pipes/subprocess/modify-external-code) -- Learn about using Dagster Pipes with other entities in the Dagster system in the [Reference](/guides/dagster-pipes/subprocess/reference) section - - - -This guide focuses on using an out-of-the-box `PipesSubprocessClient` resource. For further customization with the subprocess invocation, use approach instead. - - - - - - - ---- - -## Prerequisites - -To use Dagster Pipes to run a subprocess, you’ll need to have Dagster (`dagster`) and the Dagster UI (`dagster-webserver`) installed. Refer to the [Installation guide](/getting-started/install) for more info. - -You'll also need **an existing Python script.** We’ll use the following Python script to demonstrate. 
This file will be invoked by the Dagster asset that you’ll create later in this tutorial. - -Create a file named `external_code.py` and paste the following into it: - -```python file=/guides/dagster/dagster_pipes/subprocess/part_1/external_code.py lines=2- -import pandas as pd - - -def main(): - orders_df = pd.DataFrame({"order_id": [1, 2], "item_id": [432, 878]}) - total_orders = len(orders_df) - print(f"processing total {total_orders} orders") - - -if __name__ == "__main__": - main() -``` - ---- - -## Ready to get started? - -When you've fulfilled all the prerequisites for the tutorial, you can get started by [creating a Dagster asset that executes a subprocess](/guides/dagster-pipes/subprocess/create-subprocess-asset). diff --git a/docs/content/guides/dagster-pipes/subprocess/reference.mdx b/docs/content/guides/dagster-pipes/subprocess/reference.mdx deleted file mode 100644 index 470c6b95bd29d..0000000000000 --- a/docs/content/guides/dagster-pipes/subprocess/reference.mdx +++ /dev/null @@ -1,358 +0,0 @@ ---- -title: "Dagster Pipes subprocess reference | Dagster Docs" -description: "This page shows ways to execute external code with Dagster Pipes with different entities in the Dagster system." ---- - -# Dagster Pipes subprocess reference - -This reference shows usage of Dagster Pipes with other entities in the Dagster system. For a step-by-step walkthrough, refer to the [Dagster Pipes tutorial](/guides/dagster-pipes/subprocess). - ---- - -## Specifying environment variables and extras - -When launching the subprocess, you may want to make environment variables or additional parameters available in the external process. Extras are arbitrary, user-defined parameters made available on the context object in the external process. - - - - -In the external code, you can access extras via the `PipesContext` object: - -```python file=/guides/dagster/dagster_pipes/subprocess/with_extras_env/external_code.py lines=2- -import os - -import pandas as pd -from dagster_pipes import PipesContext, open_dagster_pipes - - -def main(): - orders_df = pd.DataFrame({"order_id": [1, 2], "item_id": [432, 878]}) - total_orders = len(orders_df) - # get the Dagster Pipes context - context = PipesContext.get() - # get all extras provided by Dagster asset - print(context.extras) - # get the value of an extra - print(context.get_extra("foo")) - # get env var - print(os.environ["MY_ENV_VAR_IN_SUBPROCESS"]) - - -if __name__ == "__main__": - # connect to Dagster Pipes - with open_dagster_pipes(): - main() -``` - - - - -The `run` method to the `PipesSubprocessClient` resource also accepts `env` and `extras` , which allow you to specify environment variables and extra arguments when executing the subprocess: - -Note: We're using `os.environ` in this example, but Dagster's recommendation is to use in production. 
- -```python file=/guides/dagster/dagster_pipes/subprocess/with_extras_env/dagster_code.py -import shutil - -from dagster import ( - AssetExecutionContext, - Definitions, - MaterializeResult, - PipesSubprocessClient, - asset, - file_relative_path, -) - - -@asset -def subprocess_asset( - context: AssetExecutionContext, pipes_subprocess_client: PipesSubprocessClient -) -> MaterializeResult: - cmd = [shutil.which("python"), file_relative_path(__file__, "external_code.py")] - return pipes_subprocess_client.run( - command=cmd, - context=context, - extras={"foo": "bar"}, - env={ - "MY_ENV_VAR_IN_SUBPROCESS": "my_value", - }, - ).get_materialize_result() - - -defs = Definitions( - assets=[subprocess_asset], - resources={"pipes_subprocess_client": PipesSubprocessClient()}, -) -``` - - - - ---- - -## Working with @asset_check - -Sometimes, you may not want to materialize an asset, but instead want to report a data quality check result. When your asset has data quality checks defined in : - - - - - -From the external code, you can report to Dagster that an asset check has been performed via . Note that `asset_key` in this case is required, and must match the asset key defined in : - -```python file=/guides/dagster/dagster_pipes/subprocess/with_asset_check/external_code.py -import pandas as pd -from dagster_pipes import PipesContext, open_dagster_pipes - - -def main(): - orders_df = pd.DataFrame({"order_id": [1, 2], "item_id": [432, 878]}) - # get the Dagster Pipes context - context = PipesContext.get() - # send structured metadata back to Dagster - context.report_asset_check( - asset_key="my_asset", - passed=orders_df[["item_id"]].notnull().all().bool(), - check_name="no_empty_order_check", - ) - - -if __name__ == "__main__": - # connect to Dagster Pipes - with open_dagster_pipes(): - main() -``` - - - - -On Dagster's side, the `PipesClientCompletedInvocation` object returned from `PipesSubprocessClient` includes a `get_asset_check_result` method, which you can use to access the event reported by the subprocess. - -```python file=/guides/dagster/dagster_pipes/subprocess/with_asset_check/dagster_code.py -import shutil - -from dagster import ( - AssetCheckResult, - AssetExecutionContext, - Definitions, - MaterializeResult, - PipesSubprocessClient, - asset, - asset_check, - file_relative_path, -) - - -@asset -def my_asset(): - ... - - -@asset_check(asset="my_asset") -def no_empty_order_check( - context: AssetExecutionContext, pipes_subprocess_client: PipesSubprocessClient -) -> AssetCheckResult: - cmd = [ - shutil.which("python"), - file_relative_path(__file__, "external_code.py"), - ] - return pipes_subprocess_client.run( - command=cmd, context=context - ).get_asset_check_result() - - -defs = Definitions( - assets=[my_asset], - asset_checks=[no_empty_order_check], - resources={"pipes_subprocess_client": PipesSubprocessClient()}, -) -``` - - - - ---- - -## Working with multi-assets - -Sometimes, you may invoke a single call to an API that results in multiple tables being updated, or you may have a single script that computes multiple assets. In these cases, you can use Dagster Pipes to report back on multiple assets at once. - - - - - -**Note**: when working with multi-assets, \` may only be called once per unique asset key. If called more than once, an error similar to the following will surface: - -```bash -Calling {method} with asset key {asset_key} is undefined. 
Asset has already been materialized, so no additional data can be reported for it -``` - -Instead, you’ll need to set the `asset_key` parameter for each instance of : - -```python file=/guides/dagster/dagster_pipes/subprocess/with_multi_asset/external_code.py -import pandas as pd -from dagster_pipes import PipesContext, open_dagster_pipes - - -def main(): - orders_df = pd.DataFrame( - {"order_id": [1, 2, 3], "item_id": [432, 878, 102], "user_id": ["a", "b", "a"]} - ) - total_orders = len(orders_df) - total_users = orders_df["user_id"].nunique() - - # get the Dagster Pipes context - context = PipesContext.get() - # send structured metadata back to Dagster. asset_key is required when there are multiple assets - context.report_asset_materialization( - asset_key="orders", metadata={"total_orders": total_orders} - ) - context.report_asset_materialization( - asset_key="users", metadata={"total_users": total_users} - ) - - -if __name__ == "__main__": - # connect to Dagster Pipes - with open_dagster_pipes(): - main() -``` - - - - - -In the Dagster code, you can use to define a single asset that represents multiple assets. The `PipesClientCompletedInvocation` object returned from `PipesSubprocessClient` includes a `get_results` method, which you can use to access all the events, such as multiple and , reported by the subprocess: - -```python file=/guides/dagster/dagster_pipes/subprocess/with_multi_asset/dagster_code.py -import shutil - -from dagster import ( - AssetExecutionContext, - AssetSpec, - Definitions, - PipesSubprocessClient, - file_relative_path, - multi_asset, -) - - -@multi_asset(specs=[AssetSpec("orders"), AssetSpec("users")]) -def subprocess_asset( - context: AssetExecutionContext, pipes_subprocess_client: PipesSubprocessClient -): - cmd = [ - shutil.which("python"), - file_relative_path(__file__, "external_code.py"), - ] - return pipes_subprocess_client.run(command=cmd, context=context).get_results() - - -defs = Definitions( - assets=[subprocess_asset], - resources={"pipes_subprocess_client": PipesSubprocessClient()}, -) -``` - - - - ---- - -## Passing custom data - -Sometimes, you may want to pass data back from the external process for use in the orchestration code for purposes other than reporting directly to Dagster such as use in creating an output. In this example we use custom messages to create an I/O managed output that is returned from the asset. - - - - -In the external code, we send messages using `report_custom_message`. The message can be any data that is JSON serializable. - -```python file=/guides/dagster/dagster_pipes/subprocess/custom_messages/external_code.py -import pandas as pd -from dagster_pipes import PipesContext, open_dagster_pipes - - -def main(): - # get the Dagster Pipes context - context = PipesContext.get() - - # compute the full orders data - orders = pd.DataFrame( - { - "order_id": [1, 2, 3], - "item_id": [321, 654, 987], - "order_details": [..., ..., ...], # imagine large data, - # and more columns - } - ) - - # send a smaller table to be I/O managed by Dagster and passed to downstream assets - summary_table = pd.DataFrame(orders[["order_id", "item_id"]]) - context.report_custom_message(summary_table.to_dict()) - - context.report_asset_materialization(metadata={"total_orders": len(orders)}) - - -if __name__ == "__main__": - # connect to Dagster Pipes - with open_dagster_pipes(): - main() -``` - - - - -In the Dagster code we receive custom messages using `get_custom_messages`. 
- -```python file=/guides/dagster/dagster_pipes/subprocess/custom_messages/dagster_code.py -import shutil - -import pandas as pd - -from dagster import ( - AssetExecutionContext, - Definitions, - Output, - PipesSubprocessClient, - asset, - file_relative_path, -) - - -@asset -def subprocess_asset( - context: AssetExecutionContext, - pipes_subprocess_client: PipesSubprocessClient, -) -> Output[pd.DataFrame]: - cmd = [shutil.which("python"), file_relative_path(__file__, "external_code.py")] - result = pipes_subprocess_client.run( - command=cmd, - context=context, - ) - - # a small summary table gets reported as a custom message - messages = result.get_custom_messages() - if len(messages) != 1: - raise Exception("summary not reported") - - summary_df = pd.DataFrame(messages[0]) - - # grab any reported metadata off of the materialize result - metadata = result.get_materialize_result().metadata - - # return the summary table to be loaded by Dagster for downstream assets - return Output( - value=summary_df, - metadata=metadata, - ) - - -defs = Definitions( - assets=[subprocess_asset], - resources={"pipes_subprocess_client": PipesSubprocessClient()}, -) -``` - - - diff --git a/docs/content/guides/dagster/airbyte-ingestion-as-code.mdx b/docs/content/guides/dagster/airbyte-ingestion-as-code.mdx deleted file mode 100644 index d9a37d69b428f..0000000000000 --- a/docs/content/guides/dagster/airbyte-ingestion-as-code.mdx +++ /dev/null @@ -1,223 +0,0 @@ ---- -title: Airbyte ingestion as code | Dagster Docs ---- - -# Airbyte ingestion as code - - - This feature is experimental and deprecated{" "} - and will be removed with a future release. We suggest using the{" "} - - Airbyte terraform provider - {" "} - instead. - - -This guide provides an introduction to using Dagster to configure your [Airbyte](/integrations/airbyte) connections. This allows you to centralize the configuration for your data stack, specifying configuration in Python code. You can check-in and version your config with version control or programmatically generate config for more complex use cases. - ---- - -## Prerequisites - -To use this feature, you'll need to install the `dagster-airbyte` and `dagster-managed-elements` libraries: - -```bash -pip install dagster-airbyte dagster-managed-elements -``` - -The `dagster-managed-elements` library includes the base config reconciliation APIs and a CLI. - ---- - -## Step 1: Define a reconciler - -The config for your Airbyte instance is specified in an `AirbyteManagedElementReconciler`, which is pointed at a specific Airbyte instance using an Airbyte resource. The config is also provided with a list of connections to reconcile, which we'll set up later in the guide. - -```python startafter=start_define_reconciler endbefore=end_define_reconciler file=/guides/dagster/ingestion_as_code/airbyte.py dedent=4 -from dagster_airbyte import AirbyteManagedElementReconciler, airbyte_resource - -airbyte_instance = airbyte_resource.configured( - { - "host": "localhost", - "port": "8000", - # If using basic auth, include username and password: - "username": "airbyte", - "password": {"env": "AIRBYTE_PASSWORD"}, - } -) - -airbyte_reconciler = AirbyteManagedElementReconciler( - airbyte=airbyte_instance, - connections=[], -) -``` - -For more info on setting up an Airbyte resource, refer to [the Airbyte guide](/integrations/airbyte#step-1-connecting-to-airbyte). Additional configuration options for the reconciler are [detailed below](#additional-configuration-options). 
- -For more information on setting up secrets from the environment, refer to the [Environment variables and secrets guide](/guides/dagster/using-environment-variables-and-secrets). - ---- - -## Step 2: Define sources and destinations - -Next, we'll define our sources and destinations. Sources and destinations can be constructed manually using the `AirbyteSource` and `AirbyteDestination` classes, respectively, but `dagster-airbyte` also provides generated, typed classes for specific source and destination types. Refer to the [Airbyte API docs](/\_apidocs/libraries/dagster-airbyte#managed-config-generated-sources) for the properties required to configure each source and destination type. - -In this example, we'll configure a source that reads from a hosted CSV file and a destination that writes it to a local JSON file. To do this, we'll import the generated classes for the `File` source and `Local JSON` destination: - -```python startafter=start_define_sources endbefore=end_define_sources file=/guides/dagster/ingestion_as_code/airbyte.py dedent=4 -from dagster_airbyte.managed.generated.sources import FileSource -from dagster_airbyte.managed.generated.destinations import LocalJsonDestination - -cereals_csv_source = FileSource( - name="cereals-csv", - url="https://docs.dagster.io/assets/cereal.csv", - format="csv", - provider=FileSource.HTTPSPublicWeb(), - dataset_name="cereals", -) - -local_json_destination = LocalJsonDestination( - name="local-json", - destination_path="/local/cereals_out.json", -) -``` - ---- - -## Step 3: Define a connection - -Next, we'll define a connection between the source and destination using the [`AirbyteConnection`](/\_apidocs/libraries/dagster-airbyte#dagster_airbyte.AirbyteConnection) class: - -```python startafter=start_define_connection endbefore=end_define_connection file=/guides/dagster/ingestion_as_code/airbyte.py dedent=4 -from dagster_airbyte import AirbyteConnection, AirbyteSyncMode - -cereals_connection = AirbyteConnection( - name="download-cereals", - source=cereals_csv_source, - destination=local_json_destination, - stream_config={"cereals": AirbyteSyncMode.full_refresh_overwrite()}, -) -``` - -Then, we'll supply the new connection to the reconciler we defined in [Step 1](#step-1-define-a-reconciler): - -```python startafter=start_new_reconciler endbefore=end_new_reconciler file=/guides/dagster/ingestion_as_code/airbyte.py dedent=4 -airbyte_reconciler = AirbyteManagedElementReconciler( - airbyte=airbyte_instance, - connections=[cereals_connection], -) -``` - ---- - -## Step 4. Validate changes - -Next, we'll use the `dagster-airbyte` CLI to sanity-check our reconciler and apply any changes. - -The `check` command prints out differences between the current state of the Airbyte instance and the desired state specified in the reconciler. To invoke the CLI, point it at a module containing the reconciler: - -```bash -dagster-airbyte check --module my_python_module.my_submodule:reconciler - -Found 1 reconciler, checking... -+ cereals-csv: - + url: https://docs.dagster.io/assets/cereal.csv - + format: csv - + dataset_name: cereals - + provider: - + user_agent: False - + storage: HTTPS -+ local-json: - + destination_path: /local/cereals_out.json -+ download-cereals: - + source: cereals-csv - + destination: local-json - + normalize data: None - + streams: - + cereals: FULL_REFRESH_OVERWRITE -``` - ---- - -## Step 5. 
Apply changes - -As the changes printed out by the `check` command in the previous step look like what we expect, we can now apply them: - -```bash -dagster-airbyte apply --module my_python_module.my_submodule:reconciler -``` - -Now, we should see our new connection in the Airbyte UI: - - - ---- - -## Step 6. Load connections into Dagster - -To load managed connections into Dagster, use the `load_assets_from_connections` utility method. This functions similarly to [`load_assets_from_airbyte_instance`](/integrations/airbyte#loading-airbyte-asset-definitions-from-an-airbyte-instance), but validates that the connections passed in match the connections defined in your Airbyte instance: - -```python startafter=start_load_assets endbefore=end_load_assets file=/guides/dagster/ingestion_as_code/airbyte.py dedent=4 -from dagster_airbyte import load_assets_from_connections, airbyte_resource - -airbyte_instance = airbyte_resource.configured( - { - "host": "localhost", - "port": 8000, - # If using basic auth, include username and password: - "username": "airbyte", - "password": {"env": "AIRBYTE_PASSWORD"}, - } -) - -airbyte_assets = load_assets_from_connections( - airbyte=airbyte_instance, connections=[cereals_connection] -) -``` - -For more info on managing Airbyte assets in Dagster, refer to the [Airbyte guide](/integrations/airbyte). - ---- - -## Additional configuration options - -The Airbyte reconciler also supports some additional configuration options, which can be passed to the `AirbyteManagedElementReconciler` constructor. - -By default, the reconciler will not modify any sources, destinations, or connections which are outside of those specified in the reconciler. This allows you to adopt the reconciler incrementally, without having to reconcile all of your existing Airbyte configuration. - -To reconcile all of your existing Airbyte configuration, pass `delete_unmentioned_resources=True` to the reconciler constructor: - -```python startafter=start_new_reconciler_delete endbefore=end_new_reconciler_delete file=/guides/dagster/ingestion_as_code/airbyte.py dedent=4 -airbyte_reconciler = AirbyteManagedElementReconciler( - airbyte=airbyte_instance, connections=[...], delete_unmentioned_resources=True -) -``` - -This tells the reconciler to clean up any sources, destinations, or connections which are not explicitly defined in Python code. - ---- - -## Related - - - - - - diff --git a/docs/content/guides/dagster/asset-versioning-and-caching.mdx b/docs/content/guides/dagster/asset-versioning-and-caching.mdx index 4551347d1ecf0..8c740d59db832 100644 --- a/docs/content/guides/dagster/asset-versioning-and-caching.mdx +++ b/docs/content/guides/dagster/asset-versioning-and-caching.mdx @@ -35,7 +35,7 @@ In computationally expensive data pipelining, this approach can yield tremendous ## Step one: Understanding data versions -By default, Dagster automatically computes a data version for each materialization of a software-defined asset. It does this by hashing a code version together with the data versions of any input assets. +By default, Dagster automatically computes a data version for each materialization of an asset. It does this by hashing a code version together with the data versions of any input assets. 
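+
+As a quick, hedged sketch of that idea (the asset names below are hypothetical, not part of this guide's running example), you can also pin a code version explicitly with the `code_version` argument to `@asset`; Dagster then hashes it together with the data versions of the asset's inputs:
+
+```python
+from dagster import asset
+
+
+@asset(code_version="v1")
+def versioned_number():
+    # bump code_version when this computation changes
+    return 11
+
+
+@asset(code_version="v1")
+def multiplied_number(versioned_number):
+    # this asset's data version hashes its own code version together
+    # with the data version of versioned_number
+    return versioned_number * 2
+```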
Let's start with a trivial asset that returns a hardcoded number: diff --git a/docs/content/guides/dagster/branch_deployments.mdx b/docs/content/guides/dagster/branch_deployments.mdx index 7bf65949d6506..073fac459549e 100644 --- a/docs/content/guides/dagster/branch_deployments.mdx +++ b/docs/content/guides/dagster/branch_deployments.mdx @@ -1,15 +1,15 @@ --- -title: Testing against production with Dagster Cloud Branch Deployments | Dagster +title: Testing against production with Dagster+ Branch Deployments | Dagster description: This guide illustrates a workflow that enables testing Dagster code in your cloud environment without impacting your production data. --- -# Testing against production with Dagster Cloud Branch Deployments +# Testing against production with Dagster+ Branch Deployments -This guide is applicable to Dagster Cloud. +This guide is applicable to Dagster+. -This guide details a workflow to test Dagster code in your cloud environment without impacting your production data. To highlight this functionality, we’ll leverage Dagster Cloud branch deployments and a Snowflake database to: +This guide details a workflow to test Dagster code in your cloud environment without impacting your production data. To highlight this functionality, we’ll leverage Dagster+ branch deployments and a Snowflake database to: -- Execute code on a feature branch directly on Dagster Cloud +- Execute code on a feature branch directly on Dagster+ - Read and write to a unique per-branch clone of our Snowflake data With these tools, we can merge changes with confidence in the impact on our data platform and with the assurance that our code will execute as intended. @@ -33,17 +33,16 @@ Here’s an overview of the main concepts we’ll be using: Transitioning data pipelines from development to production {" "} guide, illustrating a workflow for staging deployments. We’ll use the examples - from this guide to build a workflow atop Dagster Cloud’s branch deployment - feature. + from this guide to build a workflow atop Dagster+’s branch deployment feature. To complete the steps in this guide, you'll need: -- A Dagster Cloud account -- An existing Branch Deployments setup that uses [GitHub actions](/dagster-cloud/managing-deployments/branch-deployments/using-branch-deployments-with-github) or [Gitlab CI/CD](/dagster-cloud/managing-deployments/branch-deployments/using-branch-deployments-with-gitlab). Your setup should contain a Dagster project set up for branch deployments containing: +- A Dagster+ account +- An existing Branch Deployments setup that uses [GitHub actions](/dagster-plus/managing-deployments/branch-deployments/using-branch-deployments-with-github) or [Gitlab CI/CD](/dagster-plus/managing-deployments/branch-deployments/using-branch-deployments-with-gitlab). Your setup should contain a Dagster project set up for branch deployments containing: - Either a GitHub actions workflow file (e.g. `.github/workflows/branch-deployments.yaml`) or a Gitlab CI/CD file (e.g. 
`.gitlab-ci.yml`)
  - Dockerfile that installs your Dagster project
-- User permissions in Dagster Cloud that allow you to [access Branch Deployments](/dagster-cloud/account/managing-users/managing-user-roles-permissions)
+- User permissions in Dagster+ that allow you to [access Branch Deployments](/dagster-plus/account/managing-users/managing-user-roles-permissions)

---

@@ -126,12 +125,12 @@ At runtime, we'd like to determine which environment our code is running in: b

To ensure we can't accidentally write to production from within our branch deployment, we'll use a different set of credentials from production and write to our database clone.

-Dagster automatically sets certain [environment variables](/dagster-cloud/managing-deployments/environment-variables-and-secrets#built-in-environment-variables) containing deployment metadata, allowing us to read these environment variables to discern between deployments. We can access the `DAGSTER_CLOUD_IS_BRANCH_DEPLOYMENT` environment variable to determine the currently executing environment.
+Dagster automatically sets certain [environment variables](/dagster-plus/managing-deployments/reserved-environment-variables) containing deployment metadata, allowing us to read these environment variables to discern between deployments. We can access the `DAGSTER_CLOUD_IS_BRANCH_DEPLOYMENT` environment variable to determine the currently executing environment.

Because we want to configure our assets to write to Snowflake using a different set of credentials and database in each environment, we'll configure a separate I/O manager for each environment:

```python file=/guides/dagster/development_to_production/branch_deployments/repository_v1.py startafter=start_repository endbefore=end_repository
-# __init__.py
+# definitions.py
from dagster import Definitions

from ..assets import comments, items, stories

@@ -170,7 +169,7 @@ defs = Definitions(
)
```

-Refer to the [Dagster Cloud environment variables documentation](/dagster-cloud/managing-deployments/environment-variables-and-secrets) for more info about available environment variables.
+Refer to the [Dagster+ environment variables documentation](/dagster-plus/managing-deployments/environment-variables-and-secrets) for more info about available environment variables.

---

@@ -367,7 +366,7 @@ deploy-docker-branch:
   ...
   # Add a step to launch the job cloning the prod db
   - dagster-cloud job launch
     --url "$DAGSTER_CLOUD_URL/$DEPLOYMENT_NAME"
     --api-token "$DAGSTER_CLOUD_API_TOKEN"
     --location "location_name_containing_clone_prod_job"
@@ -407,7 +406,8 @@ height={537}

## Step 5: Delete our database clone upon closing a branch

-
+
+

Finally, we can add a step to our `branch_deployments.yml` file that queues a run of our `drop_prod_clone` job:

```yaml file=/guides/dagster/development_to_production/branch_deployments/drop_db_clone.yaml
@@ -445,6 +445,7 @@ name: Dagster Branch Deployments
+

Finally, we can add a step to our `.gitlab-ci.yml` file that queues a run of our `drop_prod_clone` job:

```yaml file=/guides/dagster/development_to_production/branch_deployments/drop_db_clone.gitlab-ci.yml
@@ -483,7 +484,7 @@ close_branch:

   ...
   # Add a step to launch the job dropping the cloned db
   - dagster-cloud job launch
     --url "$DAGSTER_CLOUD_URL/$DEPLOYMENT_NAME"
     --api-token "$DAGSTER_CLOUD_API_TOKEN"
     --location "location_name_containing_drop_prod_clone_job"
diff --git a/docs/content/guides/dagster/code-references.mdx b/docs/content/guides/dagster/code-references.mdx
new file mode 100644
index 0000000000000..52e546c9eb490
--- /dev/null
+++ b/docs/content/guides/dagster/code-references.mdx
@@ -0,0 +1,241 @@
+---
+title: Linking to asset definition code with code references | Dagster Docs
+---
+
+# Linking to asset definition code with code references
+
+
+  This feature is considered experimental and is under active
+  development. This guide will be updated as we roll out new features.
+
+
+Attaching code reference metadata to your Dagster asset definitions allows you to easily view those assets' source code from the Dagster UI:
+
+- **In local development**, navigate directly to the code backing an asset in your editor
+- **In your production environment**, link to source code in your source control repository to see the full change history for an asset
+
+In this guide, we'll show you how to automatically and manually attach code references to your Dagster assets.
+
+---
+
+## Prerequisites
+
+To complete the steps in this guide, you'll need:
+
+- A set of Dagster asset definitions that you want to link to code
+- Dagster version `1.7.6` or newer
+
+---
+
+## Automatically attaching local file code references to asset definitions
+
+### Assets defined in Python
+
+To automatically attach code references to Python assets' function definitions, you can use the `with_source_code_references` utility. Any asset definitions passed to the utility will have their source file attached as metadata.
+
+For example, given the following Python file `with_source_code_references.py`:
+
+```python file=/guides/dagster/code_references/with_source_code_references.py
+from dagster import Definitions, asset, with_source_code_references
+
+
+@asset
+def my_asset(): ...
+
+
+@asset
+def another_asset(): ...
+
+
+defs = Definitions(assets=with_source_code_references([my_asset, another_asset]))
+```
+
+A link to the asset's source in `with_source_code_references.py` will then be visible in the **Asset Catalog** view in the Dagster UI:
+
+
      + Asset catalog view showing link to with_source_code_references.py +
+
+### dbt assets
+
+Dagster's dbt integration can automatically attach references to the SQL files backing your dbt assets. For more information, see the [dagster-dbt integration reference](/integrations/dbt/reference#attaching-code-reference-metadata).
+
+---
+
+## Manually attaching local file code references to asset definitions
+
+In some cases, you may want to manually attach code references to your asset definitions. Some assets may have a more complex source structure, such as an asset whose definition is spread across multiple Python source files or an asset which is partially defined with a `.sql` model file.
+
+To manually attach code references to an asset definition, attach a `CodeReferencesMetadataValue` containing one or more `LocalFileCodeReference` entries. You can then choose to augment these manual references with `with_source_code_references`:
+
+```python file=/guides/dagster/code_references/manual_references.py
+import os
+
+from dagster import (
+    CodeReferencesMetadataValue,
+    Definitions,
+    LocalFileCodeReference,
+    asset,
+    with_source_code_references,
+)
+
+
+@asset(
+    metadata={
+        "dagster/code_references": CodeReferencesMetadataValue(
+            code_references=[
+                LocalFileCodeReference(
+                    file_path=os.path.join(os.path.dirname(__file__), "source.yaml"),
+                    # Label and line number are optional
+                    line_number=1,
+                    label="Model YAML",
+                )
+            ]
+        )
+    }
+)
+def my_asset_modeled_in_yaml(): ...
+
+
+defs = Definitions(assets=with_source_code_references([my_asset_modeled_in_yaml]))
+```
+
+Each of the code references to `manual_references.py` will be visible in the **Asset details** page in the Dagster UI:
+
+
      + Asset details view showing link to multiple files +
+
+---
+
+## Converting code references to link to a remote git repository
+
+In a local context, it is useful to specify local code references in order to navigate directly to the source code of an asset. However, in a production environment, you may want to link to the source control repository where the code is stored.
+
+### In Dagster+
+
+If using Dagster+, you can use the `link_code_references_to_git_if_cloud` utility to conditionally convert local file code references to source control links. This utility will automatically detect if your code is running in a Dagster Cloud environment and convert local file code references to source control links, pointing at the commit hash of the code running in the current deployment.
+
+```python
+import os
+from pathlib import Path
+
+from dagster_cloud.metadata.source_code import link_code_references_to_git_if_cloud
+
+from dagster import (
+    AnchorBasedFilePathMapping,
+    Definitions,
+    asset,
+    with_source_code_references,
+)
+
+
+@asset
+def my_asset(): ...
+
+
+@asset
+def another_asset(): ...
+
+
+defs = Definitions(
+    assets=link_code_references_to_git_if_cloud(
+        assets_defs=with_source_code_references([my_asset, another_asset]),
+        # Inferred from searching for .git directory in parent directories
+        # of the module containing this code - may also be set explicitly
+        file_path_mapping=AnchorBasedFilePathMapping(
+            local_file_anchor=Path(__file__),
+            file_anchor_path_in_repository="src/repo.py",
+        ),
+    )
+)
+```
+
+### In any Dagster environment
+
+The `link_code_references_to_git` utility allows you to convert local file code references to source control links. You'll need to provide the base URL of your git repository, the branch or commit hash, and a file path mapping, which tells Dagster how to convert local file paths to paths in the repository. The simplest way to do so is with an `AnchorBasedFilePathMapping`, which uses a local file path and the corresponding path in the repository to infer the mapping for other files.
+
+```python file=/guides/dagster/code_references/link_to_source_control.py
+from pathlib import Path
+
+from dagster import (
+    AnchorBasedFilePathMapping,
+    Definitions,
+    asset,
+    link_code_references_to_git,
+    with_source_code_references,
+)
+
+
+@asset
+def my_asset(): ...
+
+
+@asset
+def another_asset(): ...
+
+
+defs = Definitions(
+    assets=link_code_references_to_git(
+        assets_defs=with_source_code_references([my_asset, another_asset]),
+        git_url="https://github.com/dagster-io/dagster",
+        git_branch="main",
+        file_path_mapping=AnchorBasedFilePathMapping(
+            local_file_anchor=Path(__file__),
+            file_anchor_path_in_repository="src/repo.py",
+        ),
+    )
+)
+```
+
+You may choose to conditionally apply this transformation based on the environment in which your Dagster code is running. For example, you could use an environment variable to determine whether to link to local files or to a source control repository:
+
+```python file=/guides/dagster/code_references/link_to_source_control_conditional.py
+import os
+from pathlib import Path
+
+from dagster import (
+    AnchorBasedFilePathMapping,
+    Definitions,
+    asset,
+    link_code_references_to_git,
+    with_source_code_references,
+)
+
+
+@asset
+def my_asset(): ...
+
+
+@asset
+def another_asset(): ...
+ + +assets = with_source_code_references([my_asset, another_asset]) + +defs = Definitions( + assets=link_code_references_to_git( + assets_defs=assets, + git_url="https://github.com/dagster-io/dagster", + git_branch="main", + file_path_mapping=AnchorBasedFilePathMapping( + local_file_anchor=Path(__file__), + file_anchor_path_in_repository="src/repo.py", + ), + ) + if bool(os.getenv("IS_PRODUCTION")) + else assets +) +``` diff --git a/docs/content/guides/dagster/dagster_type_factories.mdx b/docs/content/guides/dagster/dagster_type_factories.mdx index 2cfd7b565466b..a28de55b39a7e 100644 --- a/docs/content/guides/dagster/dagster_type_factories.mdx +++ b/docs/content/guides/dagster/dagster_type_factories.mdx @@ -86,7 +86,7 @@ def generate_plot(context: OpExecutionContext, trips): bin_edges = np.histogram_bin_edges(minute_lengths, 15) fig, ax = plt.subplots(figsize=(10, 5)) ax.set(title="Trip lengths", xlabel="Minutes", ylabel="Count") - ax.hist(minute_lengths, bins=bin_edges) + ax.hist(minute_lengths, bins=list(bin_edges)) fig.savefig("trip_lengths.png") context.log_event( AssetMaterialization( @@ -251,7 +251,7 @@ def generate_plot(context: OpExecutionContext, trips): bin_edges = np.histogram_bin_edges(minute_lengths, 15) fig, ax = plt.subplots(figsize=(10, 5)) ax.set(title="Trip lengths", xlabel="Minutes", ylabel="Count") - ax.hist(minute_lengths, bins=bin_edges) + ax.hist(minute_lengths, bins=list(bin_edges)) fig.savefig("trip_lengths.png") context.log_event( AssetMaterialization( diff --git a/docs/content/guides/dagster/enriching-with-software-defined-assets.mdx b/docs/content/guides/dagster/enriching-with-software-defined-assets.mdx index 50ab2604997e7..70a143ee48f0e 100644 --- a/docs/content/guides/dagster/enriching-with-software-defined-assets.mdx +++ b/docs/content/guides/dagster/enriching-with-software-defined-assets.mdx @@ -1,26 +1,26 @@ --- -title: Upgrading to Software-defined Assets | Dagster Docs -description: Understand when, why, and how to use Software-defined Assets (SDAs) in Dagster, as well as how SDAs work with other core Dagster concepts. +title: Upgrading to asset definitions | Dagster Docs +description: Understand when, why, and how to use asset definitions in Dagster, as well as how they work with other core Dagster concepts. --- -# Upgrading to Software-defined Assets +# Upgrading to asset definitions -Familiar with ops and graphs? Want to understand when, why, and how to use Software-defined Assets in Dagster? If so, this guide is for you. We'll also demonstrate what some common Dagster jobs look like before and after using Software-defined Assets. +Familiar with ops and graphs? Want to understand when, why, and how to use asset definitions in Dagster? If so, this guide is for you. We'll also demonstrate what some common Dagster jobs look like before and after using asset definitions. Before we jump in, here's a quick refresher: - An **asset** is a persistent object in storage, such as a table, machine learning (ML) model, or file. - An [**op**](/concepts/ops-jobs-graphs/ops) is the core unit of computation in Dagster. For example, an op might accept tabular data as its input and produce transformed tabular data as its output. - A [**graph**](/concepts/ops-jobs-graphs/graphs) is a directed acyclic graph of ops or other graphs, which execute in order and pass data to each other. 
-- A [**Software-defined Asset**](/concepts/assets/software-defined-assets) is a declaration of an asset that should exist and a description of how to compute it: the op or graph that needs to run and the upstream assets that it should run on.
+- An [**asset definition**](/concepts/assets/software-defined-assets) is a declaration of an asset that should exist and a description of how to compute it: the op or graph that needs to run and the upstream assets that it should run on.

-**Software-defined assets aren't a replacement for Dagster's core computational concepts** - ops are, in fact, the core unit of computation that occurs **within an asset**. Think of them as a top layer that links ops, graphs, and jobs to the long-lived objects they interact with.
+**Asset definitions aren't a replacement for Dagster's core computational concepts** - ops are, in fact, the core unit of computation that occurs **within an asset**. Think of them as a top layer that links ops, graphs, and jobs to the long-lived objects they interact with.

---

-## Why use Software-defined Assets?
+## Why use asset definitions?

-Using Software-defined Assets means building Dagster jobs in a way that declares _ahead of time_ the assets they produce and consume. This is different than using the `AssetMaterialization` API, which only informs Dagster at runtime about the assets a job interacted with.
+Using asset definitions means building Dagster jobs in a way that declares _ahead of time_ the assets they produce and consume. This is different from using the `AssetMaterialization` API, which only informs Dagster at runtime about the assets a job interacted with.

Preemptively declaring assets offers distinct advantages, including:

@@ -30,13 +30,13 @@ Preemptively declaring assets offers distinct advantages, including:

### Lineage

-As Software-defined Assets know what other assets they depend on, an asset's lineage can be [viewed easily in the Dagster UI](/concepts/assets/software-defined-assets#viewing-assets-in-the-ui).
+As asset definitions know what other assets they depend on, an asset's lineage can be [viewed easily in the Dagster UI](/concepts/assets/software-defined-assets#viewing-assets-in-the-ui).

Assets help track and define cross-job dependencies. For example, when viewing a job that materializes assets, you can navigate to the jobs that produce the assets that it depends on. Additionally, when an upstream asset has been updated more recently than a downstream asset, Dagster will indicate that the downstream asset might be out of date.

### Direct operation

-Using Software-defined Assets enables you to directly operate them in the UI. On the [Asset's Details page](/concepts/webserver/ui#asset-details), you can:
+Using asset definitions enables you to directly operate your assets in the UI. On the [Asset's Details page](/concepts/webserver/ui#asset-details), you can:

- View the materialization history of the asset
- Check when the next materialization will occur

@@ -45,13 +45,13 @@ Using Software-defined Assets enables you to directly operate them in the UI. On

### Improved code ergonomics

-Software-defined assets provide sizeable improvements when it comes to code ergonomics:
+Asset definitions provide sizeable improvements when it comes to code ergonomics:

-- **You'll usually write less code**. Specifying the inputs to a Software-defined Asset defines the assets it depends on. This means you don't need to use `In` and `Out` definitions to wire dependencies between ops.
+- **You'll usually write less code**.
Specifying the inputs to an asset definition defines the assets it depends on. This means you don't need to use `In` and `Out` definitions to wire dependencies between ops.

  This approach improves scalability by reducing the number of times an asset's name appears in your codebase by half. Refer to the [I/O manager-based example](#materialize-two-interdependent-tables-without-an-io-manager) below to see this in action.

-- **You no longer have to choose between easy dependency tracking and manageable organization.** Without Software-defined Assets, you're often forced to:
+- **You no longer have to choose between easy dependency tracking and manageable organization.** Without asset definitions, you're often forced to:

  - Contain everything in a single mega-job, which allows for easy dependency tracking but creates maintenance difficulties, OR
  - Split your pipeline into smaller jobs, which allows for easy maintenance but makes dependency tracking difficult

@@ -60,14 +60,14 @@ Software-defined assets provide sizeable improvements when it comes to code ergo

---

-## When should I use Software-defined Assets?
+## When should I use asset definitions?

-You should use Software-defined Assets when:
+You should use asset definitions when:

- You're using Dagster to produce or maintain assets, AND
- You know what those assets will be before you launch any runs.

-Note that using Software-defined Assets in one job doesn't mean they need to be used in all your jobs. If your use case doesn't meet these criteria, you can still use graphs and ops.
+Note that using asset definitions in one job doesn't mean they need to be used in all your jobs. If your use case doesn't meet these criteria, you can still use graphs and ops.

Still not sure? Check out these examples to see what's a good fit and what isn't:

@@ -145,11 +145,11 @@ Still not sure? Check out these examples to see what's a good fit and what isn't

---

-## How do I upgrade jobs to use Software-defined Assets?
+## How do I upgrade jobs to use asset definitions?

-Let's say you've written jobs that you want to enrich using Software-defined Assets. Assuming assets are known and being updated, what would upgrading look like?
+Let's say you've written jobs that you want to enrich using asset definitions. Assuming assets are known and being updated, what would upgrading look like?

-Generally, every op output in a job that corresponds to a long-lived object in storage should have a Software-defined Asset. The following examples demonstrate some realistic Dagster jobs, both with and without Software-defined Assets:
+Generally, every op output in a job that corresponds to a long-lived object in storage should have an asset definition. The following examples demonstrate some realistic Dagster jobs, both with and without asset definitions:

- [A job that materializes two interdependent tables](#materialize-two-interdependent-tables)
- [A job that materializes two interdependent tables without an I/O manager](#materialize-two-interdependent-tables-without-an-io-manager)

@@ -157,8 +157,8 @@ Generally, every op output in a job that corresponds to a long-lived object in s

  This isn't an exhaustive list! We're adding the ability to define jobs that
-  materialize Software-defined Assets and then run arbitrary ops. Interested?
-  We'd love to hear from you in{" "}
+  materialize assets and then run arbitrary ops. Interested?
We'd love to hear
+  from you in{" "}
   Slack
   {" "}

@@ -172,11 +172,11 @@ Generally, every op output in a job that corresponds to a long-lived object in s

### Materialize two interdependent tables

-
+

This example is a vanilla, op-based job that follows the idiomatic practice of delegating all I/O to I/O managers and input managers.

-The goal of each op in the job is to produce an asset. However, because the job doesn't use the Software-defined Asset APIs, Dagster is unaware of this:
+The goal of each op in the job is to produce an asset. However, because the job doesn't use the asset definition APIs, Dagster is unaware of this:

```python file=/guides/dagster/enriching_with_software_defined_assets/vanilla_io_manager.py
from pandas import DataFrame

@@ -212,18 +212,18 @@ defs = Definitions(
```

-
+

-Here's what an equivalent job looks like using Software-defined Assets:
+Here's what an equivalent job looks like using asset definitions:

```python file=/guides/dagster/enriching_with_software_defined_assets/sda_io_manager.py
from pandas import DataFrame

-from dagster import Definitions, SourceAsset, asset, define_asset_job
+from dagster import AssetSpec, Definitions, asset, define_asset_job

from .mylib import s3_io_manager, snowflake_io_manager, train_recommender_model

-raw_users = SourceAsset(key="raw_users", io_manager_key="warehouse")
+raw_users = AssetSpec(key="raw_users", metadata={"dagster/io_manager_key": "warehouse"})


@asset(io_manager_key="warehouse")
@@ -257,7 +257,7 @@ defs = Definitions(

### Materialize two interdependent tables without an I/O manager

-
+

This example does the same things as the [previous example](#materialize-two-interdependent-tables), with one difference. This job performs I/O inside of the ops instead of delegating it to I/O managers and input managers:

@@ -294,21 +294,19 @@ defs = Definitions(
```

-
+

-Here's an example of an equivalent job that uses Software-defined Assets:
+Here's an example of an equivalent job that uses asset definitions:

```python file=/guides/dagster/enriching_with_software_defined_assets/sda_nothing.py
from pandas import read_sql

-from dagster import Definitions, SourceAsset, asset, define_asset_job
+from dagster import Definitions, asset, define_asset_job

from .mylib import create_db_connection, pickle_to_s3, train_recommender_model

-raw_users = SourceAsset(key="raw_users")
-
-@asset(deps=[raw_users])
+@asset(deps=["raw_users"])
def users() -> None:
    raw_users_df = read_sql("select * from raw_users", con=create_db_connection())
    users_df = raw_users_df.dropna()
@@ -337,7 +335,7 @@ defs = Definitions(

### Not all ops produce assets

-
+

This example demonstrates a job where some of the ops (`extract_products` and `get_categories`) don't produce assets of their own. Instead, they produce transient data that downstream ops will use to produce assets:

@@ -382,11 +380,11 @@ defs = Definitions(
```

-
+

-Here's an equivalent job using Software-defined Assets.
+Here's an equivalent job using asset definitions.

-**Note:** Because some ops don't correspond to assets, this job uses the op and graph APIs, along with the graph-backed asset APIs, to wrap a graph in a Software-defined Asset:
+**Note:** Because some ops don't correspond to assets, this job uses the op and graph APIs, along with the graph-backed asset APIs, to wrap a graph in an asset definition:

```python file=/guides/dagster/enriching_with_software_defined_assets/sda_graph.py
from pandas import DataFrame

@@ -440,23 +438,23 @@ defs = Definitions(

---

-## How do Software-defined Assets work with other Dagster concepts?
+## How do asset definitions work with other Dagster concepts?
-Still not sure how Software-defined Assets fit into your current Dagster usage? In this section, we'll touch on how Software-defined Assets work with some of Dagster's core concepts.
+Still not sure how asset definitions fit into your current Dagster usage? In this section, we'll touch on how asset definitions work with some of Dagster's core concepts.

### Ops and graphs

-| Without Software-defined Assets | With Software-defined Assets |
+| Without asset definitions | With asset definitions |
| --------------------------------------------------------------------------------------------------------- | --------------------------------------------------------------------------------------------------------------------------- |
-| An [op](/concepts/ops-jobs-graphs/ops) is the basic unit of computation | Every Software-defined Asset includes a graph or an op |
-| A [graph](/concepts/ops-jobs-graphs/graphs) is a composite unit of computation that connects multiple ops | Every Software-defined Asset includes a graph or an op |
+| An [op](/concepts/ops-jobs-graphs/ops) is the basic unit of computation | Every asset definition includes a graph or an op |
+| A [graph](/concepts/ops-jobs-graphs/graphs) is a composite unit of computation that connects multiple ops | Every asset definition includes a graph or an op |
| Ops can have multiple outputs | Multiple assets can be produced by a single op when defined using the `@multi_asset` decorator |
| Ops can use [config](/concepts/ops-jobs-graphs/ops#op-configuration) | Assets can use [config](/concepts/assets/software-defined-assets) |
| Ops can access `OpExecutionContext` | Assets can access `AssetExecutionContext` |
-| Ops can require [resources](/concepts/resources) | Software-defined Assets can require [resources](/concepts/resources) |
+| Ops can require [resources](/concepts/resources) | Asset definitions can require [resources](/concepts/resources) |
| Ops can be tested by directly invoking them | Assets can be tested by directly invoking them |
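+
+Since both ops and assets are, under the hood, plain Python functions, the "directly invoking" row in the table above can be made concrete with a minimal sketch (hypothetical names, not taken from these guides):
+
+```python
+from dagster import asset
+
+
+@asset
+def my_number():
+    return 1 + 1
+
+
+# the decorated function can be called directly in a unit test
+def test_my_number():
+    assert my_number() == 2
+```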
### Jobs

-| Without Software-defined Assets | With Software-defined Assets |
-| --------------------------------------------------------------------------------------------------------------------------------------------- | --------------------------------------------------------------------------------------------------------------------------------------------------- |
-| A job targets a graph of ops | An asset job targets a selection of Software-defined Assets |
-| Jobs can be [partitioned](/concepts/partitions-schedules-sensors/partitions) | Assets can be [partitioned](/concepts/partitions-schedules-sensors/partitions) |
-| Jobs can be put on [schedules](/concepts/partitions-schedules-sensors/schedules) or [sensors](/concepts/partitions-schedules-sensors/sensors) | Asset jobs can be put on [schedules](/concepts/partitions-schedules-sensors/schedules) or [sensors](/concepts/partitions-schedules-sensors/sensors) |
+| Without asset definitions | With asset definitions |
+| --------------------------------------------------------------------------------------------------------------------------- | --------------------------------------------------------------------------------------------------------------------------------- |
+| A job targets a graph of ops | An asset job targets a selection of asset definitions |
+| Jobs can be [partitioned](/concepts/partitions-schedules-sensors/partitions) | Assets can be [partitioned](/concepts/partitions-schedules-sensors/partitions) |
+| Jobs can be put on [schedules](/concepts/automation/schedules) or [sensors](/concepts/partitions-schedules-sensors/sensors) | Asset jobs can be put on [schedules](/concepts/automation/schedules) or [sensors](/concepts/partitions-schedules-sensors/sensors) |

### Dagster types

-| Without Software-defined Assets | With Software-defined Assets |
+| Without asset definitions | With asset definitions |
| ----------------------------------------------------------------------------------------------------------------------------------------- | --------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- |
-| Op outputs and inputs can have [Dagster types](/concepts/types) | Software-defined assets can have [Dagster types](/concepts/types) |
+| Op outputs and inputs can have [Dagster types](/concepts/types) | Asset definitions can have [Dagster types](/concepts/types) |
| The `Nothing` Dagster type enables declaring that Dagster doesn't need to store or load the object corresponding to an op output or input | The [`deps`](/concepts/assets/software-defined-assets#defining-basic-dependencies) argument when defining an asset enables specifying dependencies without relying on Dagster to store or load objects corresponding to that dependency |

@@ -485,7 +483,7 @@

### Code locations

-| Without Software-defined Assets | With Software-defined Assets |
+| Without asset definitions | With asset definitions |
| ------------------------------------------------------------------------------------------- | --------------------------------------------------------------------- |
| Code locations (`Definitions` objects) can contain jobs, schedules, and sensors | Code locations (`Definitions` objects) can contain assets |

@@ -494,7 +492,7 @@
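+
+To make the code-locations row concrete, here is a minimal sketch (hypothetical names) of a `Definitions` object that describes one code location containing an asset and an asset job:
+
+```python
+from dagster import Definitions, asset, define_asset_job
+
+
+@asset
+def my_asset():
+    return 1
+
+
+# one Definitions object describes one code location
+defs = Definitions(
+    assets=[my_asset],
+    jobs=[define_asset_job("my_job", selection="my_asset")],
+)
+```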
### I/O managers -| Without Software-defined Assets | With Software-defined Assets | +| Without asset definitions | With asset definitions | | --------------------------------------------------------------------------------------------------------------- | ------------------------------------------------------------------------------------------------ | | [I/O managers](/concepts/io-management/io-managers) can control how op inputs and outputs are loaded and stored | [I/O managers](/concepts/io-management/io-managers) can control how assets are loaded and stored | diff --git a/docs/content/guides/dagster/example_project.mdx b/docs/content/guides/dagster/example_project.mdx index c86f00540c3bb..6931f1b380526 100644 --- a/docs/content/guides/dagster/example_project.mdx +++ b/docs/content/guides/dagster/example_project.mdx @@ -46,7 +46,7 @@ This example shows useful patterns for many Dagster concepts, including: ### Organizing your assets in groups -[Software-defined assets](/concepts/assets/software-defined-assets) - An asset is a software object that models a data asset. The prototypical example is a table in a database or a file in cloud storage. +[Asset definitions](/concepts/assets/software-defined-assets) - An asset is a software object that models a data asset. The prototypical example is a table in a database or a file in cloud storage. This example contains [three asset groups](https://github.com/dagster-io/dagster/blob/master/examples/project_fully_featured/project_fully_featured/assets/\__init\_\_.py): @@ -81,7 +81,7 @@ The way we model resources helps separate the business logic in code from enviro ### Scheduling and triggering jobs -[Schedules](/concepts/partitions-schedules-sensors/schedules) - A schedule allows you to execute a [job](/concepts/ops-jobs-graphs/jobs) at a fixed interval. This example includes an [hourly schedule](https://github.com/dagster-io/dagster/blob/master/examples/project_fully_featured/project_fully_featured/jobs.py) that materializes the `core` asset group every hour. +[Schedules](/concepts/automation/schedules) - A schedule allows you to execute a [job](/concepts/ops-jobs-graphs/jobs) at a fixed interval. This example includes an [hourly schedule](https://github.com/dagster-io/dagster/blob/master/examples/project_fully_featured/project_fully_featured/jobs.py) that materializes the `core` asset group every hour. [Sensors](/concepts/partitions-schedules-sensors/sensors) - A sensor allows you to instigate runs based on some external state change. In this example, we have sensors to react to different state changes: diff --git a/docs/content/guides/dagster/how-assets-relate-to-ops-and-graphs.mdx b/docs/content/guides/dagster/how-assets-relate-to-ops-and-graphs.mdx index 33d6e803028f5..09efd821b6e07 100644 --- a/docs/content/guides/dagster/how-assets-relate-to-ops-and-graphs.mdx +++ b/docs/content/guides/dagster/how-assets-relate-to-ops-and-graphs.mdx @@ -1,11 +1,11 @@ --- -title: Understanding how software-defined assets relate to ops and graphs | Dagster Docs -description: Dagster’s main abstraction for building data pipelines is the software-defined asset. However, Dagster also has ops and graphs. How do these relate? When should you use one over the other? +title: Understanding how asset definitions relate to ops and graphs | Dagster Docs +description: Dagster’s main abstraction for building data pipelines is the asset definition. However, Dagster also has ops and graphs. How do these relate? When should you use one over the other? 
---

-# Understanding how software-defined assets relate to ops and graphs
+# Understanding how asset definitions relate to ops and graphs

-Dagster's main abstraction for building data pipelines is the _software-defined asset_. However, Dagster also has abstractions called _ops_ and _graphs_.
+Dagster's main abstraction for building data pipelines is the _asset definition_. However, Dagster also has abstractions called _ops_ and _graphs_.

If you're not sure which one to use, this guide is for you. In this guide, we'll cover:

@@ -16,7 +16,7 @@ If you're not sure which one to use, this guide is for you. In this guide, we'll

## When should I use assets or ops and graphs?

-Dagster is mainly used to build data pipelines, and most data pipelines can be expressed in Dagster as sets of [software-defined assets](/concepts/assets/software-defined-assets). If you're a new Dagster user **and** your goal is to build a data pipeline, we recommend starting with software-defined assets and not worrying about ops or graphs. This is because most of the code you'll be writing will directly relate to producing data assets.
+Dagster is mainly used to build data pipelines, and most data pipelines can be expressed in Dagster as sets of [asset definitions](/concepts/assets/software-defined-assets). If you're a new Dagster user **and** your goal is to build a data pipeline, we recommend starting with asset definitions and not worrying about ops or graphs. This is because most of the code you'll be writing will directly relate to producing data assets.

However, there are some situations where you want to run code without thinking about data assets that the code is producing. In these cases, it's appropriate to use [ops](/concepts/ops-jobs-graphs/ops) and [graphs](/concepts/ops-jobs-graphs/graphs). For example:

@@ -32,9 +32,9 @@ You want to schedule a workflow where the goal is **not** to keep a set of data

- Scan a data warehouse for tables that haven't been used in months and delete them
- Record metadata about a set of data assets

-In these cases, you should define your workflow in terms ops and graphs, not software-defined assets. The [Intro to ops and jobs guide](/guides/dagster/intro-to-ops-jobs) is a good place to start learning how to do this.
+In these cases, you should define your workflow in terms of ops and graphs, not asset definitions. The [Intro to ops and jobs guide](/guides/dagster/intro-to-ops-jobs) is a good place to start learning how to do this.

-Additionally, note that a single Dagster deployment can contain software-defined assets and op/graph-based jobs side-by-side, which means that you're not bound to one particular choice. If your workflow reads from software-defined assets, you can model that explicitly in Dagster, which is discussed in a [a later section](#op-graphs-that-read-from-an-asset).
+Additionally, note that a single Dagster deployment can contain asset definitions and op/graph-based jobs side-by-side, which means that you're not bound to one particular choice. If your workflow reads from asset definitions, you can model that explicitly in Dagster, which is discussed in [a later section](#op-graphs-that-read-from-an-asset).

### Situation 2: You want to break an asset into multiple steps

If you're in a situation like the following:

- Some of the steps don't produce assets of their own
- You need to be able to re-execute individual steps

-In this case, you might want to use a [graph-backed asset](/concepts/assets/graph-backed-assets).
This is discussed more [later in this guide](#graph-backed-assets).
+In this case, you might want to use a [graph-backed asset](/concepts/assets/graph-backed-assets). This is discussed more [later in this guide](#graph-backed-asset-definitions).

### Situation 3: You're anchored in task-based workflows

-Task-based workflows have been a popular way of defining data pipelines for a long time. While we believe that software-defined assets provide a superior way of writing and operating data pipelines, we acknowledge that teams often have existing codebases or mindsets that are heavily anchored in task-based workflows.
+Task-based workflows have been a popular way of defining data pipelines for a long time. While we believe that asset definitions provide a superior way of writing and operating data pipelines, we acknowledge that teams often have existing codebases or mindsets that are heavily anchored in task-based workflows.

-Op-based graphs resemble task-based workflows very closely, so they're a natural choice for data pipelines that want to stick to that paradigm, either permanently or temporarily, while migrating to software-defined assets.
+Op-based graphs resemble task-based workflows very closely, so they're a natural choice for data pipelines that want to stick to that paradigm, either permanently or temporarily, while migrating to asset definitions.

---

## How do assets relate to ops and graphs?

-Next, we'll discuss how assets relate to ops and graphs. By the end of this section, you should understand how each type of software-defined asset relates to ops and graphs.
+Next, we'll discuss how assets relate to ops and graphs. By the end of this section, you should understand how each type of asset definition relates to ops and graphs.

-- [Software-defined assets](#software-defined-assets)
-- [Multi-assets](#multi-assets)
-- [Graph-backed assets](#graph-backed-assets)
+- [Basic asset definitions](#basic-asset-definitions)
+- [Multi-asset definitions](#multi-asset-definitions)
+- [Graph-backed asset definitions](#graph-backed-asset-definitions)
- [Op graphs that read from an asset](#op-graphs-that-read-from-an-asset)

-### Software-defined assets
+### Basic asset definitions

-A software-defined asset is a description of how to compute the contents of a particular data asset.
+An asset definition is a description of how to compute the contents of a particular data asset.

-Under the hood, every software-defined asset contains an op (or graph of ops), which is the function that's invoked to compute its contents. In most cases, the underlying op is invisible to the user.
+Under the hood, every asset definition contains an op (or graph of ops), which is the function that's invoked to compute its contents. In most cases, the underlying op is invisible to the user.

-
+
      Op and software-defined asset
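+
+To make the "underlying op" concrete, here is a minimal sketch (hypothetical asset name) showing that the op still exists behind the decorator:
+
+```python
+from dagster import asset
+
+
+@asset
+def my_asset():
+    return 1
+
+
+# every asset definition wraps an op; by default it shares the asset's name
+assert my_asset.op.name == "my_asset"
+```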
      -### Multi-assets +### Multi-asset definitions When you use the decorator, you’re defining a single op that produces multiple assets: - +
      Multi- software-defined asset
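+
+A minimal sketch of that shape (hypothetical asset names, using `MaterializeResult` to report each asset) might look like:
+
+```python
+from dagster import AssetSpec, MaterializeResult, multi_asset
+
+
+@multi_asset(specs=[AssetSpec("orders"), AssetSpec("users")])
+def orders_and_users():
+    # one op runs once and reports both assets
+    yield MaterializeResult(asset_key="orders")
+    yield MaterializeResult(asset_key="users")
+```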
      -### Graph-backed assets +### Graph-backed asset definitions -Dagster supports composing a set of ops into an op graph, usually by using the decorator. A software-defined asset can be backed by an **op graph**, instead of an op. +Dagster supports composing a set of ops into an op graph, usually by using the decorator. An asset definition can be backed by an **op graph**, instead of an op. @@ -138,18 +138,16 @@ from dagster import asset, job, op @asset -def emails_to_send(): - ... +def emails_to_send(): ... @op -def send_emails(emails) -> None: - ... +def send_emails(emails) -> None: ... @job def send_emails_job(): - send_emails(emails_to_send.to_source_asset()) + send_emails(emails_to_send.get_asset_spec()) ``` In this case, the asset - specifically, the table the job reads from - is only used as a data source for the job. It’s not materialized when the graph is run. diff --git a/docs/content/guides/dagster/managing-ml.mdx b/docs/content/guides/dagster/managing-ml.mdx index a55d80c806406..4df5b51bca934 100644 --- a/docs/content/guides/dagster/managing-ml.mdx +++ b/docs/content/guides/dagster/managing-ml.mdx @@ -28,22 +28,20 @@ You might have thought about your data sources, feature sets, and the best model Whether you have a large or small model, Dagster can help automate data refreshes and model training based on your business needs. -[Auto-materializing assets](/concepts/assets/asset-auto-execution) can be used to update a machine learning model when the upstream data is updated. This can be done by setting the `AutoMaterializePolicy` to `eager`, which means that our machine learning model asset will be refreshed anytime our data asset is updated. +Auto-materializing assets can be used to update a machine learning model when the upstream data is updated. This can be done by setting the `AutoMaterializePolicy` to `eager`, which means that our machine learning model asset will be refreshed anytime our data asset is updated. ```python file=/guides/dagster/managing_ml/managing_ml_code.py startafter=eager_materilization_start endbefore=eager_materilization_end from dagster import AutoMaterializePolicy, asset @asset -def my_data(): - ... +def my_data(): ... @asset( auto_materialize_policy=AutoMaterializePolicy.eager(), ) -def my_ml_model(my_data): - ... +def my_ml_model(my_data): ... ``` Some machine learning models might more be cumbersome to retrain; it also might be less important to update them as soon as new data arrives. For this, a lazy auto-materialization policy which can be used in two different ways. The first, by using it with a `freshness_policy` as shown below. In this case, `my_ml_model` will only be auto-materialized once a week. @@ -53,16 +51,14 @@ from dagster import AutoMaterializePolicy, asset, FreshnessPolicy @asset -def my_other_data(): - ... +def my_other_data(): ... @asset( auto_materialize_policy=AutoMaterializePolicy.lazy(), freshness_policy=FreshnessPolicy(maximum_lag_minutes=7 * 24 * 60), ) -def my_other_ml_model(my_other_data): - ... +def my_other_ml_model(my_other_data): ... ``` This can be useful if you know that you want your machine learning model retrained at least once a week. While Dagster allows you to refresh a machine learning model as often as you like, best practice is to re-train as seldomly as possible. Model retraining can be costly to compute and having a minimal number of model versions can reduce the complexity of reproducing results at a later point in time. 
In this case, the model is updated once a week for `predictions`, ensuring that `my_ml_model` is retrained before it is used. @@ -72,24 +68,21 @@ from dagster import AutoMaterializePolicy, FreshnessPolicy, asset @asset -def some_data(): - ... +def some_data(): ... @asset(auto_materialize_policy=AutoMaterializePolicy.lazy()) -def some_ml_model(some_data): - ... +def some_ml_model(some_data): ... @asset( auto_materialize_policy=AutoMaterializePolicy.lazy(), freshness_policy=FreshnessPolicy(maximum_lag_minutes=7 * 24 * 60), ) -def predictions(some_ml_model): - ... +def predictions(some_ml_model): ... ``` -A more traditional schedule can also be used to update machine learning assets, causing them to be re-materialized or retrained on the latest data. For example, setting up a [cron schedule on a daily basis](/concepts/partitions-schedules-sensors/schedules). +A more traditional schedule can also be used to update machine learning assets, causing them to be re-materialized or retrained on the latest data. For example, setting up a [cron schedule on a daily basis](/concepts/automation/schedules). This can be useful if you have data that is also being scheduled on a cron schedule and want to add your machine model jobs to run on a schedule as well. diff --git a/docs/content/guides/dagster/migrating-to-pythonic-resources-and-config.mdx b/docs/content/guides/dagster/migrating-to-pythonic-resources-and-config.mdx index 9c5a945afc9f7..7502a518402e3 100644 --- a/docs/content/guides/dagster/migrating-to-pythonic-resources-and-config.mdx +++ b/docs/content/guides/dagster/migrating-to-pythonic-resources-and-config.mdx @@ -91,8 +91,7 @@ class FancyDbResource: def __init__(self, conn_string: str) -> None: self.conn_string = conn_string - def execute(self, query: str) -> None: - ... + def execute(self, query: str) -> None: ... @resource(config_schema={"conn_string": str}) def fancy_db_resource(context: InitResourceContext) -> FancyDbResource: @@ -124,8 +123,7 @@ from dagster import ConfigurableResource class FancyDbResource(ConfigurableResource): conn_string: str - def execute(self, query: str) -> None: - ... + def execute(self, query: str) -> None: ... ``` The attributes declared on a class inheriting from serve as the new way to declare a configuration schema. Now, however, there's a problem: You're migrating an existing codebase that contains numerous callsites to the old `fancy_db_resource` function annotated with `@resource`. You have declared the config schema twice, once on `@resource` and once on the class. This is fine for now as the config schema is simple, but for more complicated schemas this can be a problem. @@ -161,8 +159,7 @@ from dagster import AssetExecutionContext, ConfigurableResource, Definitions, as class FancyDbResource(ConfigurableResource): conn_string: str - def execute(self, query: str) -> None: - ... + def execute(self, query: str) -> None: ... @asset(required_resource_keys={"fancy_db"}) def asset_one(context: AssetExecutionContext) -> None: @@ -204,8 +201,7 @@ class FancyDbClient: def __init__(self, conn_string: str) -> None: self.conn_string = conn_string - def execute_query(self, query: str) -> None: - ... + def execute_query(self, query: str) -> None: ... 
# Alternatively could have been imported from third-party library # from fancy_db import FancyDbClient diff --git a/docs/content/guides/dagster/ml-pipeline.mdx b/docs/content/guides/dagster/ml-pipeline.mdx index 358d771be128f..3c22110c3514f 100644 --- a/docs/content/guides/dagster/ml-pipeline.mdx +++ b/docs/content/guides/dagster/ml-pipeline.mdx @@ -13,7 +13,7 @@ We will work through building a machine learning pipeline, including using asset ## Before you begin -This guide assumes you have familiarity with machine learning concepts and several Dagster concepts, including [software-defined assets](/concepts/assets/software-defined-assets) and [jobs](/concepts/ops-jobs-graphs/jobs). +This guide assumes you have familiarity with machine learning concepts and several Dagster concepts, including [asset definitions](/concepts/assets/software-defined-assets) and [jobs](/concepts/ops-jobs-graphs/jobs). --- @@ -122,7 +122,6 @@ def transformed_test_data(test_data, tfidf_vectorizer): X_test, y_test = test_data # Use the fitted tokenizer to transform the test dataset transformed_X_test = tfidf_vectorizer.transform(X_test) - transformed_y_test = np.array(y_test) y_test = y_test.fillna(0) transformed_y_test = np.array(y_test) return transformed_X_test, transformed_y_test diff --git a/docs/content/guides/dagster/recommended-project-structure.mdx b/docs/content/guides/dagster/recommended-project-structure.mdx index f89dce07ea332..e703426ae7c5a 100644 --- a/docs/content/guides/dagster/recommended-project-structure.mdx +++ b/docs/content/guides/dagster/recommended-project-structure.mdx @@ -54,6 +54,7 @@ project_fully_featured │   │   ├── recommender_model.py │   │   ├── user_story_matrix.py │   │   └── user_top_recommended_stories.py +│   ├── definitions.py │   ├── jobs.py │   ├── partitions.py │   ├── resources @@ -131,7 +132,7 @@ In this example, we grouped resources (e.g., database connections, Spark session In complex projects, we find it helpful to make resources reusable and configured with pre-defined values via . This approach allows your teammates to use a pre-defined resource set or make changes to shared resources, thus enabling more efficient project development. -This pattern also helps you easily execute jobs in different environments without code changes. In this example, we dynamically defined a code location based on the deployment in [`__init__.py`](https://github.com/dagster-io/dagster/blob/master/examples/project_fully_featured/project_fully_featured/\__init\_\_.py) and can keep all code the same across testing, local development, staging, and production. Read more about our recommendations in the [Transitioning data pipelines from Development to Production](/guides/dagster/transitioning-data-pipelines-from-development-to-production) guide. +This pattern also helps you easily execute jobs in different environments without code changes. In this example, we dynamically defined a code location based on the deployment in [`definitions.py`](https://github.com/dagster-io/dagster/blob/master/examples/project_fully_featured/project_fully_featured/definitions.py) and can keep all code the same across testing, local development, staging, and production. Read more about our recommendations in the [Transitioning data pipelines from Development to Production](/guides/dagster/transitioning-data-pipelines-from-development-to-production) guide. 
--- @@ -165,9 +166,11 @@ So far, we've discussed our recommendations for structuring a large project whic To include multiple code locations in a single project, you'll need to add a [configuration file](/guides/understanding-dagster-project-files#configuration-files) to your project: -- **If using Dagster Cloud**, add a [`dagster_cloud.yaml` file](/dagster-cloud/managing-deployments/dagster-cloud-yaml) to the root of your project. +- **If using Dagster+**, add a [`dagster_cloud.yaml` file](/dagster-plus/managing-deployments/dagster-cloud-yaml) to the root of your project. - **If developing locally or deploying to your infrastructure**, add a `workspace.yaml` file to the root of your project. Refer to the [workspace files documentation](/concepts/code-locations/workspace-files) for more info. +You can see a working example of a Dagster project that has multiple code locations in our [cloud-examples/multi-location-project repo](https://github.com/dagster-io/cloud-examples/tree/main/multi-location-project). + --- ## For tests @@ -220,7 +223,7 @@ project_fully_featured -The software-defined asset APIs sit atop of the graph/job/op APIs and enable a novel approach to orchestration that puts assets at the forefront. +The asset definition APIs sit atop the graph/job/op APIs and enable a novel approach to orchestration that puts assets at the forefront. In Dagster, an "asset" is a data product, an object produced by a data pipeline. Some examples are tables, machine learning models, or reports. -Conceptually, software-defined assets invert the typical relationship between assets and computation. Instead of defining a graph of ops and recording which assets those ops end up materializing, you define a set of assets. Each asset knows how to compute its contents from upstream assets. +Conceptually, asset definitions invert the typical relationship between assets and computation. Instead of defining a graph of ops and recording which assets those ops end up materializing, you define a set of assets. Each asset knows how to compute its contents from upstream assets. Taking a software-defined asset approach has a few main benefits: @@ -29,10 +29,10 @@ Here are our asset definitions that define tables we want to materialize. ```python file=../../assets_pandas_pyspark/assets_pandas_pyspark/assets/table_assets.py startafter=start_marker endbefore=end_marker import pandas as pd -from dagster import AssetKey, SourceAsset, asset +from dagster import AssetKey, AssetSpec, Definitions, asset from pandas import DataFrame -sfo_q2_weather_sample = SourceAsset( +sfo_q2_weather_sample = AssetSpec( key=AssetKey("sfo_q2_weather_sample"), description="Weather samples, taken every five minutes at SFO", metadata={"format": "csv"}, @@ -50,9 +50,12 @@ def daily_temperature_highs(sfo_q2_weather_sample: DataFrame) -> DataFrame: def hottest_dates(daily_temperature_highs: DataFrame) -> DataFrame: """Computes the 10 hottest dates.""" return daily_temperature_highs.nlargest(10, "max_tmpf") + + +defs = Definitions(assets=[sfo_q2_weather_sample, daily_temperature_highs, hottest_dates]) ``` -`sfo_q2_weather_sample` represents our base temperature table. It's a , meaning that we rely on it, but don't generate it. +`sfo_q2_weather_sample` represents our base temperature table. It's a table that's used by the assets in our pipeline, but that we're not responsible for generating. We define it using an `AssetSpec`, which allows us to specify its attributes without providing a function that materializes it.
`daily_temperature_highs` represents a computed asset. It's derived by taking the `sfo_q2_weather_sample` table and applying the decorated function to it. Notice that it's defined using a pure function, a function with no side effects, just logical data transformation. The code for storing and retrieving the data in persistent storage will be supplied later on in an I/O manager. This allows us to swap in different implementations in different environments. For example, in local development, we might want to store data in a local CSV file for easy testing. However, in production, we would want to store data in a data warehouse. @@ -70,8 +73,8 @@ To load definitions such as assets and resources, we use a `Definitions` object. -```python file=../../assets_pandas_pyspark/assets_pandas_pyspark/__init__.py startafter=gather_assets_start endbefore=gather_assets_end -# __init__.py +```python file=../../assets_pandas_pyspark/assets_pandas_pyspark/definitions.py startafter=gather_assets_start endbefore=gather_assets_end +# definitions.py from dagster import Definitions, load_assets_from_modules from .assets import table_assets @@ -145,8 +148,8 @@ def daily_temperature_high_diffs(daily_temperature_highs: SparkDF) -> SparkDF: Here's an extended version of `weather_assets` that contains the new asset: -```python file=../../assets_pandas_pyspark/assets_pandas_pyspark/__init__.py startafter=gather_spark_assets_start endbefore=gather_spark_assets_end -# __init__.py +```python file=../../assets_pandas_pyspark/assets_pandas_pyspark/definitions.py startafter=gather_spark_assets_start endbefore=gather_spark_assets_end +# definitions.py from dagster import Definitions, load_assets_from_modules @@ -206,9 +209,9 @@ class LocalFileSystemIOManager(ConfigurableIOManager): if isinstance(obj, PandasDF): directory = self._get_fs_path(context.asset_key) os.makedirs(directory, exist_ok=True) - open(os.path.join(directory, "_SUCCESS"), "wb").close() csv_path = os.path.join(directory, "part-00000.csv") obj.to_csv(csv_path) + open(os.path.join(directory, "_SUCCESS"), "wb").close() elif isinstance(obj, SparkDF): obj.write.format("csv").options(header="true").save( self._get_fs_path(context.asset_key), mode="overwrite" diff --git a/docs/content/guides/dagster/testing-assets.mdx b/docs/content/guides/dagster/testing-assets.mdx index 81a285a645ed8..2a4ac809d79d3 100644 --- a/docs/content/guides/dagster/testing-assets.mdx +++ b/docs/content/guides/dagster/testing-assets.mdx @@ -7,6 +7,8 @@ description: Dagster enables you to unit-test individual assets and graphs of as Creating testable and verifiable data pipelines is one of the focuses of Dagster. We believe ensuring data quality is critical for managing the complexity of data systems. Here, we'll cover how to write unit tests for individual assets, as well as for graphs of assets together. +**Note**: This guide describes approaches to unit testing assets, but **asset checks** can be used to test assets at runtime! Using asset checks, you can verify the quality of data produced by your pipelines, communicate what the data is expected to do, and more. Refer to the [Asset checks](/concepts/assets/asset-checks) documentation for more information and examples. + --- ## Prerequisites @@ -17,7 +19,7 @@ It also assumes that you have installed a test runner like [pytest](https://docs. --- -## Testing an individual asset +## Testing individual assets We'll start by writing a test for the `topstories_word_cloud` asset definition, which is an image of a word cloud of the titles of top stories on Hacker News.
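To make this concrete, here's a sketch of the kind of test this section builds toward; the asset's actual body isn't shown in this diff, so the import path, argument, and assertion below are assumptions for illustration:

```python
import pandas as pd

from tutorial_project.assets import topstories_word_cloud  # assumed import path


def test_topstories_word_cloud():
    # A small, deterministic stand-in for the upstream `topstories` asset.
    topstories = pd.DataFrame({"title": ["Example story one", "Example story two"]})

    # Invoking the asset directly runs the decorated function as plain Python.
    image = topstories_word_cloud(topstories)

    assert image is not None
```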
To run the function that derives an asset from its upstream dependencies, we can invoke it directly, as if it's a regular Python function. @@ -62,6 +64,8 @@ def test_hackernews_assets(): assert len(df) == 100 ``` +--- + ## Running the tests Use pytest, or your test runner of choice, to run the unit tests. Navigate to the top-level `tutorial_project` directory (the one that contains the `tutorial_project_tests` directory) and run: diff --git a/docs/content/guides/dagster/transitioning-data-pipelines-from-development-to-production.mdx b/docs/content/guides/dagster/transitioning-data-pipelines-from-development-to-production.mdx index 95c0e9366c34a..1a5e42d425d7d 100644 --- a/docs/content/guides/dagster/transitioning-data-pipelines-from-development-to-production.mdx +++ b/docs/content/guides/dagster/transitioning-data-pipelines-from-development-to-production.mdx @@ -102,11 +102,11 @@ def stories(items: pd.DataFrame) -> pd.DataFrame: Now we can add these assets to our `Definitions` object and materialize them via the UI as part of our local development workflow. We can pass in credentials to our `SnowflakePandasIOManager`. ```python file=/guides/dagster/development_to_production/repository/repository_v1.py startafter=start endbefore=end -# __init__.py +# definitions.py from dagster_snowflake_pandas import SnowflakePandasIOManager from dagster import Definitions -from development_to_production.assets import comments, items, stories +from development_to_production.assets.hacker_news_assets import comments, items, stories # Note that storing passwords in configuration is bad practice. It will be resolved later in the guide. resources = { @@ -163,7 +163,7 @@ We want to store the assets in a production Snowflake database, so we need to up Instead, we can determine the configuration for resources based on the environment: ```python file=/guides/dagster/development_to_production/repository/repository_v2.py startafter=start endbefore=end -# __init__.py +# definitions.py # Note that storing passwords in configuration is bad practice. It will be resolved soon. resources = { @@ -207,7 +207,7 @@ We still have some problems with this setup: We can easily solve these problems using `EnvVar`, which lets us source configuration for resources from environment variables. This allows us to store Snowflake configuration values as environment variables and point the I/O manager to those environment variables: ```python file=/guides/dagster/development_to_production/repository/repository_v3.py startafter=start endbefore=end -# __init__.py +# definitions.py resources = { @@ -241,7 +241,7 @@ defs = Definitions( Depending on your organization’s Dagster setup, there are a couple of options for a staging environment. -- **For Dagster Cloud users**, we recommend using [Branch Deployments](/dagster-cloud/managing-deployments/branch-deployments) as your staging step. A branch deployment is a new Dagster deployment that is automatically generated for each git branch. Check out our [comprehensive guide to branch deployments](/guides/dagster/branch_deployments) to learn how to use branch deployments to verify data pipelines before deploying them to production. +- **For Dagster+ users**, we recommend using [Branch Deployments](/dagster-plus/managing-deployments/branch-deployments) as your staging step. A branch deployment is a new Dagster deployment that is automatically generated for each git branch.
Check out our [comprehensive guide to branch deployments](/guides/dagster/branch_deployments) to learn how to use branch deployments to verify data pipelines before deploying them to production. - **For a self-hosted staging deployment**, we’ve already done most of the necessary work to run our assets in staging! All we need to do is add another entry to the `resources` dictionary and set `DAGSTER_DEPLOYMENT=staging` in our staging deployment. diff --git a/docs/content/guides/dagster/using-environment-variables-and-secrets.mdx b/docs/content/guides/dagster/using-environment-variables-and-secrets.mdx index 4a9eb75423d2f..3221b132df485 100644 --- a/docs/content/guides/dagster/using-environment-variables-and-secrets.mdx +++ b/docs/content/guides/dagster/using-environment-variables-and-secrets.mdx @@ -29,7 +29,7 @@ DATABASE_USERNAME=salesteam DATABASE_PASSWORD=supersecretstagingpassword ``` -If Dagster detects a `.env` file in the same folder where `dagster-webserver` or `dagster-daemon` is launched, it will automatically load the environment variables in the file. This also applies to variables [exported from Dagster Cloud](/dagster-cloud/managing-deployments/environment-variables-and-secrets#exporting-variables-to-a-env-file). +If Dagster detects a `.env` file in the same folder where `dagster-webserver` or `dagster-daemon` is launched, it will automatically load the environment variables in the file. This also applies to variables [exported from Dagster+](/dagster-plus/managing-deployments/environment-variables-and-secrets#exporting-variables-to-a-env-file). When using a `.env` file, keep the following in mind: @@ -37,18 +37,18 @@ When using a `.env` file, keep the following in mind: - Any time the `.env` file is modified, the workspace must be re-loaded to make the Dagster webserver/UI aware of the changes
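As a minimal sketch of how those values might then be consumed, a resource can reference them with `EnvVar`; the resource class here is hypothetical, while the `DATABASE_*` names come from the sample `.env` file above:

```python
from dagster import ConfigurableResource, Definitions, EnvVar


class DatabaseResource(ConfigurableResource):
    """Hypothetical database connection, used only for illustration."""

    username: str
    password: str


defs = Definitions(
    resources={
        "database": DatabaseResource(
            # Resolved from the environment at runtime, including values
            # Dagster loaded from a detected .env file.
            username=EnvVar("DATABASE_USERNAME"),
            password=EnvVar("DATABASE_PASSWORD"),
        )
    },
)
```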
      - + -### Dagster Cloud +### Dagster+ -Environment variables can be set a variety of ways in Dagster Cloud: +Environment variables can be set in a variety of ways in Dagster+: - Directly in the UI - Via agent configuration (Hybrid deployments only) -If using the UI, you can also [export locally-scoped variables to a `.env` file](/dagster-cloud/managing-deployments/environment-variables-and-secrets#exporting-variables-to-a-env-file), which you can then use to develop locally. +If using the UI, you can also [export locally-scoped variables to a `.env` file](/dagster-plus/managing-deployments/environment-variables-and-secrets#exporting-variables-to-a-env-file), which you can then use to develop locally. -Refer to the [Dagster Cloud environment variables guide](/dagster-cloud/managing-deployments/environment-variables-and-secrets) for more info. +Refer to the [Dagster+ environment variables guide](/dagster-plus/managing-deployments/environment-variables-and-secrets) for more info. @@ -94,7 +94,7 @@ import os database_name = os.getenv("DATABASE_NAME") ``` -This approach also works for accessing [built-in environment variables for Dagster Cloud](/dagster-cloud/managing-deployments/environment-variables-and-secrets#built-in-environment-variables): +This approach also works for accessing [built-in environment variables for Dagster+](/dagster-plus/managing-deployments/reserved-environment-variables): ```python import os @@ -102,7 +102,15 @@ deployment_name = os.getenv("DAGSTER_CLOUD_DEPLOYMENT_NAME") ``` -Refer to the [Dagster Cloud Branch Deployments example](#example-2-dagster-cloud-branch-deployments) for a real-world example. +Refer to the [Dagster+ Branch Deployments example](#example-2-dagster-branch-deployments) for a real-world example. + +You can also call the `get_value()` method on the `EnvVar`: + +```python +from dagster import EnvVar + +database_name = EnvVar('DATABASE_NAME').get_value() +``` ### From Dagster configuration @@ -125,6 +133,12 @@ For example: "access_token": EnvVar("GITHUB_ACCESS_TOKEN") ``` +And when specifying an integer: + +```python +"database_port": EnvVar.int("DATABASE_PORT") +``` + @@ -190,7 +204,7 @@ Let's review what's happening here: As storing secrets in configuration is bad practice, we'll opt for using an environment variable. In this code, we're configuring the resource and supplying it to our assets: ```python file=/guides/dagster/using_environment_variables_and_secrets/repository.py startafter=start endbefore=end -# __init__.py +# definitions.py from my_dagster_project import assets from my_dagster_project.resources import GithubClientResource @@ -217,7 +231,7 @@ Let's review what's happening here: Using environment variables, you define how your code should execute at runtime.
- [Per-environment configuration](#example-1-per-environment-configuration) -- [Dagster Cloud Branch Deployments](#example-2-dagster-cloud-branch-deployments) +- [Dagster+ Branch Deployments](#example-2-dagster-branch-deployments) ### Example 1: Per-environment configuration @@ -226,7 +240,7 @@ In this example, we'll demonstrate how to use different I/O manager configuratio This example is adapted from the [Transitioning data pipelines from development to production guide](/guides/dagster/transitioning-data-pipelines-from-development-to-production): ```python file=/guides/dagster/using_environment_variables_and_secrets/repository_v2.py startafter=start_new endbefore=end_new -# __init__.py +# definitions.py resources = { "local": { @@ -262,11 +276,11 @@ Let's review what's happening here: - For both `local` and `production`, we constructed the I/O manager using environment-specific run configuration. Note the differences in configuration between `local` and `production`, specifically where environment variables were used. - Following the `resources` dictionary, we define the `deployment_name` variable, which determines the current executing environment. This variable defaults to `local`, ensuring that `DAGSTER_DEPLOYMENT=PRODUCTION` must be set to use the `production` configuration. -### Example 2: Dagster Cloud Branch Deployments +### Example 2: Dagster+ Branch Deployments -This section is applicable only to Dagster Cloud. +This section is applicable only to Dagster+. -Adapted from the [Testing against production with Dagster Cloud Branch Deployments guide](/guides/dagster/branch_deployments), this example demonstrates how to determine the current deployment type at runtime - [Branch Deployment](/dagster-cloud/managing-deployments/branch-deployments) or full deployment - without using resources or configuration. +Adapted from the [Testing against production with Dagster+ Branch Deployments guide](/guides/dagster/branch_deployments), this example demonstrates how to determine the current deployment type at runtime - [Branch Deployment](/dagster-plus/managing-deployments/branch-deployments) or full deployment - without using resources or configuration. Let's look at a function that determines the current deployment using the `DAGSTER_CLOUD_IS_BRANCH_DEPLOYMENT` environment variable: @@ -328,11 +342,11 @@ Using this info, we can write code that executes differently when in a Branch De changes.
    • - If using Dagster Cloud: + If using Dagster+:
      • Verify that the environment variable is{" "} - + scoped to the environment and code location {" "} if using the built-in secrets manager @@ -340,7 +354,7 @@ Using this info, we can write code that executes differently when in a Branch De
      • Verify that the environment variable was correctly configured and added to your{" "} - + agent's configuration
      • diff --git a/docs/content/guides/experimental-features.mdx b/docs/content/guides/experimental-features.mdx index 0903eac9f4e5f..cd67e1fb43ca9 100644 --- a/docs/content/guides/experimental-features.mdx +++ b/docs/content/guides/experimental-features.mdx @@ -19,3 +19,5 @@ title: Advanced and experimental feature guides | Dagster Docs ## Experimental features - [Asset versioning and caching](/guides/dagster/asset-versioning-and-caching) - Memoize assets using Dagster's data versioning system + +- [Linking to asset definition code with code references](/guides/dagster/code-references) - Attach code references to your Dagster assets to easily navigate to the code that backs the asset diff --git a/docs/content/guides/general.mdx b/docs/content/guides/general.mdx index 97c6d1a600f47..f8c9c93e6dd54 100644 --- a/docs/content/guides/general.mdx +++ b/docs/content/guides/general.mdx @@ -14,19 +14,21 @@ title: General Dagster Guides | Dagster Docs - [Transitioning data pipelines from development to production](/guides/dagster/transitioning-data-pipelines-from-development-to-production) - Learn how to seamlessly transition your Dagster pipelines from local development to production -- [Testing against production with Dagster Cloud Branch Deployments](/guides/dagster/branch_deployments) - Use Dagster Cloud Branch Deployments to quickly iterate on your Dagster code without impacting production data +- [Testing against production with Dagster+ Branch Deployments](/guides/dagster/branch_deployments) - Use Dagster+ Branch Deployments to quickly iterate on your Dagster code without impacting production data --- ## Working with data assets -- [Understanding how assets relate to ops and graphs](/guides/dagster/how-assets-relate-to-ops-and-graphs) - Learn how software-defined assets relate to ops and graphs, and when to use one over the other +- [Understanding how assets relate to ops and graphs](/guides/dagster/how-assets-relate-to-ops-and-graphs) - Learn how asset definitions relate to ops and graphs, and when to use one over the other -- [Moving to Software-defined Assets](/guides/dagster/enriching-with-software-defined-assets) - Already using ops and graphs, but not Software-defined Assets? Learn why and how to use Software-defined Assets +- [Moving to asset definitions](/guides/dagster/enriching-with-software-defined-assets) - Already using ops and graphs, but not asset definitions? 
Learn why and how to use asset definitions -- [Using Software-defined assets with Pandas and PySpark](/guides/dagster/software-defined-assets) - A quick introduction to Software-defined Assets, featuring Pandas and PySpark +- [Using asset checks to check data freshness](/concepts/assets/asset-checks/checking-for-data-freshness) - Use freshness checks, a type of [asset check](/concepts/assets/asset-checks), to identify the data assets that are overdue for an update -- [Testing assets](/guides/dagster/testing-assets) - Learn to test your Software-defined Assets +- [Using asset definitions with Pandas and PySpark](/guides/dagster/software-defined-assets) - A quick introduction to asset definitions, featuring Pandas and PySpark + +- [Testing assets](/guides/dagster/testing-assets) - Learn to test your asset definitions - [Migrating to Pythonic resources and config](/guides/dagster/migrating-to-pythonic-resources-and-config) - Incrementally migrate existing Dagster codebases to Pythonic resources and config diff --git a/docs/content/guides/integrations/approaches-to-writing-integrations.mdx b/docs/content/guides/integrations/approaches-to-writing-integrations.mdx new file mode 100644 index 0000000000000..c30ca82419327 --- /dev/null +++ b/docs/content/guides/integrations/approaches-to-writing-integrations.mdx @@ -0,0 +1,116 @@ +--- +title: "Approaches to writing integrations" +--- + +# Approaches to writing integrations + +There are many approaches to writing integrations in Dagster. The choice of approach depends on the specific requirements of the integration, the level of control needed, and the complexity of the external system being integrated. By reviewing the pros and cons of each approach, it is possible to make an informed decision on the best method for a specific use case. The following are typical approaches that align with Dagster's best practices. + +- Resource providers +- Factory methods +- Multi-asset decorators +- Pipes protocol + +## Resource providers + +One of the most fundamental features that can be implemented in an integration is a resource object to interface with an external service. For example, the `dagster-snowflake` integration provides a custom [SnowflakeResource](https://github.com/dagster-io/dagster/blob/master/python_modules/libraries/dagster-snowflake/dagster_snowflake/resources.py) that is a wrapper around the Snowflake `connector` object. + +### Pros + +- **Simple:** Implementing a resource wrapper is often the first step in fleshing out a fully-featured integration. +- **Reusable:** Resources are a core building block in the Dagster ecosystem, and allow you to reuse code across assets. + +### Cons + +- **Low-level abstraction:** While the resource can be re-used throughout the codebase, it does not provide any higher-level abstraction to assets or jobs. + +### Guide + +A guide for writing a resource-based integration is coming soon! + +## Factory methods + +The factory pattern is used for creating multiple similar objects based on a set of specifications. This is often useful in data engineering when you have similar processing that will operate on multiple objects with varying parameters. + +For example, imagine you would like to perform an operation on a set of tables in a database. You could construct a factory method that takes in a table specification, resulting in a list of assets.
+ +```python +from dagster import Definitions, asset + +parameters = [ + {"name": "asset1", "table": "users"}, + {"name": "asset2", "table": "orders"}, +] + + +def process_table(table_name: str) -> None: + pass + + +def build_asset(params): + @asset(name=params["name"]) + def _asset(): + process_table(params["table"]) + + return _asset + + +assets = [build_asset(params) for params in parameters] + +defs = Definitions(assets=assets) +``` + +### Pros + +- **Flexibility:** Allows for fine-grained control over the integration logic. +- **Modularity:** Easy to reuse components across different assets and jobs. +- **Explicit configuration:** Resources can be explicitly configured, making it clear what dependencies are required. + +### Cons + +- **Complexity:** Can be more complex to set up compared to other methods. +- **Boilerplate code:** May require more boilerplate code to define assets, resources, and jobs. + +### Guide + + + A guide for writing a factory-method-based integration is coming soon! + + +## Multi-asset decorators + +In the scenario where a single API call or configuration can result in multiple assets, with a shared runtime or dependencies, one may consider creating a multi-asset decorator. Example implementations of this approach include [dbt](https://github.com/dagster-io/dagster/tree/master/python_modules/libraries/dagster-dbt), [dlt](https://github.com/dagster-io/dagster/tree/master/python_modules/libraries/dagster-embedded-elt/dagster_embedded_elt/dlt), and [Sling](https://github.com/dagster-io/dagster/tree/master/python_modules/libraries/dagster-embedded-elt/dagster_embedded_elt/sling). + +### Pros + +- **Efficiency:** Allows defining multiple assets in a single function, reducing boilerplate code. +- **Simplicity:** Easier to manage related assets together. +- **Consistency:** Matches the developer experience of the Dagster ecosystem by using decorator-based assets. + +### Cons + +- **Less granular control:** May not provide as much fine-grained control as defining individual assets. +- **Complexity in debugging:** Debugging issues can be more challenging when multiple assets are defined in a single function. + +### Guide + +- [Writing a multi-asset decorator integration](/guides/integrations/writing-a-multi-asset-decorator-integration) + +## Pipes protocol + +The Pipes protocol is used to integrate with systems that have their own execution environments. It enables running code in these external environments while allowing Dagster to maintain control and visibility. Example implementations of this approach include [AWS Lambda](https://github.com/dagster-io/dagster/tree/d4b4d5beabf6475c7279b7f02f893a506bca0bb0/python_modules/libraries/dagster-aws/dagster_aws/pipes), [Databricks](https://github.com/dagster-io/dagster/blob/d4b4d5beabf6475c7279b7f02f893a506bca0bb0/python_modules/libraries/dagster-databricks/dagster_databricks/pipes.py), and [Kubernetes](https://github.com/dagster-io/dagster/blob/d4b4d5beabf6475c7279b7f02f893a506bca0bb0/python_modules/libraries/dagster-k8s/dagster_k8s/pipes.py). + +### Pros + +- **Separation of environments:** Allows running code in external environments, which can be useful for integrating with systems that have their own execution environments. + +- **Flexibility:** Can integrate with a wide range of external systems and languages. + +- **Streaming logs and metadata:** Provides support for streaming logs and structured metadata back into Dagster. + +### Cons + +- **Complexity:** Can be complex to set up and configure.
+- **Overhead:** May introduce additional overhead for managing external environments. + +### Guide + +- [Dagster Pipes details and customization](/concepts/dagster-pipes/dagster-pipes-details-and-customization) diff --git a/docs/content/guides/integrations/writing-a-multi-asset-decorator-integration.mdx b/docs/content/guides/integrations/writing-a-multi-asset-decorator-integration.mdx new file mode 100644 index 0000000000000..ec33dd70f05d9 --- /dev/null +++ b/docs/content/guides/integrations/writing-a-multi-asset-decorator-integration.mdx @@ -0,0 +1,395 @@ +--- +title: "Writing a multi-asset decorator integration | Dagster Docs" +description: "This guide walks through how to write a decorator based multi-asset integration." +--- + +# Writing a multi-asset decorator integration + +When working in the Dagster ecosystem, you may have noticed that decorators are frequently used. For example, assets, jobs, and ops use decorators. If you have a service that produces many assets, it's possible to define it as a multi-asset decorator, offering a developer experience that is consistent with existing Dagster APIs. + +In the context of Dagster, decorators are helpful because they often wrap some form of processing. For example, when writing an asset, you define your processing code and then annotate the function with the decorator. Then, the internal Dagster code can register the asset, assign metadata, pass in context data, or perform any other variety of operations that are required to integrate your asset code with the Dagster platform. + +In this guide, you'll learn how to develop a multi-asset integration for a hypothetical replication tool. + +--- + +## Prerequisites + +To follow the steps in this guide, you'll need: + +- Familiarity with Dagster +- An understanding of Python decorators — [Real Python's Primer on Python Decorators](https://realpython.com/primer-on-python-decorators/) is a fantastic introduction + +--- + +## Step 1: Input + +For this guide, let's imagine a tool that replicates data between two databases. It's configured using a `replication.yaml` configuration file, in which a user is able to define source and destination databases, along with the tables that they would like to replicate between these systems. + +```yaml +connections: + source: + type: duckdb + connection: example.duckdb + destination: + type: postgres + connection: postgresql://postgres:postgres@localhost/postgres + +tables: + - name: users + primary_key: id + - name: products + primary_key: id + - name: activity + primary_key: id +``` + +For the integration we're building, we want to provide a multi-asset that encompasses this replication process and generates an asset for each table being replicated. + +We will define a dummy function named `replicate` that will mock the replication process and yield a dictionary with the replication status of each table. In the real world, this could be a function in a library, or a call to a command-line tool. + +```python +import yaml + +from pathlib import Path +from typing import Mapping, Iterator, Any + + +def replicate(replication_configuration_yaml: Path) -> Iterator[Mapping[str, Any]]: + data = yaml.safe_load(replication_configuration_yaml.read_text()) + for table in data.get("tables"): + # < perform replication here, and get status > + yield {"table": table.get("name"), "status": "success"} +``` + +--- + +## Step 2: Implementation + +First, let's define a `Project` object that takes in the path of our configuration YAML file.
This will allow us to encapsulate the logic that gets metadata and table information from our project configuration. + +```python +import yaml +from pathlib import Path + + +class ReplicationProject: + def __init__(self, replication_configuration_yaml: str): + self.replication_configuration_yaml = replication_configuration_yaml + + def load(self): + return yaml.safe_load(Path(self.replication_configuration_yaml).read_text()) +``` + +Next, define a function that returns a `multi_asset` function. The `multi_asset` function is a decorator itself, so this allows us to customize the behavior of `multi_asset` and create a new decorator of our own: + +```python +from typing import Any, Callable, Optional + +from dagster import AssetsDefinition, AssetSpec, multi_asset + + +def custom_replication_assets( + *, + replication_project: ReplicationProject, + name: Optional[str] = None, + group_name: Optional[str] = None, +) -> Callable[[Callable[..., Any]], AssetsDefinition]: + project = replication_project.load() + + return multi_asset( + name=name, + group_name=group_name, + specs=[ + AssetSpec( + key=table.get("name"), + ) + for table in project.get("tables") + ], + ) +``` + +Let's review what this code does: + +- Defines a function that returns a `multi_asset` function +- Loads our replication project and iterates over the tables defined in the input YAML file +- Uses the tables to create a list of `AssetSpec` objects and passes them to the `specs` parameter, thus defining assets that will be visible in the Dagster UI + +Next, we'll show you how to perform the execution of the replication function. + +Recall that decorators allow us to wrap a function that performs some operation. In the case of our `multi_asset`, we defined `AssetSpec` objects for our tables, and the actual processing that takes place will be in the body of the decorated function. + +In this function, we will perform the replication, and then yield `AssetMaterialization` objects indicating that the replication was successful for a given table. + +```python +from pathlib import Path + +from dagster import AssetExecutionContext, AssetMaterialization + + +replication_project_path = "replication.yaml" +replication_project = ReplicationProject(replication_project_path) + + +@custom_replication_assets( + replication_project=replication_project, + name="my_custom_replication_assets", + group_name="replication", +) +def my_assets(context: AssetExecutionContext): + results = replicate(Path(replication_project_path)) + for table in results: + if table.get("status") == "success": + yield AssetMaterialization(asset_key=str(table.get("name")), metadata=table) +``` + +There are a few limitations to this approach: + +- **We have not encapsulated the logic for replicating tables.** This means that users who use the `custom_replication_assets` decorator would be responsible for yielding asset materializations themselves. +- **Users can't customize the attributes of the asset.** + +For the first limitation, we can resolve this by refactoring the code in the body of our asset function into a Dagster resource. + +--- + +## Step 3: Moving the replication logic into a resource + +Refactoring the replication logic into a resource enables us to support better configurability and reusability of our logic. + +To accomplish this, we will extend the `ConfigurableResource` class to create a custom resource.
Then, we will define a `run` method that will perform the replication operation: + +```python +from pathlib import Path +from typing import Iterator + +from dagster import AssetMaterialization, ConfigurableResource +from dagster._annotations import public + + +class ReplicationResource(ConfigurableResource): + @public + def run( + self, replication_project: ReplicationProject + ) -> Iterator[AssetMaterialization]: + results = replicate(Path(replication_project.replication_configuration_yaml)) + for table in results: + if table.get("status") == "success": + # NOTE: this assumes that the table name is the same as the asset key + yield AssetMaterialization( + asset_key=str(table.get("name")), metadata=table + ) +``` + +Now, we can refactor our `custom_replication_assets` instance to use this resource: + +```python +@custom_replication_assets( + replication_project=replication_project, + name="my_custom_replication_assets", + group_name="replication", +) +def my_assets(replication_resource: ReplicationResource): + replication_resource.run(replication_project) +``` + +--- + +## Step 4: Using translators + +At the end of [Step 2](#step-2-implementation), we mentioned that end users were unable to customize asset attributes, like the asset key, generated by our decorator. Translator classes are the recommended way of defining this logic, and they provide users with the option to override the default methods used to convert a concept from your tool (e.g. a table name) to the corresponding concept in Dagster (e.g. asset key). + +To start, we will define a translator method to map the table specification to a Dagster asset key. **Note**: in a real-world integration you will want to define methods for all common attributes like dependencies, group names, and metadata. + +```python +from dataclasses import dataclass +from typing import Mapping + +from dagster import AssetKey, _check as check +from dagster._annotations import public + + +@dataclass +class ReplicationTranslator: + @public + def get_asset_key(self, table_definition: Mapping[str, str]) -> AssetKey: + return AssetKey(str(table_definition.get("name"))) +``` + +Next, we'll update `custom_replication_assets` to use the translator when defining the `key` on the `AssetSpec`. Note that we took this opportunity to also include the replication project and translator instance on the `AssetSpec` metadata. This is a workaround that we tend to employ in this approach, as it makes it possible to define these objects once and then access them from the context of our asset. + +```python +def custom_replication_assets( + *, + replication_project: ReplicationProject, + name: Optional[str] = None, + group_name: Optional[str] = None, + translator: Optional[ReplicationTranslator] = None, +) -> Callable[[Callable[..., Any]], AssetsDefinition]: + project = replication_project.load() + + translator = ( + check.opt_inst_param(translator, "translator", ReplicationTranslator) + or ReplicationTranslator() + ) + + return multi_asset( + name=name, + group_name=group_name, + specs=[ + AssetSpec( + key=translator.get_asset_key(table), + metadata={ + "replication_project": project, + "replication_translator": translator, + }, + ) + for table in project.get("tables") + ], + ) +``` + +Finally, we have to update our resource to use the translator and project provided in the metadata. We are using the `check.inst` function provided by `dagster._check` to ensure that the type of the object is appropriate as we retrieve it from the metadata.
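For readers unfamiliar with `dagster._check`, here is a minimal sketch of the two helpers used in this guide; the `Widget` class is purely illustrative:

```python
from dagster import _check as check


class Widget:
    pass


# check.inst returns the value unchanged when it is an instance of the
# expected type, and raises a CheckError otherwise.
widget = check.inst(Widget(), Widget)

# check.opt_inst_param does the same for optional parameters: it accepts
# None (and returns None) or an instance of the expected type.
maybe_widget = check.opt_inst_param(None, "maybe_widget", Widget)
```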
+ +Now, we can use the same `translator.get_asset_key` when yielding the asset materialization, thus ensuring that our asset declarations match our asset materializations: + +```python +class ReplicationResource(ConfigurableResource): + @public + def run(self, context: AssetExecutionContext) -> Iterator[AssetMaterialization]: + metadata_by_key = context.assets_def.metadata_by_key + first_asset_metadata = next(iter(metadata_by_key.values())) + + project = check.inst( + first_asset_metadata.get("replication_project"), + ReplicationProject, + ) + + translator = check.inst( + first_asset_metadata.get("replication_translator"), + ReplicationTranslator, + ) + + results = replicate(Path(project.replication_configuration_yaml)) + for table in results: + if table.get("status") == "success": + yield AssetMaterialization( + asset_key=translator.get_asset_key(table), metadata=table + ) +``` + +--- + +## Conclusion + +In this guide, we walked through how to define a custom multi-asset decorator, a resource for encapsulating tool logic, and a translator for defining the logic to translate a specification to Dagster concepts. + +Defining integrations with this approach aligns nicely with the overall development paradigm of Dagster, and is suitable for tools that generate many assets. + +The code in its entirety can be seen below: + +```python +from dataclasses import dataclass +from pathlib import Path +from typing import Any, Callable, Iterator, Mapping, Optional + +import yaml + +from dagster import ( + AssetExecutionContext, + AssetKey, + AssetMaterialization, + AssetsDefinition, + AssetSpec, + ConfigurableResource, + multi_asset, +) +from dagster import _check as check +from dagster._annotations import public + + +def replicate(replication_configuration_yaml: Path) -> Iterator[Mapping[str, Any]]: + data = yaml.safe_load(replication_configuration_yaml.read_text()) + for table in data.get("tables"): + # < perform replication here, and get status > + yield {"table": table.get("name"), "status": "success"} + + +class ReplicationProject: + def __init__(self, replication_configuration_yaml: str): + self.replication_configuration_yaml = replication_configuration_yaml + + def load(self): + return yaml.safe_load(Path(self.replication_configuration_yaml).read_text()) + + +class ReplicationResource(ConfigurableResource): + @public + def run(self, context: AssetExecutionContext) -> Iterator[AssetMaterialization]: + metadata_by_key = context.assets_def.metadata_by_key + first_asset_metadata = next(iter(metadata_by_key.values())) + + project = check.inst( + first_asset_metadata.get("replication_project"), + ReplicationProject, + ) + + translator = check.inst( + first_asset_metadata.get("replication_translator"), + ReplicationTranslator, + ) + + results = replicate(Path(project.replication_configuration_yaml)) + for table in results: + if table.get("status") == "success": + yield AssetMaterialization( + asset_key=translator.get_asset_key(table), metadata=table + ) + + +@dataclass +class ReplicationTranslator: + @public + def get_asset_key(self, table_definition: Mapping[str, str]) -> AssetKey: + return AssetKey(str(table_definition.get("name"))) + + +def custom_replication_assets( + *, + replication_project: ReplicationProject, + name: Optional[str] = None, + group_name: Optional[str] = None, + translator: Optional[ReplicationTranslator] = None, +) -> Callable[[Callable[..., Any]], AssetsDefinition]: + project = replication_project.load() + + translator = ( + check.opt_inst_param(translator, "translator", 
ReplicationTranslator) + or ReplicationTranslator() + ) + + return multi_asset( + name=name, + group_name=group_name, + specs=[ + AssetSpec( + key=translator.get_asset_key(table), + metadata={ + "replication_project": project, + "replication_translator": translator, + }, + ) + for table in project.get("tables") + ], + ) +``` + +--- + +## Related + + + + + + + diff --git a/docs/content/guides/limiting-concurrency-in-data-pipelines.mdx b/docs/content/guides/limiting-concurrency-in-data-pipelines.mdx index 88a2a2cf3ac15..010eb5a11e278 100644 --- a/docs/content/guides/limiting-concurrency-in-data-pipelines.mdx +++ b/docs/content/guides/limiting-concurrency-in-data-pipelines.mdx @@ -55,14 +55,14 @@ Concurrency is the ability of a system to execute multiple processes in parallel
    • - Software-defined asset + Asset definition - A Software-defined asset is a Dagster object that couples an asset to - the function and upstream assets used to produce its contents. An asset - is an object in persistent storage, such as a table, file, or persisted + An asset definition is a Dagster object that couples an asset to the + function and upstream assets used to produce its contents. An asset is + an object in persistent storage, such as a table, file, or persisted machine learning model.
      __init__.py - The __init__.py file includes a{" "} - object that contains all the definitions defined within your project. A definition can be an asset, a job, a schedule, a sensor, or a resource. This allows Dagster to load the definitions in a module. -

      - Refer to the Code locations documentation to learn other ways to deploy and load your Dagster code. + A file required in a Python package. Refer to the Python documentation for more information.
      assets.py A Python module that contains {" "} - software-defined assets. + asset definitions.

      Note: As your project grows, we recommend organizing assets in sub-modules. For example, you can put all analytics-related assets in a my_dagster_project/assets/analytics/ folder and use in the top-level definitions to load them, rather than needing to manually add assets to the top-level definitions every time - you define one. + you define one.

      Similarly, you can also use to load assets from single Python files. Refer to the Fully featured project guide for more info and best practices.
      definitions.py + The definitions.py file includes a{" "} + object that contains all the definitions defined within your project. A definition can be an asset, a job, a schedule, a sensor, or a resource. This allows Dagster to load the definitions in a module. +

      + Refer to the Code locations documentation to learn other ways to deploy and load your Dagster code. +
      @@ -154,7 +161,7 @@ Let's take a look at what each of these files and directories does: A build script with Python package dependencies for your new project as a package. Use this file to specify dependencies.

      - Note: If using Dagster Cloud, add dagster-cloud as a dependency. + Note: If using Dagster+, add dagster-cloud as a dependency. @@ -171,7 +178,7 @@ Let's take a look at what each of these files and directories does: ## Configuration files -Depending on your use case or if you're using Dagster Cloud, you may also need to add additional configuration files to your project. Refer to the [Example project structures section](#example-project-structures) for a look at how these files might fit into your projects. +Depending on your use case or if you're using Dagster+, you may also need to add additional configuration files to your project. Refer to the [Example project structures section](#example-project-structures) for a look at how these files might fit into your projects. - Cloud + Dagster+ @@ -215,9 +222,9 @@ Depending on your use case or if you're using Dagster Cloud, you may also need t more info, including a list of use cases and available options.

      - For Dagster Cloud + For Dagster+ Hybrid deployments - , this file can be used to + , this file can be used to customize the Hybrid agent . @@ -227,8 +234,8 @@ Depending on your use case or if you're using Dagster Cloud, you may also need t + + + + ); +}; diff --git a/docs/next/components/mdx/MDXComponents.tsx b/docs/next/components/mdx/MDXComponents.tsx index dcc6b503b61a7..1f591e618604b 100644 --- a/docs/next/components/mdx/MDXComponents.tsx +++ b/docs/next/components/mdx/MDXComponents.tsx @@ -16,14 +16,18 @@ import Zoom from 'react-medium-image-zoom'; import Icons from '../Icons'; import Link from '../Link'; -import {Note, Warning} from '../markdoc/Callouts'; +// import {Note, Warning} from '../markdoc/Callouts'; import 'react-medium-image-zoom/dist/styles.css'; import {RenderedDAG} from './RenderedDAG'; import EnvVarsBenefits from './includes/EnvVarsBenefits.mdx'; import EnvironmentVariablesIntro from './includes/EnvironmentVariablesIntro.mdx'; +import ExperimentalCallout from './includes/ExperimentalCallout.mdx'; import AddGitlabVariable from './includes/dagster-cloud/AddGitlabVariable.mdx'; import AddGitubRepositorySecret from './includes/dagster-cloud/AddGitubRepositorySecret.mdx'; +import ApplicableCloudPlan from './includes/dagster-cloud/ApplicableCloudPlan.mdx'; +import ApplicableDagsterProduct from './includes/dagster-cloud/ApplicableDagsterProduct.mdx'; +import AssetCheckAlerts from './includes/dagster-cloud/AssetCheckAlerts.mdx'; import BDCreateConfigureAgent from './includes/dagster-cloud/BDCreateConfigureAgent.mdx'; import GenerateAgentToken from './includes/dagster-cloud/GenerateAgentToken.mdx'; import ScimSupportedFeatures from './includes/dagster-cloud/ScimSupportedFeatures.mdx'; @@ -125,6 +129,59 @@ const PyObject: React.FunctionComponent<{ ); }; +//////////////////////// +// Callouts // +/////////////////////// + +const ADMONITION_STYLES = { + note: { + colors: { + bg: 'primary-100', + borderIcon: 'primary-500', + text: 'gray-900', + }, + icon: Icons.About, + }, + warning: { + colors: {bg: 'yellow-50', borderIcon: 'yellow-400', text: 'yellow-700'}, + icon: Icons.About, + }, +}; + +const Admonition = ({style, children}) => { + const {colors, icon} = ADMONITION_STYLES[style]; + return ( +
      +
      + {/* Make container for the svg element that aligns it with the top right of the parent flex container */} +
      + +
      +
      +
      {children}
      +
      +
      +
      + ); +}; + +export const Note = ({children}) => { + return {children}; +}; + +export const Warning = ({children}) => { + return {children}; +}; + + ////////////////////// // CHECK // ////////////////////// @@ -803,9 +860,13 @@ export default { DagsterVersion, DagsterDevTabs, StructuredEventLogs, + ExperimentalCallout, RawComputeLogs, AddGitlabVariable, AddGitubRepositorySecret, + ApplicableCloudPlan, + ApplicableDagsterProduct, + AssetCheckAlerts, GenerateAgentToken, ScimSupportedFeatures, BDCreateConfigureAgent, diff --git a/docs/next/components/mdx/MDXRenderer.tsx b/docs/next/components/mdx/MDXRenderer.tsx index a4968737969fd..51c98f749f838 100644 --- a/docs/next/components/mdx/MDXRenderer.tsx +++ b/docs/next/components/mdx/MDXRenderer.tsx @@ -4,13 +4,11 @@ import {SHOW_VERSION_NOTICE} from 'util/version'; import cx from 'classnames'; import Icons from 'components/Icons'; -import {RightSidebar} from 'components/SidebarNavigation'; import VersionDropdown from 'components/VersionDropdown'; import MDXComponents, {SearchIndexContext} from 'components/mdx/MDXComponents'; import hydrate from 'next-mdx-remote/hydrate'; import {MdxRemote} from 'next-mdx-remote/types'; import {NextSeo} from 'next-seo'; -import {useRouter} from 'next/router'; import React from 'react'; // The next-mdx-remote types are outdated. @@ -154,64 +152,6 @@ export const VersionedContentLayout = ({children, asPath = null}) => { ); }; -export function UnversionedMDXRenderer({ - data, - toggleFeedback, - bottomContent, -}: { - data: MDXData; - toggleFeedback: any; - bottomContent?: React.ReactNode | null; -}) { - const {mdxSource, frontMatter, searchIndex, tableOfContents, githubLink, asPath} = data; - - const content = hydrate(mdxSource, { - components, - provider: { - component: searchProvider, - props: {value: searchIndex}, - }, - }); - const navigationItemsForMDX = tableOfContents.items.filter((item) => item?.items); - - return ( - <> - -
      -
      - -
      -
      - {/* Start main area*/} - -
      -
      {content}
      - {bottomContent ?? null} -
      - {/* End main area */} -
      -
      - - - - ); -} - function VersionedMDXRenderer({data}: {data: MDXData}) { const {mdxSource, frontMatter, searchIndex} = data; diff --git a/docs/next/components/mdx/includes/EnvVarsBenefits.mdx b/docs/next/components/mdx/includes/EnvVarsBenefits.mdx index a8f64a6e560da..cbcdcc74a10d2 100644 --- a/docs/next/components/mdx/includes/EnvVarsBenefits.mdx +++ b/docs/next/components/mdx/includes/EnvVarsBenefits.mdx @@ -1,5 +1,5 @@ Using the `EnvVar` approach has a few unique benefits: - **Improved observability.** The UI will display information about configuration values sourced from environment variables. -- **Secret values are hidden in the UI.** Secret values are hidden in the Launchpaid, **Resources** page, and other places where configuration is displayed. -- **Simplified testing.** Because you can provide string values directly to configuration rather than environment variables, testing may be easier. \ No newline at end of file +- **Secret values are hidden in the UI.** Secret values are hidden in the Launchpad, **Resources** page, and other places where configuration is displayed. +- **Simplified testing.** Because you can provide string values directly to configuration rather than environment variables, testing may be easier. diff --git a/docs/next/components/mdx/includes/ExperimentalCallout.mdx b/docs/next/components/mdx/includes/ExperimentalCallout.mdx new file mode 100644 index 0000000000000..8fcfff1594341 --- /dev/null +++ b/docs/next/components/mdx/includes/ExperimentalCallout.mdx @@ -0,0 +1,3 @@ + + This feature is currently experimental. + \ No newline at end of file diff --git a/docs/next/components/mdx/includes/dagster-cloud/ApplicableCloudPlan.mdx b/docs/next/components/mdx/includes/dagster-cloud/ApplicableCloudPlan.mdx new file mode 100644 index 0000000000000..c29246c640e70 --- /dev/null +++ b/docs/next/components/mdx/includes/dagster-cloud/ApplicableCloudPlan.mdx @@ -0,0 +1 @@ +This guide focuses on a Dagster+ feature. A Pro plan is required to use this feature. \ No newline at end of file diff --git a/docs/next/components/mdx/includes/dagster-cloud/ApplicableDagsterProduct.mdx b/docs/next/components/mdx/includes/dagster-cloud/ApplicableDagsterProduct.mdx new file mode 100644 index 0000000000000..69b20d3222575 --- /dev/null +++ b/docs/next/components/mdx/includes/dagster-cloud/ApplicableDagsterProduct.mdx @@ -0,0 +1 @@ +This guide is applicable to Dagster+. \ No newline at end of file diff --git a/docs/next/components/mdx/includes/dagster-cloud/AssetCheckAlerts.mdx b/docs/next/components/mdx/includes/dagster-cloud/AssetCheckAlerts.mdx new file mode 100644 index 0000000000000..d168ad09367a3 --- /dev/null +++ b/docs/next/components/mdx/includes/dagster-cloud/AssetCheckAlerts.mdx @@ -0,0 +1,5 @@ +- **Alert policy type** - Asset alert +- **Target** - The asset keys or groups you've defined checks for. You can also target all assets. +- **Events** - Under **Asset checks**, check the box for the severity you've defined for failed checks: **Failed (WARN)** or **Failed (ERROR)** + +Refer to the [Managing alert policies in Dagster+](/dagster-plus/managing-deployments/alerts/managing-alerts-in-ui) for instructions on setting up alerts in the Dagster+ UI. 
\ No newline at end of file diff --git a/docs/next/components/mdx/includes/dagster-cloud/BDCreateConfigureAgent.mdx b/docs/next/components/mdx/includes/dagster-cloud/BDCreateConfigureAgent.mdx index 70b7fda50dcf3..1d74d88a02f26 100644 --- a/docs/next/components/mdx/includes/dagster-cloud/BDCreateConfigureAgent.mdx +++ b/docs/next/components/mdx/includes/dagster-cloud/BDCreateConfigureAgent.mdx @@ -8,7 +8,7 @@ Using the tabs, select your agent type to view instructions. -1. **Deploy an ECS agent to serve your branch deployments**. Follow the [ECS agent](/dagster-cloud/deployment/agents#amazon-ecs) setup guide, making sure to set the **Enable Branch Deployments** parameter if using the CloudFormation template. If you are running an existing agent, follow the [upgrade guide](/dagster-cloud/deployment/agents/amazon-ecs/upgrading-cloudformation-template) to ensure your template is up-to-date. Then, turn on the **Enable Branch Deployments** parameter. +1. **Deploy an ECS agent to serve your branch deployments**. Follow the [ECS agent](/dagster-plus/deployment/agents#amazon-ecs) setup guide, making sure to set the **Enable Branch Deployments** parameter if using the CloudFormation template. If you are running an existing agent, follow the [upgrade guide](/dagster-plus/deployment/agents/amazon-ecs/upgrading-cloudformation-template) to ensure your template is up-to-date. Then, turn on the **Enable Branch Deployments** parameter. 2. **Create a private [Amazon Elastic Registry (ECR) repository](https://console.aws.amazon.com/ecr/repositories).** Refer to the [AWS ECR documentation](https://docs.aws.amazon.com/AmazonECR/latest/userguide/repository-create.html) for instructions. @@ -40,7 +40,7 @@ Using the tabs, select your agent type to view instructions. -1. Set up a new Docker agent. Refer to the [Docker agent setup guide](/dagster-cloud/deployment/agents/docker) for instructions. +1. Set up a new Docker agent. Refer to the [Docker agent setup guide](/dagster-plus/deployment/agents/docker) for instructions. 2. After the agent is set up, modify the `dagster.yaml` file as follows: - Set the `dagster_cloud_api.branch_deployments` field to `true` @@ -73,7 +73,7 @@ Using the tabs, select your agent type to view instructions. -1. Set up a new Kubernetes agent. Refer to the [Kubernetes agent setup guide](/dagster-cloud/deployment/agents/kubernetes/configuring-running-kubernetes-agent) for instructions. +1. Set up a new Kubernetes agent. Refer to the [Kubernetes agent setup guide](/dagster-plus/deployment/agents/kubernetes/configuring-running-kubernetes-agent) for instructions. 2. After the agent is set up, modify your Helm values file to include the following: diff --git a/docs/next/components/mdx/includes/dagster-cloud/GenerateAgentToken.mdx b/docs/next/components/mdx/includes/dagster-cloud/GenerateAgentToken.mdx index c2bba636578bd..fd4afee8f7f2b 100644 --- a/docs/next/components/mdx/includes/dagster-cloud/GenerateAgentToken.mdx +++ b/docs/next/components/mdx/includes/dagster-cloud/GenerateAgentToken.mdx @@ -1,6 +1,6 @@ -In this step, you'll generate a token for the Dagster Cloud agent. The Dagster Cloud agent will use this to authenticate to the agent API. +In this step, you'll generate a token for the Dagster+ agent. The Dagster+ agent will use this to authenticate to the agent API. -1. Sign in to your Dagster Cloud instance. +1. Sign in to your Dagster+ instance. 2. Click the **user menu (your icon) > Organization Settings**. 3. In the **Organization Settings** page, click the **Tokens** tab. 4. 
Click the **+ Create agent token** button. diff --git a/docs/next/components/mdx/includes/dagster-cloud/ScimSupportedFeatures.mdx b/docs/next/components/mdx/includes/dagster-cloud/ScimSupportedFeatures.mdx index 39943bd7a5225..6932214474619 100644 --- a/docs/next/components/mdx/includes/dagster-cloud/ScimSupportedFeatures.mdx +++ b/docs/next/components/mdx/includes/dagster-cloud/ScimSupportedFeatures.mdx @@ -1,4 +1,4 @@ -- **Create users**. Users that are assigned to the Dagster Cloud application in the IdP will be automatically added to your Dagster Cloud organization. -- **Update user attributes.** Updating a user’s name or email address in the IdP will automatically sync the change to your user list in Dagster Cloud. -- **Remove users.** Deactivating or unassigning a user from the Dagster Cloud application in the IdP will remove them from the Dagster Cloud organization -- **Push user groups.** Groups and their members in the IdP can be pushed to Dagster Cloud as [Teams](/dagster-cloud/account/managing-users/managing-teams). \ No newline at end of file +- **Create users**. Users that are assigned to the Dagster+ application in the IdP will be automatically added to your Dagster+ organization. +- **Update user attributes.** Updating a user’s name or email address in the IdP will automatically sync the change to your user list in Dagster+. +- **Remove users.** Deactivating or unassigning a user from the Dagster+ application in the IdP will remove them from the Dagster+ organization +- **Push user groups.** Groups and their members in the IdP can be pushed to Dagster+ as [Teams](/dagster-plus/account/managing-users/managing-teams). \ No newline at end of file diff --git a/docs/next/components/mdx/includes/dagster/DagsterDevTabs.mdx b/docs/next/components/mdx/includes/dagster/DagsterDevTabs.mdx index 1c43732117940..5de04bc188bc6 100644 --- a/docs/next/components/mdx/includes/dagster/DagsterDevTabs.mdx +++ b/docs/next/components/mdx/includes/dagster/DagsterDevTabs.mdx @@ -20,20 +20,24 @@ dagster dev -f my_file.py -f my_second_file.py -Dagster can also load Python modules as [code locations](/concepts/code-locations). When this approach is used, Dagster loads the definitions defined at the top-level of the module, in a variable containing the object of its root `__init__.py` file. As this style of development eliminates an entire class of Python import errors, we strongly recommend it for Dagster projects deployed to production. +Dagster can also load Python modules as [code locations](/concepts/code-locations). When this approach is used, Dagster loads the definitions defined in the module passed to the command line. -In the following example, we used the `-m` argument to supply the name of the module: +We recommend defining a variable containing the `Definitions` object in a submodule named `definitions` inside the Python module. In practice, the submodule can be created by adding a file named `definitions.py` at the root level of the Python module. + +As this style of development eliminates an entire class of Python import errors, we strongly recommend it for Dagster projects deployed to production. + +In the following example, we used the `-m` argument to supply the name of the module and where to find the definitions: ```shell -dagster dev -m your_module_name +dagster dev -m your_module_name.definitions ``` -This command loads the definitions in the variable containing the object in the named module - defined as the root `__init__.py` file - in the current Python environment. 
+This command loads the definitions in the variable containing the `Definitions` object in the `definitions` submodule in the current Python environment.

You can also include multiple modules at a time, where each module will be loaded as a code location:

```shell
-dagster dev -m your_module_name -m your_second_module
+dagster dev -m your_module_name.definitions -m your_second_module.definitions
```

---
@@ -45,7 +49,7 @@ To load definitions without supplying command line arguments, you can use the `p

```toml
[tool.dagster]
-module_name = "your_module_name" ## name of project's Python module
+module_name = "your_module_name.definitions" ## name of project's Python module and where to find the definitions
code_location_name = "your_code_location_name" ## optional, name of code location to display in the Dagster UI
```
@@ -58,7 +62,14 @@ dagster dev

Instead of this:

```shell
-dagster dev -m your_module_name
+dagster dev -m your_module_name.definitions
+```
+
+You can also include multiple modules at a time using the `pyproject.toml` file, where each module will be loaded as a code location:
+
+```toml
+[tool.dagster]
+modules = [{ type = "module", name = "foo" }, { type = "module", name = "bar" }]
```

---
diff --git a/docs/next/components/mdx/includes/dagster/DagsterVersion.mdx b/docs/next/components/mdx/includes/dagster/DagsterVersion.mdx
index b19aea0ceafdd..a40c2a516cb45 100644
--- a/docs/next/components/mdx/includes/dagster/DagsterVersion.mdx
+++ b/docs/next/components/mdx/includes/dagster/DagsterVersion.mdx
@@ -1 +1 @@
-Dagster supports Python 3.8 through 3.11.
+Dagster supports Python 3.8 through 3.12.
diff --git a/docs/next/components/mdx/includes/dagster/integrations/DbtModelAssetExplanation.mdx b/docs/next/components/mdx/includes/dagster/integrations/DbtModelAssetExplanation.mdx
index 4fb09029ff0bc..628a25cc08e2b 100644
--- a/docs/next/components/mdx/includes/dagster/integrations/DbtModelAssetExplanation.mdx
+++ b/docs/next/components/mdx/includes/dagster/integrations/DbtModelAssetExplanation.mdx
@@ -1,10 +1,10 @@
-Dagster’s [software-defined assets](/concepts/assets/software-defined-assets) (SDAs) bear several similarities to dbt models. A software-defined asset contains an asset key, a set of upstream asset keys, and an operation that is responsible for computing the asset from its upstream dependencies. Models defined in a dbt project can be interpreted as Dagster SDAs:
+Dagster’s [asset definitions](/concepts/assets/software-defined-assets) bear several similarities to dbt models. An asset definition contains an asset key, a set of upstream asset keys, and an operation that is responsible for computing the asset from its upstream dependencies. Models defined in a dbt project can be interpreted as Dagster asset definitions:

- The asset key for a dbt model is (by default) the name of the model.
- The upstream dependencies of a dbt model are defined with `ref` or `source` calls within the model's definition.
- The computation required to compute the asset from its upstream dependencies is the SQL within the model's definition.

-These similarities make it natural to interact with dbt models as SDAs. Let’s take a look at a dbt model and an SDA, in code:
+These similarities make it natural to interact with dbt models as asset definitions. Let’s take a look at a dbt model and an asset definition, in code:

// NOTE: This file should not be edited
-// see https://nextjs.org/docs/basic-features/typescript for more information.
+// see https://nextjs.org/docs/pages/building-your-application/configuring/typescript for more information. diff --git a/docs/next/package.json b/docs/next/package.json index 419eb1e5e9003..bd36351b7d4c6 100644 --- a/docs/next/package.json +++ b/docs/next/package.json @@ -3,7 +3,7 @@ "version": "0.1.0", "private": true, "scripts": { - "dev": "PORT=3001 next-remote-watch ../content", + "dev": "NEXT_PUBLIC_ENV=development PORT=3001 next-remote-watch ../content", "build": "next build", "build-master": "VERSIONING_DISABLED=true next build", "start": "next start", @@ -23,13 +23,14 @@ "@markdoc/next.js": "^0.2.2", "@mdx-js/loader": "^1.6.22", "@next/mdx": "^12.2.2", + "@next/third-parties": "^14.2.4", "@tailwindcss/forms": "^0.2.1", "@tailwindcss/typography": "^0.4.0", "@types/mdx": "^2.0.2", "add": "^2.0.6", "amator": "^1.1.0", "autoprefixer": "^10.1.0", - "axios": "^1.6.5", + "axios": "^1.7.4", "classnames": "^2.2.6", "eslint-config-next": "^13.4.19", "fast-glob": "^3.2.5", @@ -39,7 +40,7 @@ "lodash": "^4.17.21", "mdast-util-toc": "^5.1.0", "new-github-issue-url": "^0.2.1", - "next": "^13.5.4", + "next": "^14.2.10", "next-mdx-remote": "^2.1.4", "next-remote-watch": "^2.0.0", "next-seo": "^4.17.0", @@ -71,7 +72,7 @@ "@types/jest": "^29.5.3", "@types/node": "^14.14.22", "@types/react": "^18.2.47", - "eslint": "^8.45.0", + "eslint": "^8.57.0", "eslint-plugin-dagster-rules": "link:../../js_modules/dagster-ui/packages/eslint-config/rules", "html-react-parser": "^2.0.0", "jest": "^29.6.2", @@ -80,7 +81,7 @@ "remark-cli": "^9.0.0", "remark-gfm": "^1.0.0", "remark-preset-prettier": "^0.4.1", - "typescript": "5.0.4" + "typescript": "5.4.5" }, "packageManager": "yarn@3.6.4" } diff --git a/docs/next/pages/[...page].tsx b/docs/next/pages/[...page].tsx index c6d2e6e77804c..d295cc0adef46 100644 --- a/docs/next/pages/[...page].tsx +++ b/docs/next/pages/[...page].tsx @@ -56,7 +56,7 @@ function HTMLRenderer({data}: {data: HTMLData}) { /> -
      dagster_cloud.yaml - Defines code locations for Dagster Cloud. Refer to the{" "} - + Defines code locations for Dagster+. Refer to the{" "} + dagster_cloud.yaml reference {" "} for more info. @@ -239,9 +246,9 @@ Depending on your use case or if you're using Dagster Cloud, you may also need t
      deployment_settings.yaml - Configures settings for full deployments in Dagster Cloud, including run + Configures settings for full deployments in Dagster+, including run queue priority and concurrency limits. Refer to the{" "} - + Deployment settings reference {" "} for more info. @@ -291,7 +298,8 @@ For local development, a project with a single code location might look like thi ├── README.md ├── my_dagster_project │   ├── __init__.py -│   └── assets.py +│   ├── assets.py +│   └── definitions.py ├── my_dagster_project_tests ├── dagster.yaml ## optional, used for instance settings ├── pyproject.toml ## optional, used to define the project as a module @@ -312,7 +320,8 @@ For local development, a project with multiple code locations might look like th ├── README.md ├── my_dagster_project │   ├── __init__.py -│   └── assets.py +│   ├── assets.py +│   └── definitions.py ├── my_dagster_project_tests ├── dagster.yaml ## optional, used for instance settings ├── pyproject.toml @@ -338,7 +347,8 @@ A Dagster project deployed to your infrastructure might look like this: ├── README.md ├── my_dagster_project │   ├── __init__.py -│   └── assets.py +│   ├── assets.py +│   └── definitions.py ├── my_dagster_project_tests ├── dagster.yaml ## optional, used for instance settings ├── pyproject.toml @@ -348,23 +358,24 @@ A Dagster project deployed to your infrastructure might look like this: └── workspace.yaml ## defines multiple code locations ``` -### Dagster Cloud +### Dagster+ -Depending on the type of deployment you're using in Dagster Cloud - Serverless or Hybrid - your project structure might look slightly different. Click the tabs for more info. +Depending on the type of deployment you're using in Dagster+ - Serverless or Hybrid - your project structure might look slightly different. Click the tabs for more info. #### Serverless deployment -For a Dagster Cloud Serverless deployment, a project might look like this: +For a Dagster+ Serverless deployment, a project might look like this: ```shell . ├── README.md ├── my_dagster_project │   ├── __init__.py -│   └── assets.py +│   ├── assets.py +│   └── definitions.py ├── my_dagster_project_tests ├── dagster_cloud.yaml ## defines code locations ├── deployment_settings.yaml ## optional, defines settings for full deployments @@ -379,14 +390,15 @@ For a Dagster Cloud Serverless deployment, a project might look like this: #### Hybrid deployment -For a Dagster Cloud Hybrid deployment, a project might look like this: +For a Dagster+ Hybrid deployment, a project might look like this: ```shell . ├── README.md ├── my_dagster_project │   ├── __init__.py -│   └── assets.py +│   ├── assets.py +│   └── definitions.py ├── my_dagster_project_tests ├── dagster.yaml ## optional, hybrid agent custom configuration ├── dagster_cloud.yaml ## defines code locations @@ -430,7 +442,7 @@ To sustainably scale your project, check out our best practices and recommendati href="/concepts/code-locations" > diff --git a/docs/content/integrations.mdx b/docs/content/integrations.mdx index 7f95c185c72c3..122dd9934f6d3 100644 --- a/docs/content/integrations.mdx +++ b/docs/content/integrations.mdx @@ -11,11 +11,47 @@ Using our integration guides and libraries, you can extend Dagster to interopera ## Guides +Explore guides for integrations with external services. + + + Looking for deployment options? 
+
+
+
+
+
+
+
+
-
-
+
-
-
-
-
-
-
-
-
-
+
+
+
+
diff --git a/docs/content/integrations/airbyte-cloud.mdx b/docs/content/integrations/airbyte-cloud.mdx
index ab3876b176454..f32e07f1e7575 100644
--- a/docs/content/integrations/airbyte-cloud.mdx
+++ b/docs/content/integrations/airbyte-cloud.mdx
@@ -1,9 +1,9 @@
---
-title: "Using Dagster with Airbyte Cloud"
+title: "Airbyte Cloud & Dagster | Dagster Docs"
description: Integrate your Airbyte Cloud connections into Dagster.
---

-# Using Airbyte Cloud with Dagster
+# Airbyte Cloud & Dagster

Using self-hosted Airbyte? Check out the{" "}
@@ -27,9 +27,9 @@ This guide focuses on how to work with Airbyte Cloud connections using Dagster's

---

-## Airbyte Cloud connections and Dagster software-defined assets
+## Airbyte Cloud connections and Dagster assets

-An [Airbyte Cloud connection](https://docs.airbyte.com/understanding-airbyte/connections/) defines a series of data streams which are synced between a source and a destination. During a sync, a replica of the data from each data stream is written to the destination, typically as one or more tables. Dagster represents each of the replicas generated in the destination as a software-defined asset. This enables you to easily:
+An [Airbyte Cloud connection](https://docs.airbyte.com/understanding-airbyte/connections/) defines a series of data streams which are synced between a source and a destination. During a sync, a replica of the data from each data stream is written to the destination, typically as one or more tables. Dagster represents each of the replicas generated in the destination as an asset. This enables you to easily:

- Visualize the streams involved in an Airbyte Cloud connection and execute a sync from Dagster
- Define downstream computations which depend on replicas produced by Airbyte
@@ -45,7 +45,7 @@ To get started, you will need to install the `dagster` and `dagster-airbyte` Pyt
pip install dagster dagster-airbyte
```

-You'll also need to have an Airbyte Cloud account, and have created an Airbyte API Key. For more information, see the [Airbyte API docs](https://reference.airbyte.com/reference/start).
+You'll also need to have an Airbyte Cloud account, and have created an Airbyte client ID and client secret. For more information, see the [Airbyte API docs](https://reference.airbyte.com/reference/getting-started) and [Airbyte authentication guide](https://reference.airbyte.com/reference/authentication).

---
@@ -58,17 +58,18 @@ from dagster import EnvVar
from dagster_airbyte import AirbyteCloudResource

airbyte_instance = AirbyteCloudResource(
-    api_key=EnvVar("AIRBYTE_API_KEY"),
+    client_id=EnvVar("AIRBYTE_CLIENT_ID"),
+    client_secret=EnvVar("AIRBYTE_CLIENT_SECRET"),
)
```

-Here, the API key is provided using an `EnvVar`. For more information on setting environment variables in a production setting, see [Using environment variables and secrets](/guides/dagster/using-environment-variables-and-secrets).
+Here, the client ID and client secret are provided using an `EnvVar`. For more information on setting environment variables in a production setting, see [Using environment variables and secrets](/guides/dagster/using-environment-variables-and-secrets).

---

## Step 2: Building Airbyte Cloud assets using Dagster

-In order to create software-defined assets for your Airbyte Cloud connections, you will first need to determine the connection IDs for each of the connections you would like to build assets for.
The connection ID can be seen in the URL of the connection page when viewing the Airbyte Cloud UI, located between `/connections/` and `/status`.
+In order to create asset definitions for your Airbyte Cloud connections, you will first need to determine the connection IDs for each of the connections you would like to build assets for. The connection ID can be seen in the URL of the connection page when viewing the Airbyte Cloud UI, located between `/connections/` and `/status`.

For example, the connection ID for the URL `https://cloud.airbyte.com/workspaces/11f3741b-0b54-45f8-9886-937f96f2ba88/connections/43908042-8399-4a58-82f1-71a45099fff7/status` is `43908042-8399-4a58-82f1-71a45099fff7`.
@@ -81,7 +82,7 @@ For example, the connection ID for the URL `https://cloud.airbyte.com/workspaces
/>

-Then, supply the connection ID and the list of tables which the connection creates in the destination to `build_airbyte_assets`. This utility will generate a set of software-defined assets corresponding to the tables which Airbyte creates in the destination.
+Then, supply the connection ID and the list of tables which the connection creates in the destination to `build_airbyte_assets`. This utility will generate a set of asset definitions corresponding to the tables which Airbyte creates in the destination.

```python startafter=start_manually_define_airbyte_assets_cloud endbefore=end_manually_define_airbyte_assets_cloud file=/integrations/airbyte/airbyte.py dedent=4
from dagster_airbyte import build_airbyte_assets
@@ -104,14 +105,15 @@ from dagster_airbyte import build_airbyte_assets, AirbyteCloudResource
from dagster import Definitions, EnvVar

airbyte_instance = AirbyteCloudResource(
-    api_key=EnvVar("AIRBYTE_API_KEY"),
+    client_id=EnvVar("AIRBYTE_CLIENT_ID"),
+    client_secret=EnvVar("AIRBYTE_CLIENT_SECRET"),
)
airbyte_assets = build_airbyte_assets(
    connection_id="43908042-8399-4a58-82f1-71a45099fff7",
    destination_tables=["releases", "tags", "teams"],
)

-defs = Definitions(assets=[airbyte_assets], resources={"airbyte": airbyte_instance})
+defs = Definitions(assets=airbyte_assets, resources={"airbyte": airbyte_instance})
```

---
@@ -153,7 +155,8 @@ from dagster_snowflake_pandas import SnowflakePandasIOManager
import pandas as pd

airbyte_instance = AirbyteCloudResource(
-    api_key=EnvVar("AIRBYTE_API_KEY"),
+    client_id=EnvVar("AIRBYTE_CLIENT_ID"),
+    client_secret=EnvVar("AIRBYTE_CLIENT_SECRET"),
)
airbyte_assets = build_airbyte_assets(
    connection_id="43908042-8399-4a58-82f1-71a45099fff7",
@@ -168,7 +171,7 @@ def stargazers_file(stargazers: pd.DataFrame):
# only run the airbyte syncs necessary to materialize stargazers_file
my_upstream_job = define_asset_job(
    "my_upstream_job",
-    AssetSelection.keys("stargazers_file")
+    AssetSelection.assets(stargazers_file)
    .upstream()  # all upstream assets (in this case, just the stargazers Airbyte asset)
    .required_multi_asset_neighbors(),  # all Airbyte assets linked to the same connection
)
@@ -207,7 +210,8 @@ from dagster_airbyte import (
from dagster_snowflake import SnowflakeResource

airbyte_instance = AirbyteCloudResource(
-    api_key=EnvVar("AIRBYTE_API_KEY"),
+    client_id=EnvVar("AIRBYTE_CLIENT_ID"),
+    client_secret=EnvVar("AIRBYTE_CLIENT_SECRET"),
)
airbyte_assets = build_airbyte_assets(
    connection_id="43908042-8399-4a58-82f1-71a45099fff7",
@@ -226,7 +230,7 @@ def stargazers_file(snowflake: SnowflakeResource):
# only run the airbyte syncs necessary to materialize stargazers_file
my_upstream_job = define_asset_job(
    "my_upstream_job",
-    AssetSelection.keys("stargazers_file")
+
AssetSelection.assets(stargazers_file) .upstream() # all upstream assets (in this case, just the stargazers Airbyte asset) .required_multi_asset_neighbors(), # all Airbyte assets linked to the same connection ) @@ -261,7 +265,8 @@ from dagster import ( ) airbyte_instance = AirbyteCloudResource( - api_key=EnvVar("AIRBYTE_API_KEY"), + client_id=EnvVar("AIRBYTE_CLIENT_ID"), + client_secret=EnvVar("AIRBYTE_CLIENT_SECRET"), ) airbyte_assets = build_airbyte_assets( connection_id="43908042-8399-4a58-82f1-71a45099fff7", @@ -283,7 +288,7 @@ defs = Definitions( ) ``` -Refer to the [Schedule documentation](/concepts/partitions-schedules-sensors/schedules#running-the-scheduler) for more info on running jobs on a schedule. +Refer to the [Schedule documentation](/concepts/automation/schedules) for more info on running jobs on a schedule. --- @@ -319,14 +324,14 @@ If you have questions on using Airbyte with Dagster, we'd love to hear from you: > diff --git a/docs/content/integrations/airbyte.mdx b/docs/content/integrations/airbyte.mdx index 707febc3e0b91..ee6dc64856358 100644 --- a/docs/content/integrations/airbyte.mdx +++ b/docs/content/integrations/airbyte.mdx @@ -1,9 +1,9 @@ --- -title: "Using Dagster with Airbyte" +title: "Airbyte & Dagster | Dagster Docs" description: Integrate your Airbyte connections into Dagster. --- -# Using Airbyte with Dagster +# Airbyte & Dagster Using Airbyte Cloud? Check out the{" "} @@ -25,9 +25,9 @@ This guide focuses on how to work with Airbyte connections using Dagster's [soft --- -## Airbyte connections and Dagster software-defined assets +## Airbyte connections and Dagster assets -An [Airbyte connection](https://docs.airbyte.com/understanding-airbyte/connections/) defines a series of data streams which are synced between a source and a destination. During a sync, a replica of the data from each data stream is written to the destination, typically as one or more tables. Dagster represents each of the replicas generated in the destination as a software-defined asset. This enables you to easily: +An [Airbyte connection](https://docs.airbyte.com/understanding-airbyte/connections/) defines a series of data streams which are synced between a source and a destination. During a sync, a replica of the data from each data stream is written to the destination, typically as one or more tables. Dagster represents each of the replicas generated in the destination as an asset. This enables you to easily: - Visualize the streams involved in an Airbyte connection and execute a sync from Dagster - Define downstream computations which depend on replicas produced by Airbyte @@ -73,10 +73,7 @@ If you're hosting Airbyte externally, you'll need to provide a hostname where th ## Step 2: Loading Airbyte asset definitions into Dagster -The easiest way to get started using Airbyte with Dagster is to have Dagster automatically generate asset defintions from your Airbyte project. This can be done in one of two ways: - -- [Load asset definitions from an Airbyte instance via API](#loading-airbyte-asset-definitions-from-an-airbyte-instance), at initialization time -- [Load asset definitions from Airbyte YAML configuration files](#loading-airbyte-asset-definitions-from-yaml-config), generated by Airbyte's [Octavia CLI](https://github.com/airbytehq/airbyte/tree/master/octavia-cli#what-is-octavia-cli) +The easiest way to get started using Airbyte with Dagster is to have Dagster automatically generate asset definitions from your Airbyte project. 
Dagster can [load asset definitions from an Airbyte instance via API](#loading-airbyte-asset-definitions-from-an-airbyte-instance) at initialization time. You can also [manually build asset definitions](#manually-building-airbyte-asset-definitions) on a per-connection basis.
@@ -95,13 +92,26 @@ from dagster_airbyte import load_assets_from_airbyte_instance
airbyte_assets = load_assets_from_airbyte_instance(airbyte_instance)
```

-The `load_assets_from_airbyte_instance` function retrieves all of the connections you have defined in the Airbyte interface, creating software-defined assets for each data stream. Each connection has an associated [op](https://docs.dagster.io/concepts/ops-jobs-graphs/ops#ops) which triggers a sync of that connection.
+The `load_assets_from_airbyte_instance` function retrieves all of the connections you have defined in the Airbyte interface, creating asset definitions for each data stream. Each connection has an associated [op](https://docs.dagster.io/concepts/ops-jobs-graphs/ops#ops) which triggers a sync of that connection.

-### Loading Airbyte asset definitions from YAML config
+### Loading Airbyte asset definitions from YAML config
+
+
+  `load_assets_from_airbyte_project`{" "}
+  has been deprecated as the Octavia CLI is no longer maintained. Consider using{" "}
+  `load_assets_from_airbyte_instance`{" "}
+  instead.
+

To load Airbyte assets into Dagster from a set of YAML configuration files, specify the Octavia project directory, which contains the `sources`, `destinations`, and `connections` subfolders. This is the directory where you first ran `octavia init`. Here, the YAML files are treated as the source of truth for building Dagster assets.
@@ -113,7 +123,7 @@ airbyte_assets = load_assets_from_airbyte_project(
)
```

-The `load_assets_from_airbyte_project` function parses the YAML metadata, generating a set of software-defined assets which reflect each of the data streams synced by your connections. Each connection has an associated [op](https://docs.dagster.io/concepts/ops-jobs-graphs/ops#ops) which triggers a sync of that connection.
+The `load_assets_from_airbyte_project` function parses the YAML metadata, generating a set of asset definitions which reflect each of the data streams synced by your connections. Each connection has an associated [op](https://docs.dagster.io/concepts/ops-jobs-graphs/ops#ops) which triggers a sync of that connection.

#### Adding a resource
@@ -238,7 +248,7 @@ def stargazers_file(stargazers: pd.DataFrame):
# only run the airbyte syncs necessary to materialize stargazers_file
my_upstream_job = define_asset_job(
    "my_upstream_job",
-    AssetSelection.keys("stargazers_file")
+    AssetSelection.assets(stargazers_file)
    .upstream()  # all upstream assets (in this case, just the stargazers Airbyte asset)
    .required_multi_asset_neighbors(),  # all Airbyte assets linked to the same connection
)
@@ -289,7 +299,7 @@ def stargazers_file(snowflake: SnowflakeResource):
# only run the airbyte syncs necessary to materialize stargazers_file
my_upstream_job = define_asset_job(
    "my_upstream_job",
-    AssetSelection.keys("stargazers_file")
+    AssetSelection.assets(stargazers_file)
    .upstream()  # all upstream assets (in this case, just the stargazers Airbyte asset)
    .required_multi_asset_neighbors(),  # all Airbyte assets linked to the same connection
)
@@ -347,7 +357,7 @@ defs = Definitions(
)
```

-Refer to the [Schedule documentation](/concepts/partitions-schedules-sensors/schedules#running-the-scheduler) for more info on running jobs on a schedule.
+Refer to the [Schedule documentation](/concepts/automation/schedules) for more info on running jobs on a schedule.

---
@@ -379,14 +389,14 @@ If you have questions on using Airbyte with Dagster, we'd love to hear from you:
>
diff --git a/docs/content/integrations/airflow.mdx b/docs/content/integrations/airflow.mdx
index e98e7752c66c8..41d0d0c53ab73 100644
--- a/docs/content/integrations/airflow.mdx
+++ b/docs/content/integrations/airflow.mdx
@@ -1,31 +1,24 @@
---
-title: Dagster with Airflow | Dagster
-description: The dagster-airflow package allows you to import Airflow DAGs into Dagster jobs and assets, as well as trigger job runs from Airflow.
+title: "Airflow & Dagster | Dagster Docs"
+description: Explore the options for migrating from Airflow to Dagster.
---

-# Using Dagster with Airflow
+# Airflow & Dagster

-
+Migrating from Airflow to Dagster, or integrating Dagster into your existing workflow orchestration stack, can be accomplished in many ways. The [Pick your own journey](/guides/migrations/migrating-airflow-to-dagster) guide provides a variety of suggestions on how to migrate your Airflow pipelines to Dagster, or how to build a platform where both tools coexist.

-The [`dagster-airflow`](/\_apidocs/libraries/dagster-airflow) library provides interoperability between Dagster and Airflow. The main scenarios for using the Dagster Airflow integration are:
-
-- You want to do a lift-and-shift migration of all your existing Airflow DAGs into Dagster jobs/Software-defined Assets (SDAs)
-- You want to trigger Dagster job runs from Airflow
-
-This integration is designed to help support users who have existing Airflow usage and are interested in exploring Dagster.
+- [Learning Dagster from Airflow](/integrations/airflow/from-airflow-to-dagster) - a step-by-step tutorial of mapping concepts from Airflow to Dagster
+- [Migrating from Airflow](/guides/migrations/migrating-airflow-to-dagster) - migration patterns for translating Airflow code to Dagster
+- [Observe your Airflow pipelines with Dagster](/guides/migrations/observe-your-airflow-pipelines-with-dagster) - see how Dagster can act as the observation layer over all pipelines in your organization

---

-## Migrating to Dagster
-
-Interested in migrating from Airflow to Dagster? Check out the [migration guide](/integrations/airflow/migrating-to-dagster) for a step-by-step walkthrough.
-
-If you're not sure how to map Airflow concepts to Dagster, check out the cheatsheet in the next section before you begin.
-
-### Mapping Airflow concepts to Dagster
+## Mapping Airflow concepts to Dagster

While Airflow and Dagster have some significant differences, there are many concepts that overlap. Use this cheatsheet to understand how Airflow concepts map to Dagster.

+**Want a look at this in code?** Check out the [Learning Dagster from Airflow](/integrations/airflow/from-airflow-to-dagster) guide.
+
@@ -137,8 +128,8 @@ While Airflow and Dagster have some significant differences, there are many conc
    • - - Auto-materialization + + Declarative Automation
    • @@ -153,8 +144,8 @@ While Airflow and Dagster have some significant differences, there are many conc . When this is the case, backfills - and - auto-materialize + and + Declarative Automation will only materialize later partitions after earlier partitions have completed. @@ -211,7 +202,7 @@ While Airflow and Dagster have some significant differences, there are many conc @@ -230,7 +221,7 @@ While Airflow and Dagster have some significant differences, there are many conc Graphs
    • - Tags + Tags
    • @@ -241,8 +232,8 @@ While Airflow and Dagster have some significant differences, there are many conc
    • @@ -282,18 +273,3 @@ While Airflow and Dagster have some significant differences, there are many conc
      Datasets - - Software-defined Assets (SDAs) - + Assets - SDAs are more powerful and mature than datasets and include support for - things like{" "} + Dagster assets are more powerful and mature than datasets and include + support for things like{" "} partitioning @@ -99,7 +90,7 @@ While Airflow and Dagster have some significant differences, there are many conc Environment variables {" "} - (Dagster Cloud only) + (Dagster+ only)
      Schedulers - Schedules + Schedules
      Dagster provides rich, searchable{" "} - - metadata and tagging + + metadata and tagging {" "} support beyond what’s offered by Airflow.
      - ---- - -## Related - - - - - diff --git a/docs/content/integrations/airflow/from-airflow-to-dagster.mdx b/docs/content/integrations/airflow/from-airflow-to-dagster.mdx index 74afe22882ecd..9f00d12238e2d 100644 --- a/docs/content/integrations/airflow/from-airflow-to-dagster.mdx +++ b/docs/content/integrations/airflow/from-airflow-to-dagster.mdx @@ -1,5 +1,5 @@ --- -title: "Learning Dagster from Airlfow" +title: "Learning Dagster from Airflow" description: How to get started with Dagster from an Airflow background --- @@ -71,7 +71,7 @@ A Dagster [job](/concepts/ops-jobs-graphs/jobs) is made up of a [graph](/concept ## Step 1: Defining the ops -In Dagster, the minimum unit of computation is an op. This directly corresponds to an operator in Airflow. Here, we map the the operators of our example Airflow DAG `t1`, `t2`, and `t3` to their respective Dagster ops. +In Dagster, the minimum unit of computation is an op. This directly corresponds to an operator in Airflow. Here, we map the operators of our example Airflow DAG `t1`, `t2`, and `t3` to their respective Dagster ops. ```python file=/integrations/airflow/tutorial_rewrite_dagster.py startafter=start_ops endbefore=end_ops @op @@ -212,3 +212,264 @@ defs = Definitions( schedules=[schedule], ) ``` + +--- + +## Mapping Airflow concepts to Dagster + +While Airflow and Dagster have some significant differences, there are many concepts that overlap. Use this cheatsheet to understand how Airflow concepts map to Dagster. + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
      + Airflow concept + + Dagster concept + Notes
      Directed Acyclic Graphs (DAG) + Jobs +
      Task + Ops +
      Datasets + Assets + + Dagster assets are more powerful and mature than datasets and include + support for things like{" "} + + partitioning + + . +
      Connections/Variables + +
      DagBags + Code locations + + Multiple isolated code locations with different system and Python + dependencies can exist within the same Dagster instance. +
DAG runs + Job runs
      + depends_on_past + + + + An asset can{" "} + + depend on earlier partitions of itself + + . When this is the case, + backfills + and + Declarative Automation + will only materialize later partitions after earlier partitions have + completed. +
      Executors + Executors +
Hooks + Resources + + Dagster resources contain a superset of + the functionality of hooks and have much stronger composition + guarantees. +
      Instances + Instances +
Operators + None + Dagster uses normal Python functions instead of framework-specific + operator classes. For off-the-shelf functionality with third-party + tools, Dagster provides{" "} + integration libraries. +
      Pools + Run coordinators +
      Plugins/Providers + Integrations +
      Schedulers + Schedules +
      Sensors + Sensors +
      SubDAGs/TaskGroups + + + Dagster provides rich, searchable{" "} + + metadata and tagging + {" "} + support beyond what’s offered by Airflow. +
      + task_concurrency + + + Asset/op-level concurrency limits + +
Trigger + Dagster UI Launchpad + + Triggering and configuring ad-hoc runs is easier in Dagster, which allows + them to be initiated through the{" "} + Dagster UI, the{" "} + GraphQL API, or the CLI. +
XComs + I/O managers + + I/O managers are more powerful than XComs and allow passing large + datasets between jobs (see the sketch after this table). +
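To make the XComs-to-I/O-managers row concrete, here is a minimal sketch of the Dagster side; the `raw_orders` and `order_totals` asset names and the sample data are invented for illustration. Where Airflow tasks would `xcom_push` and `xcom_pull` a value, a Dagster asset simply returns it, and an I/O manager persists it and hands it to downstream assets:

```python
from dagster import Definitions, asset


@asset
def raw_orders() -> list:
    # The default I/O manager persists this return value; no explicit push is needed.
    return [{"id": 1, "amount": 30}, {"id": 2, "amount": 12}]


@asset
def order_totals(raw_orders: list) -> float:
    # Dagster loads raw_orders from storage and passes it in; no explicit pull either.
    return sum(order["amount"] for order in raw_orders)


defs = Definitions(assets=[raw_orders, order_totals])
```

Because data passing is mediated by a pluggable I/O manager rather than the metadata database, the same asset code can move from local pickles to warehouse tables without changing business logic.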
      diff --git a/docs/content/integrations/airflow/migrating-to-dagster.mdx b/docs/content/integrations/airflow/migrating-to-dagster.mdx deleted file mode 100644 index b53577a9e4c01..0000000000000 --- a/docs/content/integrations/airflow/migrating-to-dagster.mdx +++ /dev/null @@ -1,228 +0,0 @@ ---- -title: "Migrating Airflow to Dagster | Dagster Docs" -description: "Learn how do a lift-and-shift migration of airflow to Dagster." ---- - -# Migrating Airflow to Dagster - - - Looking for an example of an Airflow to Dagster migration? Check out the{" "} -
      - dagster-airflow migration example repo on GitHub - - ! - - -Dagster can convert your Airflow DAGs into Dagster jobs, enabling a lift-and-shift migration from Airflow without any rewriting. - -This guide will walk you through the steps of performing this migration. - ---- - -## Prerequisites - -To complete the migration, you'll need: - -- **To perform some upfront analysis**. Refer to the [next section](#before-you-begin) for more detail. - -- **To know the following about your Airflow setup**: - - - What operator types are used in the DAGs you're migrating - - What Airflow connections your DAGs depend on - - What Airflow variables you've set - - What Airflow secrets backend you use - - Where the permissions that your DAGs depend on are defined - -- **If using Dagster Cloud**, an existing [Dagster Cloud](/dagster-cloud) account. While your migrated Airflow DAGs will work with Dagster Open Source, this guide includes setup specific to Dagster Cloud. - - **If you just signed up for a Cloud account**, follow the steps in the [Dagster Cloud Getting Started guide](/dagster-cloud/getting-started) before proceeding. - -### Before you begin - -You may be coming to this document/library in a skeptical frame of mind, having previously been burned by projects that claim to have 100% foolproof, automated migration tools. We want to assure you that we are _not_ making that claim. - -The `dagster-airflow` migration library should be viewed as a powerful _accelerant_ of migration, rather than guaranteeing completely _automated_ migration. The amount of work required is proportional to the complexity of your installation. Smaller implementations of less complexity can be trivial to migrate, making it appear virtually automatic; larger, more complicated, or customized implementations require more investment. - -For larger installations, teams that already adopt devops practices and/or have standardized usage of Airflow will have a smoother transition. - -Some concrete examples: - -- If you rely on the usage of the Airflow UI to set production connections or variables, this will require a change of workflow as you will no longer have the Airflow UI at the end of this migration. If instead you rely on code-as-infrastructure patterns to set connections or variables, migration is more straightforward. -- If you have standardized on a small set or even a single operator type (e.g. the `K8sPodOperator`), migration will be easier. If you use a large number of operator types with a wide range of infrastructure requirements, migration will be more work. -- If you dynamically generate your Airflow DAGs from a higher-level API or DSL (e.g. yaml), the migration will be more straightforward than all your stakeholders directly creating Airflow DAGs. - -Even in the case that requires some infrastructure investment, `dagster-airflow` dramatically eases migration, typically by orders of magnitude. The cost can be borne by a single or small set of infrastructure-oriented engineers, which dramatically reduces coordination costs. You do not have to move all of your stakeholders over to new APIs simultaneously. In our experience, practitioners welcome the change, because of the immediate improvement in tooling, stability, and development speed. - ---- - -## Step 1: Prepare your project for a new Dagster Python module - -While there are many ways to structure an Airflow git repository, this guide assumes you're using a repository structure that contains a single `./dags` DagBag directory that contains all your DAGs. 
- -In the root of your repository, create a `dagster_migration.py` file. - ---- - -## Step 2: Install Dagster Python packages alongside Airflow - - - This step may require working through a number of version pins. Specifically, - installing Airflow 1.x.x versions may be challenging due to (usually) outdated - constraint files. -
      -
      - Don't get discouraged if you run into problems! Reach out to the Dagster Slack - for help. -
      - -In this step, you'll install the `dagster`, `dagster-airflow`, and `dagster-webserver` Python packages alongside Airflow. **We strongly recommend using a virtualenv.** - -To install everything, run: - -```bash -pip install dagster dagster-airflow dagster-webserver -``` - -We also suggest verifying that you're installing the correct versions of your Airflow dependencies. Verifying the dependency versions will likely save you from debugging tricky errors later. - -To check dependency versions, open your Airflow provider's UI and locate the version numbers. When finished, continue to the next step. - ---- - -## Step 3: Convert DAGS into Dagster definitions - -In this step, you'll start writing Python! - -In the `dagster_migration.py` file you created in [Step 1](#step-1-prepare-your-project-for-a-new-dagster-python-module), use and pass in the file path of your Airflow DagBag. Dagster will load the DagBag and convert all DAGs into Dagster jobs and schedules. - -```python file=/integrations/airflow/migrate_repo.py -import os - -from dagster_airflow import ( - make_dagster_definitions_from_airflow_dags_path, -) - -migrated_airflow_definitions = make_dagster_definitions_from_airflow_dags_path( - os.path.abspath("./dags/"), -) -``` - ---- - -## Step 4: Verify the DAGs are loading - -In this step, you'll spin up Dagster's web-based UI, and verify that your migrated DAGs are loading. **Note**: Unless the migrated DAGs depend on no Airflow configuration state or permissions, it's unlikely they'll execute correctly at this point. That's okay - we'll fix it in a bit. Starting the Dagster UI is the first step in our development loop, allowing you to make a local change, view it in the UI, and debug any errors. - -1. Run the following to start the UI: - - ```bash - dagster dev -f ./migrate_repo.py - ``` - -2. In your browser, navigate to . You should see a list of Dagster jobs that correspond to the DAGs in your Airflow DagBag. - -3. Run one of the simpler jobs, ideally one where you're familiar with the business logic. Note that it's likely to fail due to a configuration or permissions issue. - -4. Using logs to identify and making configuration changes to fix the cause of the failure. - -Repeat these steps as needed until the jobs run successfully. - -### Containerized operator considerations - -There are a variety of Airflow Operator types that are used to launch compute in various external execution environments, for example Kubernetes or Amazon ECS. When getting things working locally we'd recommend trying to execute those containers locally unless it's either unrealistic or impossible to emulate the cloud environment. For example if you use the K8sPodOperator, it likely means that you will need to have a local Kubernetes cluster running, and in that case we recommend docker's built-in Kubernetes environment. You also need to be able to pull down the container images that will be needed for execution to your local machine. - -If local execution is impossible, we recommend using Branch Deployments in Dagster Cloud, which is a well-supported workflow for cloud-native development. - ---- - -## Step 5: Transfer your Airflow configuration - -To port your Airflow configuration, we recommend using [environment variables](/guides/dagster/using-environment-variables-and-secrets) as much as possible. Specifically, we recommend using a `.env` file containing Airflow variables and/or a secrets backend configuration in the root of your project. 
- -You'll also need to configure the [Airflow connections](https://airflow.apache.org/docs/apache-airflow/stable/howto/connection.html) that your DAGs depend on. To accomplish this, use the `connections` parameter instead of URI-encoded environment variables. - -```python file=/integrations/airflow/migrate_repo_connections.py -import os - -from airflow.models import Connection -from dagster_airflow import make_dagster_definitions_from_airflow_dags_path - -migrated_airflow_definitions = make_dagster_definitions_from_airflow_dags_path( - os.path.abspath("./dags/"), - connections=[ - Connection(conn_id="http_default", conn_type="uri", host="https://google.com") - ], -) -``` - -Iterate as needed until all configuration is correctly ported to your local environment. - ---- - -## Step 6: Deciding on persistent vs ephemeral Airflow database - -The Dagster Airflow migration tooling supports two methods for persisting Airflow metadatabase state. By default, it will use an ephemeral database that is scoped to every job run and thrown away as soon as a job run terminates. You can also configure a persistent database that will be shared by all job runs. This tutorial uses the ephemeral database, but the persistent database option is recommended if you need the following features: - -- retry-from-failure support in Dagster -- Passing Airflow state between DAG runs (i.e., xcoms) -- `SubDAGOperators` -- Airflow Pools - -If you have complex Airflow DAGs (cross-DAG state sharing, pools) or the ability to retry-from-failure you will need to have a persistent database when making your migration. - -You can configure your persistent Airflow database by providing an `airflow_db` to the `resource_defs` parameter of the `dagster-airflow` APIs: - -```python -from dagster_airflow import ( - make_dagster_definitions_from_airflow_dags_path, - make_persistent_airflow_db_resource, -) -postgres_airflow_db = "postgresql+psycopg2://airflow:airflow@localhost:5432/airflow" -airflow_db = make_persistent_airflow_db_resource(uri=postgres_airflow_db) -definitions = make_dagster_definitions_from_airflow_example_dags( - '/path/to/dags/', - resource_defs={"airflow_db": airflow_db} -) -``` - ---- - -## Step 7: Move to production - - - This step is applicable to Dagster Cloud. If deploying to your infrastructure, - refer to the Deployment guides for more info. -
      -
      - Additionally, until your Airflow DAGs execute successfully in your local environment, - we recommend waiting to move to production. -
      - -In this step, you'll set up your project for use with Dagster Cloud. - - - -1. Complete the steps in the [Dagster Cloud Getting Started guide](/dagster-cloud/getting-started), if you haven't already. Proceed to the next step when your account is set up and you have the `dagster-cloud` CLI installed. - -2. In the root of your project, create or modify the [`dagster_cloud.yaml` file](/dagster-cloud/managing-deployments/dagster-cloud-yaml) with the following code: - - ```yaml - locations: - - location_name: dagster_migration - code_source: - python_file: dagster_migration.py - ``` - -3. Push your code and let Dagster Cloud's CI/CD run out a deployment of your migrated DAGs to cloud. - ---- - -## Step 8: Migrate permissions to Dagster - -Your Airflow instance likely had specific IAM or Kubernetes permissions that allowed it to successfully run your Airflow DAGs. To run the migrated Dagster jobs, you'll need to duplicate these permissions for Dagster. - -- **We recommend using [Airflow connections](https://airflow.apache.org/docs/apache-airflow/stable/howto/connection.html) or [environment variables](/dagster-cloud/managing-deployments/environment-variables-and-secrets)** to define permissions whenever possible. - -- **If you're unable to use Airflow connections or environment variables,** you can attach permissions directly to the infrastructure where you're deploying Dagster. - -- **If your Airflow DAGs used [`KubernetesPodOperators`](https://airflow.apache.org/docs/apache-airflow-providers-cncf-kubernetes/stable/operators.html)**, it's possible that you loaded a `kube_config` file or used the `in_cluster` config. When migrating, we recommend switching to [using connections with a `kube_config` JSON blob](https://airflow.apache.org/docs/apache-airflow-providers-cncf-kubernetes/stable/connections/kubernetes.html) to make things easier. diff --git a/docs/content/integrations/airflow/reference.mdx b/docs/content/integrations/airflow/reference.mdx deleted file mode 100644 index 9a3e0249f22e8..0000000000000 --- a/docs/content/integrations/airflow/reference.mdx +++ /dev/null @@ -1,94 +0,0 @@ ---- -title: "Airflow integration reference | Dagster Docs" -description: The Airflow package lets you convert Airflow DAGs into Dagster Jobs. ---- - -# Airflow integration reference - -This reference provides a high-level look at working with Airflow using the [`dagster-airflow` integration library](/\_apidocs/libraries/dagster-airflow). - ---- - -## Airflow DAG directory as a Dagster repository - -To load all Airflow DAGS in a file path into a [Dagster repository](/concepts/repositories-workspaces/repositories), use : - -```python file=/integrations/airflow/migrate_repo.py -import os - -from dagster_airflow import ( - make_dagster_definitions_from_airflow_dags_path, -) - -migrated_airflow_definitions = make_dagster_definitions_from_airflow_dags_path( - os.path.abspath("./dags/"), -) -``` - ---- - -## Orchestrating Dagster jobs from Airflow - -You can orchestrate Dagster job runs from Airflow by using the `DagsterCloudOperator` or `DagsterOperator` operators in your existing Airflow DAGs. 
For example, here's an Airflow DAG: - -```python file=/integrations/airflow/operator.py -from datetime import datetime - -from airflow import DAG -from dagster_airflow import DagsterCloudOperator - -with DAG( - dag_id="dagster_cloud", - start_date=datetime(2022, 5, 28), - schedule_interval="*/5 * * * *", - catchup=False, -) as dag: - DagsterCloudOperator( - task_id="new_dagster_assets", - repostitory_location_name="example_location", - repository_name="my_dagster_project", - job_name="all_assets_job", - ) -``` - -In Airflow 2.0+, you can create a Dagster connection type to store configuration related to your Dagster Cloud organization. If you're using Airflow 1.0, you can also pass this directly to the operator. - ---- - -## Ingesting DAGs from Airflow - -This example demonstrates how to use to compile an Airflow DAG into a Dagster job that works the same way as a Dagster-native job. - -There are three jobs in the repo: - -- `airflow_simple_dag` demonstrates the use of Airflow templates. -- `airflow_complex_dag` shows the translation of a more complex dependency structure. -- `airflow_kubernetes_dag` shows the translation of a DAG using Kubernetes pod operators. - -```python file=../../with_airflow/with_airflow/repository.py startafter=start_repo_marker_0 endbefore=end_repo_marker_0 -from dagster_airflow import ( - make_dagster_job_from_airflow_dag, - make_schedules_and_jobs_from_airflow_dag_bag, -) - -from with_airflow.airflow_complex_dag import complex_dag -from with_airflow.airflow_kubernetes_dag import kubernetes_dag -from with_airflow.airflow_simple_dag import simple_dag - -airflow_simple_dag = make_dagster_job_from_airflow_dag(simple_dag) -airflow_complex_dag = make_dagster_job_from_airflow_dag(complex_dag) -airflow_kubernetes_dag = make_dagster_job_from_airflow_dag(kubernetes_dag) - - -@repository -def with_airflow(): - return [airflow_complex_dag, airflow_simple_dag, airflow_kubernetes_dag] -``` - -Note that the `execution_date` for the Airflow DAG is specified through the job tags. To specify tags, call to: - -```python file=../../with_airflow/with_airflow/repository.py startafter=start_repo_marker_1 endbefore=end_repo_marker_1 -airflow_simple_dag_with_execution_date = make_dagster_job_from_airflow_dag( - dag=simple_dag, tags={"airflow_execution_date": datetime.now().isoformat()} -) -``` diff --git a/docs/content/integrations/bigquery.mdx b/docs/content/integrations/bigquery.mdx index ce5ac2862069e..d56443c3bda4d 100644 --- a/docs/content/integrations/bigquery.mdx +++ b/docs/content/integrations/bigquery.mdx @@ -1,11 +1,11 @@ --- -title: "Google BigQuery + Dagster" +title: "Google BigQuery & Dagster | Dagster Docs" description: Store your Dagster assets in BigQuery --- -# Google BigQuery + Dagster +# Google BigQuery & Dagster -Using Dagster's [software-defined assets](/concepts/assets/software-defined-assets) and BigQuery I/O manager, you can easily interact with BigQuery tables alongside other Dagster assets. +Using Dagster's [asset definitions](/concepts/assets/software-defined-assets) and BigQuery I/O manager, you can easily interact with BigQuery tables alongside other Dagster assets. Managing your BigQuery tables with Dagster enables you to: @@ -18,7 +18,7 @@ Managing your BigQuery tables with Dagster enables you to: ## BigQuery and Dagster tutorial -In this tutorial, you'll learn how to store and load Dagster's [software-defined asset](/concepts/assets/software-defined-assets) in BigQuery. [Click here to get started](/integrations/bigquery/using-bigquery-with-dagster). 
+In this tutorial, you'll learn how to store and load Dagster's [asset definitions](/concepts/assets/software-defined-assets) in BigQuery. [Click here to get started](/integrations/bigquery/using-bigquery-with-dagster).

By the end of the tutorial, you will have a connection to your BigQuery instance and a handful of assets that create tables in BigQuery or read existing tables from BigQuery.
diff --git a/docs/content/integrations/bigquery/reference.mdx b/docs/content/integrations/bigquery/reference.mdx
index b4478d5e7400f..89613838ff067 100644
--- a/docs/content/integrations/bigquery/reference.mdx
+++ b/docs/content/integrations/bigquery/reference.mdx
@@ -20,7 +20,7 @@ This reference page provides information for working with [`dagster-bigquery`](/

## Providing credentials as configuration

-In most cases, you will authenticate with Google Cloud Project (GCP) using one of the methods outlined in the [GCP documentation](https://cloud.google.com/docs/authentication/provide-credentials-adc). However, in some cases you may find that you need to provide authentication credentials directly to the BigQuery I/O manager. For example, if you are using [Dagster Cloud Serverless](/dagster-cloud/deployment/serverless) you cannot upload a credential file, so must provide your credentials as an environment variable.
+In most cases, you will authenticate with Google Cloud Platform (GCP) using one of the methods outlined in the [GCP documentation](https://cloud.google.com/docs/authentication/provide-credentials-adc). However, in some cases you may find that you need to provide authentication credentials directly to the BigQuery I/O manager. For example, if you are using [Dagster+ Serverless](/dagster-plus/deployment/serverless) you cannot upload a credential file, so you must provide your credentials as an environment variable.

You can provide credentials directly to the BigQuery I/O manager by using the `gcp_credentials` configuration value. The BigQuery I/O manager will create a temporary file to store the credential and will set `GOOGLE_APPLICATION_CREDENTIALS` to point to this file. When the Dagster run is completed, the temporary file is deleted and `GOOGLE_APPLICATION_CREDENTIALS` is unset.
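To make the `gcp_credentials` option concrete, here is a minimal sketch; the project ID, dataset name, asset body, and the `GCP_CREDS` environment variable name are assumptions for illustration, and the variable is expected to hold the base64-encoded contents of the service account key file:

```python
import pandas as pd
from dagster_gcp_pandas import BigQueryPandasIOManager

from dagster import Definitions, EnvVar, asset


@asset
def iris_data() -> pd.DataFrame:
    # Placeholder body; imagine this builds or fetches a table of iris data.
    return pd.DataFrame({"species": ["Iris-setosa"], "sepal_length_cm": [5.1]})


defs = Definitions(
    assets=[iris_data],
    resources={
        "io_manager": BigQueryPandasIOManager(
            project="my-gcp-project",  # illustrative project ID
            dataset="IRIS",  # illustrative dataset name
            # e.g. produced with: cat $GOOGLE_APPLICATION_CREDENTIALS | base64
            gcp_credentials=EnvVar("GCP_CREDS"),
        )
    },
)
```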
@@ -273,7 +273,7 @@ You can specify the default dataset where data will be stored as configuration t If you want to store assets in different datasets, you can specify the dataset as metadata: ```python file=/integrations/bigquery/reference/dataset.py startafter=start_metadata endbefore=end_metadata dedent=4 -daffodil_data = SourceAsset(key=["daffodil_data"], metadata={"schema": "daffodil"}) +daffodil_data = AssetSpec(key=["daffodil_data"], metadata={"schema": "daffodil"}) @asset(metadata={"schema": "iris"}) def iris_data() -> pd.DataFrame: @@ -292,7 +292,7 @@ def iris_data() -> pd.DataFrame: You can also specify the dataset as part of the asset's asset key: ```python file=/integrations/bigquery/reference/dataset.py startafter=start_asset_key endbefore=end_asset_key dedent=4 -daffodil_data = SourceAsset(key=["gcp", "bigquery", "daffodil", "daffodil_data"]) +daffodil_data = AssetSpec(key=["gcp", "bigquery", "daffodil", "daffodil_data"]) @asset(key_prefix=["gcp", "bigquery", "iris"]) def iris_data() -> pd.DataFrame: @@ -430,16 +430,8 @@ The `BigQueryPySparkIOManager` requires that a `SparkSession` be active and conf from dagster_gcp_pyspark import BigQueryPySparkIOManager from dagster_pyspark import pyspark_resource from pyspark import SparkFiles -from pyspark.sql import ( - DataFrame, - SparkSession, -) -from pyspark.sql.types import ( - DoubleType, - StringType, - StructField, - StructType, -) +from pyspark.sql import DataFrame, SparkSession +from pyspark.sql.types import DoubleType, StringType, StructField, StructType from dagster import AssetExecutionContext, Definitions, asset @@ -486,16 +478,8 @@ defs = Definitions( ```python file=/integrations/bigquery/reference/pyspark_with_spark_session.py from dagster_gcp_pyspark import BigQueryPySparkIOManager from pyspark import SparkFiles -from pyspark.sql import ( - DataFrame, - SparkSession, -) -from pyspark.sql.types import ( - DoubleType, - StringType, - StructField, - StructType, -) +from pyspark.sql import DataFrame, SparkSession +from pyspark.sql.types import DoubleType, StringType, StructField, StructType from dagster import Definitions, asset diff --git a/docs/content/integrations/bigquery/using-bigquery-with-dagster.mdx b/docs/content/integrations/bigquery/using-bigquery-with-dagster.mdx index 30d5bdd4dbf4a..54ac26c62687a 100644 --- a/docs/content/integrations/bigquery/using-bigquery-with-dagster.mdx +++ b/docs/content/integrations/bigquery/using-bigquery-with-dagster.mdx @@ -5,7 +5,7 @@ description: Store your Dagster assets in BigQuery # Using Dagster with Google BigQuery -This tutorial focuses on creating and interacting with BigQuery tables using Dagster's [software-defined assets (SDAs)](/concepts/assets/software-defined-assets). +This tutorial focuses on creating and interacting with BigQuery tables using Dagster's [asset definitions](/concepts/assets/software-defined-assets). The `dagster-gcp` library provides two ways to interact with BigQuery tables: @@ -129,15 +129,15 @@ Now you can run `dagster dev` and materialize the `iris_data` asset from the Dag #### Making Dagster aware of existing tables -If you already have existing tables in BigQuery and other assets defined in Dagster depend on those tables, you may want Dagster to be aware of those upstream dependencies. Making Dagster aware of these tables will allow you to track the full data lineage in Dagster. You can accomplish this by creating [source assets](/concepts/assets/software-defined-assets#defining-external-asset-dependencies) for these tables. 
+If you already have existing tables in BigQuery and other assets defined in Dagster depend on those tables, you may want Dagster to be aware of those upstream dependencies. Making Dagster aware of these tables will allow you to track the full data lineage in Dagster. You can accomplish this by defining [external assets](/concepts/assets/external-assets) for these tables.

```python file=/integrations/bigquery/tutorial/resource/source_asset.py
-from dagster import SourceAsset
+from dagster import AssetSpec

-iris_harvest_data = SourceAsset(key="iris_harvest_data")
+iris_harvest_data = AssetSpec(key="iris_harvest_data")
```

-In this example, you're creating a `SourceAsset` for a pre-existing table called `iris_harvest_data`.
+In this example, you're creating an `AssetSpec` for a pre-existing table called `iris_harvest_data`.
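To show how such an external asset participates in lineage, here is a minimal sketch of a downstream asset that declares the table as a dependency; the `iris_harvest_report` asset, the `IRIS.IRIS_HARVEST_DATA` table path, and the use of `BigQueryResource` here are illustrative assumptions:

```python
from dagster_gcp import BigQueryResource

from dagster import AssetSpec, asset

iris_harvest_data = AssetSpec(key="iris_harvest_data")


@asset(deps=[iris_harvest_data])
def iris_harvest_report(bigquery: BigQueryResource) -> None:
    # The dependency is tracked for lineage even though Dagster never
    # materializes iris_harvest_data itself.
    with bigquery.get_client() as client:
        client.query("SELECT COUNT(*) FROM IRIS.IRIS_HARVEST_DATA").result()
```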
@@ -176,9 +176,9 @@ import pandas as pd
from dagster_gcp import BigQueryResource
from google.cloud import bigquery as bq

-from dagster import Definitions, SourceAsset, asset
+from dagster import AssetSpec, Definitions, asset

-iris_harvest_data = SourceAsset(key="iris_harvest_data")
+iris_harvest_data = AssetSpec(key="iris_harvest_data")


@asset
@@ -311,17 +311,17 @@ When Dagster materializes the `iris_data` asset using the configuration from [St

#### Making Dagster aware of existing tables

-If you already have existing tables in BigQuery and other assets defined in Dagster depend on those tables, you may want Dagster to be aware of those upstream dependencies. Making Dagster aware of these tables will allow you to track the full data lineage in Dagster. You can create [source assets](/concepts/assets/software-defined-assets#defining-external-asset-dependencies) for these tables. When using an I/O manager, creating a source asset for an existing table also allows you to tell Dagster how to find the table so it can be fetched for downstream assets.
+If you already have existing tables in BigQuery and other assets defined in Dagster depend on those tables, you may want Dagster to be aware of those upstream dependencies. Making Dagster aware of these tables will allow you to track the full data lineage in Dagster. You can define [external assets](/concepts/assets/external-assets) for these tables. When using an I/O manager, defining an external asset for an existing table also allows you to tell Dagster how to find the table so it can be fetched for downstream assets.

```python file=/integrations/bigquery/tutorial/io_manager/source_asset.py
-from dagster import SourceAsset
+from dagster import AssetSpec

-iris_harvest_data = SourceAsset(key="iris_harvest_data")
+iris_harvest_data = AssetSpec(key="iris_harvest_data")
```

-In this example, you're creating a `SourceAsset` for a pre-existing table - perhaps created by an external data ingestion tool - that contains data about iris harvests. To make the data available to other Dagster assets, you need to tell the BigQuery I/O manager how to find the data, so that the I/O manager can load the data into memory.
+In this example, you're creating an `AssetSpec` for a pre-existing table - perhaps created by an external data ingestion tool - that contains data about iris harvests. To make the data available to other Dagster assets, you need to tell the BigQuery I/O manager how to find the data, so that the I/O manager can load the data into memory.

-Because you already supplied the project and dataset in the I/O manager configuration in [Step 1: Configure the BigQuery I/O manager](#step-1-configure-the-bigquery-io-manager), you only need to provide the table name. This is done with the `key` parameter in `SourceAsset`. When the I/O manager needs to load the `iris_harvest_data` in a downstream asset, it will select the data in the `IRIS.IRIS_HARVEST_DATA` table as a Pandas DataFrame and provide it to the downstream asset.
+Because you already supplied the project and dataset in the I/O manager configuration in [Step 1: Configure the BigQuery I/O manager](#step-1-configure-the-bigquery-io-manager), you only need to provide the table name. This is done with the `key` parameter in `AssetSpec`. When the I/O manager needs to load the `iris_harvest_data` in a downstream asset, it will select the data in the `IRIS.IRIS_HARVEST_DATA` table as a Pandas DataFrame and provide it to the downstream asset.
@@ -355,9 +355,9 @@ When finished, your code should look like the following: import pandas as pd from dagster_gcp_pandas import BigQueryPandasIOManager -from dagster import Definitions, SourceAsset, asset +from dagster import AssetSpec, Definitions, asset -iris_harvest_data = SourceAsset(key="iris_harvest_data") +iris_harvest_data = AssetSpec(key="iris_harvest_data") @asset @@ -398,6 +398,6 @@ defs = Definitions( For more BigQuery features, refer to the [BigQuery reference](/integrations/bigquery/reference). -For more information on software-defined assets, refer to the [tutorial](/tutorial) or the [Assets concept documentation](/concepts/assets/software-defined-assets). +For more information on asset definitions, refer to the [tutorial](/tutorial) or the [Assets concept documentation](/concepts/assets/software-defined-assets). For more information on I/O managers, refer to the [I/O manager concept documentation](/concepts/io-management/io-managers). diff --git a/docs/content/integrations/dagstermill.mdx b/docs/content/integrations/dagstermill.mdx index 7ef0e554878f6..d5d52dd052088 100644 --- a/docs/content/integrations/dagstermill.mdx +++ b/docs/content/integrations/dagstermill.mdx @@ -1,11 +1,11 @@ --- -title: Using Jupyter notebooks with Papermill and Dagster +title: "Jupyter/Papermill & Dagster | Dagster Docs" description: The Dagstermill package lets you run notebooks using the Dagster tools and integrate them into your data pipelines. --- -# Dagstermill: Jupyter/Papermill + Dagster +# Dagstermill: Jupyter/Papermill & Dagster -Notebooks are an indispensible tool for data science. They allow for easy exploration of datasets, fast iteration, and the ability to create a rich report with Markdown blocks and inline plotting. The Dagstermill (Dagster + papermill) package makes it straightforward to run notebooks using Dagster tools and to integrate them with your Dagster assets or jobs. +Notebooks are an indispensable tool for data science. They allow for easy exploration of datasets, fast iteration, and the ability to create a rich report with Markdown blocks and inline plotting. The Dagstermill (Dagster & Papermill) package makes it straightforward to run notebooks using Dagster tools and to integrate them with your Dagster assets or jobs. Using the Dagstermill library enables you to: diff --git a/docs/content/integrations/dagstermill/using-notebooks-with-dagster.mdx b/docs/content/integrations/dagstermill/using-notebooks-with-dagster.mdx index 609778d7d41b1..b1f838780e3f0 100644 --- a/docs/content/integrations/dagstermill/using-notebooks-with-dagster.mdx +++ b/docs/content/integrations/dagstermill/using-notebooks-with-dagster.mdx @@ -58,7 +58,7 @@ To complete this tutorial, you'll need: - **A template version of the tutorial project**, which you can use to follow along with the tutorial. This is the `tutorial_template` subfolder. In this folder, you'll also find: - - `assets`, a subfolder containing Dagster assets. We'll use `/assets/__init__.py` to write these. + - `assets.py`, a file containing Dagster assets. We'll use `/assets.py` to write these. - `notebooks`, a subfolder containing Jupyter notebooks. We'll use `/notebooks/iris-kmeans.ipynb` to write a Jupyter notebook. --- @@ -119,17 +119,17 @@ Like many notebooks, this example does some fairly sophisticated work, including By creating a Dagster asset from our notebook, we can integrate the notebook as part of our data platform.
This enables us to make its contents more accessible to developers, stakeholders, and other assets in Dagster. -To create a Dagster asset from a Jupyter notebook, we can use the `define_dagstermill_asset` function. In `/tutorial_template/assets/__init__.py` add the following code snippet: +To create a Dagster asset from a Jupyter notebook, we can use the `define_dagstermill_asset` function. In `/tutorial_template/assets.py`, add the following code snippet: ```python -# /tutorial_template/assets/__init__.py +# /tutorial_template/assets.py from dagstermill import define_dagstermill_asset from dagster import file_relative_path iris_kmeans_jupyter_notebook = define_dagstermill_asset( name="iris_kmeans_jupyter", - notebook_path=file_relative_path(__file__, "../notebooks/iris-kmeans.ipynb"), + notebook_path=file_relative_path(__file__, "notebooks/iris-kmeans.ipynb"), group_name="template_tutorial", ) ``` @@ -152,18 +152,18 @@ We want to execute our Dagster asset and save the resulting notebook to a persis Additionally, we need to provide a [resource](/concepts/resources) to the notebook to tell Dagster how to store the resulting `.ipynb` file. We'll use an [I/O manager](/concepts/io-management/io-managers) to accomplish this. -Open the `/tutorial_template/__init__.py` file and add the following code: +Open the `/tutorial_template/definitions.py` file and add the following code: ```python -# tutorial_template/__init__.py +# tutorial_template/definitions.py -from dagster import load_assets_from_package_module, Definitions +from dagster import load_assets_from_modules, Definitions from dagstermill import ConfigurableLocalOutputNotebookIOManager from . import assets defs = Definitions( - assets=load_assets_from_package_module(assets), + assets=load_assets_from_modules([assets]), resources={ "output_notebook_io_manager": ConfigurableLocalOutputNotebookIOManager() } @@ -173,7 +173,7 @@ defs = Definitions( Let's take a look at what's happening here: -- Using `load_assets_from_package_module`, we've imported all assets in the `assets` module. This approach allows any new assets we create to be automatically added to the `Definitions` object instead of needing to manually add them one by one. +- Using `load_assets_from_modules`, we've imported all assets in the `assets` module. This approach allows any new assets we create to be automatically added to the `Definitions` object instead of needing to manually add them one by one. - We provided a dictionary of resources to the `resources` parameter. In this example, that's the `ConfigurableLocalOutputNotebookIOManager` resource. @@ -255,10 +255,10 @@ In this step, you'll: ### Step 5.1: Create the Iris dataset asset -To create an asset for the Iris dataset, add the following code to `/tutorial_template/assets/__init__.py`: +To create an asset for the Iris dataset, add the following code to `/tutorial_template/assets.py`: ```python -# /tutorial_template/assets/__init__.py +# /tutorial_template/assets.py from dagstermill import define_dagstermill_asset from dagster import asset, file_relative_path @@ -290,10 +290,10 @@ Let's go over what's happening in this code block: ### Step 5.2: Provide the iris_dataset asset to the notebook asset -Next, we need to tell Dagster that the `iris_datset` asset is input data for the `iris-kmeans` notebook. To do this, add the `ins` parameter to the notebook asset: +Next, we need to tell Dagster that the `iris_dataset` asset is input data for the `iris-kmeans` notebook.
To do this, add the `ins` parameter to the notebook asset: ```python -# tutorial_template/assets/__init__.py +# tutorial_template/assets.py from dagstermill import define_dagstermill_asset from dagster import asset, file_relative_path, AssetIn import pandas as pd @@ -302,7 +302,7 @@ import pandas as pd iris_kmeans_jupyter_notebook = define_dagstermill_asset( name="iris_kmeans_jupyter", - notebook_path=file_relative_path(__file__, "../notebooks/iris-kmeans.ipynb"), + notebook_path=file_relative_path(__file__, "notebooks/iris-kmeans.ipynb"), group_name="template_tutorial", ins={"iris": AssetIn("iris_dataset")}, # this is the new parameter! ) diff --git a/docs/content/integrations/databricks.mdx b/docs/content/integrations/databricks.mdx deleted file mode 100644 index 56cd1426d5f15..0000000000000 --- a/docs/content/integrations/databricks.mdx +++ /dev/null @@ -1,175 +0,0 @@ ---- -title: "Using Dagster with Databricks | Dagster Docs" -description: Dagster can orchestrate Databricks jobs alongside other technologies. ---- - -# Using Databricks with Dagster - -Dagster can orchestrate your Databricks jobs and other Databricks API calls, making it easy to chain together multiple Databricks jobs and orchestrate Databricks alongside your other technologies. - ---- - -## Prerequisites - -To get started, you will need to install the `dagster` and `dagster-databricks` Python packages: - -```bash -pip install dagster dagster-databricks -``` - -You'll also want to have a Databricks workspace with an existing project that is deployed with a Databricks job. If you don't have this, [follow the Databricks quickstart](https://docs.databricks.com/workflows/jobs/jobs-quickstart.html) to set one up. - -To manage your Databricks job from Dagster, you'll need three values, which can be set as [environment variables in Dagster](/guides/dagster/using-environment-variables-and-secrets): - -1. A `host` for connecting with your Databricks workspace, starting with `https://`, stored in an environment variable `DATABRICKS_HOST`, -2. A `token` corresponding to a personal access token for your Databricks workspace, stored in an environment variable `DATABRICKS_TOKEN`, and -3. A `DATABRICKS_JOB_ID` for the Databricks job you want to run. - -You can follow the [Databricks API authentication instructions](https://docs.databricks.com/dev-tools/python-api.html#step-1-set-up-authentication) to retrieve these values. - ---- - -## Step 1: Connecting to Databricks - -The first step in using Databricks with Dagster is to tell Dagster how to connect to your Databricks workspace using a Databricks [resource](/concepts/resources). This resource contains information on the location of your Databricks workspace and any credentials sourced from environment variables that are required to access it. You can access the underlying [Databricks API client](https://docs.databricks.com/dev-tools/python-api.html) to communicate to your Databricks workspace by configuring the resource. - -For more information about the Databricks resource, see the [API reference](/\_apidocs/libraries/dagster-databricks). 
- -```python startafter=start_define_databricks_client_instance endbefore=end_define_databricks_client_instance file=/integrations/databricks/databricks.py dedent=4 -from dagster_databricks import databricks_client - -databricks_client_instance = databricks_client.configured( - { - "host": {"env": "DATABRICKS_HOST"}, - "token": {"env": "DATABRICKS_TOKEN"}, - } -) -``` - ---- - -## Step 2: Create an op/asset that connects to Databricks - -In this step, we'll demonstrate several ways to model a Databricks API call as either a Dagster [op](/concepts/ops-jobs-graphs/ops) or the computation backing a [Software-defined asset](/concepts/assets/software-defined-assets). You can either: - -- Use the `dagster-databricks` op factories, which create ops that invoke the Databricks Jobs' [Run Now](https://docs.databricks.com/api-explorer/workspace/jobs/runnow) ([`create_databricks_run_now_op`](/\_apidocs/libraries/dagster-databricks)) or [Submit Run](https://docs.databricks.com/api-explorer/workspace/jobs/submit) ([`create_databricks_submit_run_op`](/\_apidocs/libraries/dagster-databricks)) APIs, or -- Manually create a Dagster op or asset that connects to Databricks using the configured Databricks resource. - -Afterward, we create a Dagster [job](/concepts/ops-jobs-graphs/jobs) that invokes the op or selects the asset to run the Databricks API call. - -For guidance on deciding whether to use an op or asset, refer to the [Understanding how assets relate to ops guide](/guides/dagster/how-assets-relate-to-ops-and-graphs). - - - - - -```python startafter=start_define_databricks_op_factories endbefore=end_define_databricks_op_factories file=/integrations/databricks/databricks.py dedent=4 -from dagster_databricks import create_databricks_run_now_op - -my_databricks_run_now_op = create_databricks_run_now_op( - databricks_job_id=DATABRICKS_JOB_ID, -) - -@job(resource_defs={"databricks": databricks_client_instance}) -def my_databricks_job(): - my_databricks_run_now_op() -``` - - - - - -```python startafter=start_define_databricks_custom_op endbefore=end_define_databricks_custom_op file=/integrations/databricks/databricks.py dedent=4 -from databricks_cli.sdk import DbfsService - -from dagster import ( - AssetExecutionContext, - job, - op, -) - -@op(required_resource_keys={"databricks"}) -def my_databricks_op(context: AssetExecutionContext) -> None: - databricks_api_client = context.resources.databricks.api_client - dbfs_service = DbfsService(databricks_api_client) - - dbfs_service.read(path="/tmp/HelloWorld.txt") - -@job(resource_defs={"databricks": databricks_client_instance}) -def my_databricks_job(): - my_databricks_op() -``` - - - - - -```python startafter=start_define_databricks_custom_asset endbefore=end_define_databricks_custom_asset file=/integrations/databricks/databricks.py dedent=4 -from databricks_cli.sdk import JobsService - -from dagster import ( - AssetExecutionContext, - AssetSelection, - asset, - define_asset_job, -) - -@asset(required_resource_keys={"databricks"}) -def my_databricks_table(context: AssetExecutionContext) -> None: - databricks_api_client = context.resources.databricks.api_client - jobs_service = JobsService(databricks_api_client) - - jobs_service.run_now(job_id=DATABRICKS_JOB_ID) - -materialize_databricks_table = define_asset_job( - name="materialize_databricks_table", - selection=AssetSelection.keys("my_databricks_table"), -) -``` - - - - - ---- - -## Step 3: Schedule your Databricks computation - -Now that your Databricks API calls are modeled within Dagster, you can 
[schedule](/concepts/partitions-schedules-sensors/schedules) them to run regularly. - -In the example below, we schedule the `materialize_databricks_table` and `my_databricks_job` jobs to run daily: - -```python startafter=start_schedule_databricks endbefore=end_schedule_databricks file=/integrations/databricks/databricks.py dedent=4 -from dagster import ( - AssetSelection, - Definitions, - ScheduleDefinition, -) - -defs = Definitions( - assets=[my_databricks_table], - schedules=[ - ScheduleDefinition( - job=materialize_databricks_table, - cron_schedule="@daily", - ), - ScheduleDefinition( - job=my_databricks_job, - cron_schedule="@daily", - ), - ], - jobs=[my_databricks_job], - resources={"databricks": databricks_client_instance}, -) -``` - ---- - -## What's next? - -By now, you should have a working Databricks and Dagster integration! - -What's next? From here, you can: - -- Learn more about [software-defined assets](/concepts/assets/software-defined-assets) -- Check out the [`dagster-databricks` API docs](/\_apidocs/libraries/dagster-databricks) diff --git a/docs/content/integrations/dbt-cloud.mdx b/docs/content/integrations/dbt-cloud.mdx index bb1c6ee757b81..20bbe441e1150 100644 --- a/docs/content/integrations/dbt-cloud.mdx +++ b/docs/content/integrations/dbt-cloud.mdx @@ -1,9 +1,9 @@ --- -title: "Using Dagster with dbt Cloud" +title: "dbt Cloud & Dagster | Dagster Docs" description: Dagster can orchestrate dbt Cloud alongside other technologies. --- -# Using dbt Cloud with Dagster +# dbt Cloud & Dagster Using the local dbt Core CLI? Check out the{" "} @@ -13,7 +13,7 @@ description: Dagster can orchestrate dbt Cloud alongside other technologies. ! -Dagster allows you to run dbt Cloud alongside other technologies like Spark, Python, etc., and has built-in support for loading dbt Cloud models, seeds, and snapshots as [Software-defined Assets](/concepts/assets/software-defined-assets). +Dagster allows you to run dbt Cloud alongside other technologies like Spark, Python, etc., and has built-in support for loading dbt Cloud models, seeds, and snapshots as [asset definitions](/concepts/assets/software-defined-assets). --- @@ -64,7 +64,7 @@ In this example, is used to pass in dbt Cloud crede ## Step 2: Loading dbt Cloud models as assets -In this step, you'll load the dbt Cloud models managed by a dbt Cloud job into Dagster as [Software-defined Assets](/concepts/assets/software-defined-assets). +In this step, you'll load the dbt Cloud models managed by a dbt Cloud job into Dagster as [asset definitions](/concepts/assets/software-defined-assets). For context, a dbt Cloud job defines a set of commands to run for a dbt Cloud project. The dbt Cloud models managed by a dbt Cloud job are the models that are run by the job after filtering options are respected. @@ -93,7 +93,7 @@ When invoked, the function: 1. Invokes your dbt Cloud job with command overrides to compile your dbt project, 2. Parses the metadata provided by dbt Cloud, and -3. Generates a set of Software-defined Assets reflecting the models in the project managed by the dbt Cloud job. Materializing these assets will run the dbt Cloud job that is represented by the loaded assets. +3. Generates a set of asset definitions reflecting the models in the project managed by the dbt Cloud job. Materializing these assets will run the dbt Cloud job that is represented by the loaded assets. --- @@ -203,5 +203,5 @@ By now, you should have a working dbt Cloud and Dagster integration and a handfu
From here, you can: -- Learn more about [Software-defined Assets](/concepts/assets/software-defined-assets) +- Learn more about [asset definitions](/concepts/assets/software-defined-assets) - Check out the [`dagster-dbt` API docs](/\_apidocs/libraries/dagster-dbt) diff --git a/docs/content/integrations/dbt.mdx b/docs/content/integrations/dbt.mdx index d94531a3156ea..61c3a0f1b7aa2 100644 --- a/docs/content/integrations/dbt.mdx +++ b/docs/content/integrations/dbt.mdx @@ -1,24 +1,23 @@ --- -title: "dbt + Dagster" +title: "dbt & Dagster | Dagster Docs" description: Dagster can orchestrate dbt alongside other technologies. --- -# dbt + Dagster +# dbt & Dagster - Using Dagster Cloud? Automatically load your dbt models as Dagster - assets by{" "} + Using Dagster+? Automatically load your dbt models as Dagster assets by{" "} - importing an existing dbt project in Dagster Cloud + importing an existing dbt project in Dagster+ Dagster orchestrates dbt alongside other technologies, so you can schedule dbt with Spark, Python, etc. in a single data pipeline. -Dagster's [Software-defined Asset](/concepts/assets/software-defined-assets) approach allows Dagster to understand dbt at the level of individual dbt models. This means that you can: +Dagster's [asset definition](/concepts/assets/software-defined-assets) approach allows Dagster to understand dbt at the level of individual dbt models. This means that you can: - Use Dagster's UI or APIs to run subsets of your dbt models, seeds, and snapshots. - Track failures, logs, and run history for individual dbt models, seeds, and snapshots. @@ -26,7 +25,7 @@ Dagster's [Software-defined Asset](/concepts/assets/software-defined-assets) app An asset graph like this: - + @@ -62,8 +61,7 @@ def dbt_project_assets(context: AssetExecutionContext, dbt: DbtCliResource): compute_kind="tensorflow", deps=[get_asset_key_for_model([dbt_project_assets], "daily_order_summary")], ) -def predicted_orders(): - ... +def predicted_orders(): ... ``` --- @@ -73,14 +71,14 @@ def predicted_orders(): There are a few ways to get started with Dagster and dbt: - Take the [tutorial](/integrations/dbt/using-dbt-with-dagster). We'll walk you through setting up dbt and Dagster together on your computer, using dbt's example [jaffle shop project](https://github.com/dbt-labs/jaffle_shop), the [dagster-dbt library](/\_apidocs/libraries/dagster-dbt), and a data warehouse, such as [DuckDB](https://duckdb.org/). By the end, you'll have a working dbt and Dagster project and a handful of materialized Dagster assets, including a chart powered by data from your dbt models. -- Play around with a [working dbt + Dagster project](https://github.com/dagster-io/dagster/tree/master/examples/assets_dbt_python). -- Browse the [dagster-dbt integration reference](/integrations/dbt/reference) for short lessons on Dagster + dbt topics. -- Review the [API docs](/\_apidocs/libraries/dagster-dbt) for the dagster-dbt library. -- Automatically load your dbt models as Dagster assets by [importing an existing dbt project into Dagster Cloud](https://dagster.cloud/signup?next=/prod/getting-started%3Ftab%3Dimport_dbt_core_project%26serverless%3D1). +- Play around with a [working dbt & Dagster project](https://github.com/dagster-io/dagster/tree/master/examples/assets_dbt_python). +- Browse the [`dagster-dbt` integration reference](/integrations/dbt/reference) for short lessons on dbt and Dagster topics. +- Review the [API docs](/\_apidocs/libraries/dagster-dbt) for the `dagster-dbt` library. 
+- Automatically load your dbt models as Dagster assets by [importing an existing dbt project into Dagster+](https://dagster.cloud/signup?next=/prod/getting-started%3Ftab%3Dimport_dbt_core_project%26serverless%3D1). --- -## Understanding how dbt models relate to Dagster Software-defined assets +## Understanding how dbt models relate to Dagster asset definitions diff --git a/docs/content/integrations/dbt/quickstart.mdx b/docs/content/integrations/dbt/quickstart.mdx new file mode 100644 index 0000000000000..c213eb3da3818 --- /dev/null +++ b/docs/content/integrations/dbt/quickstart.mdx @@ -0,0 +1,328 @@ +--- +title: "Dagster and dbt quickstart | Dagster Docs" +description: Get started quickly with this simple dbt & Dagster example. +--- + +# Dagster & dbt quickstart + +This quickstart will get your dbt project up and running quickly with Dagster. By the end of this guide, you'll have an integrated Dagster and dbt project and be able to view it in the Dagster UI. + +
      + Prerequisites + +To complete the steps in this guide, you'll need: + +- A valid dbt project + - A dbt project must contain the `dbt_project.yml` and `profiles.yml` files + - A test project like [Jaffle Shop](https://github.com/dagster-io/jaffle_shop) can be used + +
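Before wiring the project into Dagster, you can optionally confirm that it is valid on its own. This is only a sanity check, and it assumes your dbt profile is already configured:

```shell
cd my_dbt_project  # your dbt project directory
dbt parse          # validates dbt_project.yml and compiles the project
```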
+ +--- + +## Step 1: Set up your environment + + + Note: We strongly recommend installing Dagster inside a + Python virtualenv. Refer to the{" "} + Dagster installation docs for more + information. + + +Install dbt, Dagster, and the Dagster webserver by running the following: + +```shell +pip install dagster-dbt dagster-webserver +``` + +The `dagster-dbt` library installs both `dbt-core` and `dagster` as dependencies. Refer to the [dbt](https://docs.getdbt.com/dbt-cli/install/overview) and [Dagster](/getting-started/install) installation docs for more information. + +Depending on your dbt project, other requirements may be needed. In most cases, you'll need to install the library that supports your [dbt adapter](https://docs.getdbt.com/docs/supported-data-platforms#types-of-adapters). For instance, install `dbt-duckdb` if you are using DuckDB as your dbt adapter. + +--- + +## Step 2: Load your dbt project into Dagster + +Next, you'll load your dbt project into Dagster. Use the tabs to view instructions for how to accomplish this. + + + + +Select one of the following to load your dbt project into Dagster: + +- [**Option 1:**](#option-1-create-a-single-dagster-file) Create a Dagster project in a single file +- [**Option 2:**](#option-2-create-a-new-dagster-project) Create a new Dagster project that wraps a dbt project by using the `dagster-dbt` command line interface (CLI) +- [**Option 3:**](#option-3-use-an-existing-dagster-project) Use an existing Dagster project + + + + +### Option 1: Create a single Dagster file + +You can run your dbt project with Dagster by creating a single file. For this example, let's consider a basic use case - say you want to represent your dbt models as Dagster assets and run them daily at midnight. + +With your text editor of choice, create a Python file in the same directory as your dbt project directory and add the following code. Note that since this file contains [all Dagster definitions required for your code location](/concepts/code-locations), it is recommended to name it `definitions.py`. + +The following code assumes that your Python file and dbt project directory are adjacent in the same directory. If that's not the case, make sure to update the `RELATIVE_PATH_TO_MY_DBT_PROJECT` constant so that it points to your dbt project. + +```python file=/integrations/dbt/quickstart/with_single_file.py startafter=start_example endbefore=end_example +from pathlib import Path + +from dagster import AssetExecutionContext, Definitions +from dagster_dbt import ( + DbtCliResource, + DbtProject, + build_schedule_from_dbt_selection, + dbt_assets, +) + +RELATIVE_PATH_TO_MY_DBT_PROJECT = "./my_dbt_project" + +my_project = DbtProject( + project_dir=Path(__file__) + .joinpath("..", RELATIVE_PATH_TO_MY_DBT_PROJECT) + .resolve(), +) +my_project.prepare_if_dev() + + +@dbt_assets(manifest=my_project.manifest_path) +def my_dbt_assets(context: AssetExecutionContext, dbt: DbtCliResource): + yield from dbt.cli(["build"], context=context).stream() + + +my_schedule = build_schedule_from_dbt_selection( + [my_dbt_assets], + job_name="materialize_dbt_models", + cron_schedule="0 0 * * *", + dbt_select="fqn:*", +) + +defs = Definitions( + assets=[my_dbt_assets], + schedules=[my_schedule], + resources={ + "dbt": DbtCliResource(project_dir=my_project), + }, +) +``` + + + + +### Option 2: Create a new Dagster project + +This approach uses the `dagster-dbt` CLI to create a new Dagster project and wrap it around a dbt project.
Running this command requires two arguments: + +- `--project-name` - The name of the Dagster project to be created. In our example, this will be `my_dagster_project`. +- `--dbt-project-dir` - The path to the dbt project. In our example, this will be `./my_dbt_project`, which means our current location is in the directory where `my_dbt_project` is located. + +In our example, our command would look like this: + +```shell +dagster-dbt project scaffold --project-name my_dagster_project --dbt-project-dir ./my_dbt_project +``` + +This command will create a new directory called `my_dagster_project/` inside the current directory. The new `my_dagster_project/` directory will contain a set of files that define a Dagster project to load the dbt project provided in `./my_dbt_project`. + + + + +### Option 3: Use an existing Dagster project + +Existing Dagster projects can also be used to run a dbt project. To do this, you'll need to: + +1. Use the [`dagster-dbt` library](/\_apidocs/libraries/dagster-dbt) to add `DbtProject` and `@dbt_assets` objects +2. Add the new objects to your Dagster project's `Definitions` object + +**Note**: This example assumes that your existing Dagster project includes both `assets.py` and `definitions.py` files, among other required files like `setup.py` and `pyproject.toml`. For example, your project might look like this: + +```shell +my_dagster_project +├── __init__.py +├── assets.py +├── definitions.py +├── pyproject.toml +├── setup.cfg +└── setup.py +``` + +1. Change directories to the Dagster project directory: + + ```shell + cd my_dagster_project/ + ``` + +2. Create a Python file named `project.py` and add the following code: + + ```python file=/integrations/dbt/quickstart/with_project.py startafter=start_dbt_project_example endbefore=end_dbt_project_example + from pathlib import Path + + from dagster_dbt import DbtProject + + RELATIVE_PATH_TO_MY_DBT_PROJECT = "./my_dbt_project" + + my_project = DbtProject( + project_dir=Path(__file__) + .joinpath("..", RELATIVE_PATH_TO_MY_DBT_PROJECT) + .resolve(), + ) + my_project.prepare_if_dev() + ``` + + The `DbtProject` object is a representation of the dbt project that assists with `manifest.json` preparation. + +3. In your project's `assets.py` file, add the following code: + + ```python file=/integrations/dbt/quickstart/with_project.py startafter=start_dbt_assets_example endbefore=end_dbt_assets_example + from dagster import AssetExecutionContext + from dagster_dbt import DbtCliResource, dbt_assets + + from .project import my_project + + @dbt_assets(manifest=my_project.manifest_path) + def my_dbt_assets(context: AssetExecutionContext, dbt: DbtCliResource): + yield from dbt.cli(["build"], context=context).stream() + ``` + + The `@dbt_assets` decorator allows Dagster to create a definition for how to compute a set of dbt resources, described by a `manifest.json`. + +4. In your project's `definitions.py` file, update the `Definitions` object to include the newly created objects: + + ```python file=/integrations/dbt/quickstart/with_project.py startafter=start_dbt_definitions_example endbefore=end_dbt_definitions_example + from dagster import Definitions + from dagster_dbt import DbtCliResource + + from .assets import my_dbt_assets + from .project import my_project + + defs = Definitions( + ..., + assets=[ + ..., + # Add the dbt assets alongside your other assets + my_dbt_assets, + ], + resources={ + ...: ..., + # Add the dbt resource alongside your other resources + "dbt": DbtCliResource(project_dir=my_project), + }, + ) + ``` + +With these changes, your existing Dagster project is ready to run your dbt project.
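If you created a schedule as in Option 1, note that the `dbt_select` argument of `build_schedule_from_dbt_selection` accepts any dbt selection string, so a schedule can target a subset of your dbt project instead of `fqn:*`. A small sketch, assuming a hypothetical `daily` tag defined in your dbt project:

```python
from dagster_dbt import build_schedule_from_dbt_selection

# Hypothetical: materialize only the models tagged "daily" in dbt
my_daily_schedule = build_schedule_from_dbt_selection(
    [my_dbt_assets],
    job_name="materialize_daily_dbt_models",
    cron_schedule="0 0 * * *",
    dbt_select="tag:daily",
)
```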
+ + + + +--- + +## Step 3: Run your dbt project in the Dagster UI + +Now that your code is ready, you can use the [Dagster UI](/concepts/webserver/ui) to take a look at your dbt project. Use the tabs to view instructions for starting the UI. + + + + +How you [start the UI](/guides/running-dagster-locally) depends on which approach you took to load your dbt project into Dagster: + +- **If you created a single Dagster file**, [use Option 1](#option-1-from-a-dagster-file) +- **If you created a new Dagster project or used an existing project**, [use Option 2](#option-2-from-a-dagster-project) + + + + +### Option 1: From a Dagster file + +1. Locate the Dagster file containing your definitions. If you created a single Dagster file in the [previous section (Option 1)](#option-1-create-a-single-dagster-file), this file will be `definitions.py`. + +2. To start Dagster's UI, run the following: + + ```shell + dagster dev -f definitions.py + ``` + + Which will result in output similar to: + + ```shell + Serving dagster-webserver on http://127.0.0.1:3000 in process 70635 + ``` + + + + +### Option 2: From a Dagster project + +1. Change directories to the Dagster project directory: + + ```shell + cd my_dagster_project/ + ``` + +2. To start Dagster's UI, run the following: + + ```shell + dagster dev + ``` + + Which will result in output similar to: + + ```shell + Serving dagster-webserver on http://127.0.0.1:3000 in process 70635 + ``` + + + + +In your browser, navigate to . The page will display the asset graph for the job created by the schedule definition: + + + + + +In Dagster, running a dbt model corresponds to _materializing_ an asset. The [schedule definition](/concepts/automation/schedules) included in your Dagster project's code location ( object) will materialize the assets at its next cron tick. + +Assets can also be materialized manually by clicking the **Materialize all** button near the top right corner of the page. + +--- + +## What's next? + +Congratulations on successfully running your dbt project in Dagster! + +To learn more about Dagster's dbt integration and how to handle more complex use cases, you can: + +- Complete the [Dagster & dbt tutorial](/integrations/dbt/using-dbt-with-dagster) +- Learn how to [use dbt with Dagster+](/integrations/dbt/using-dbt-with-dagster-plus) +- Check out the official [Dagster & dbt course on Dagster University](https://courses.dagster.io) + +--- + +## Related + + + + + + + diff --git a/docs/content/integrations/dbt/reference.mdx b/docs/content/integrations/dbt/reference.mdx index dd09e5732a79c..5da3094b4045d 100644 --- a/docs/content/integrations/dbt/reference.mdx +++ b/docs/content/integrations/dbt/reference.mdx @@ -12,32 +12,31 @@ description: Dagster can orchestrate dbt alongside other technologies. This reference provides a high-level look at working with dbt models through Dagster's [software-defined assets](/concepts/assets/software-defined-assets) framework using the [`dagster-dbt` integration library](/\_apidocs/libraries/dagster-dbt). -For a step-by-step implementation walkthrough, refer to the [Using dbt with Dagster software-defined assets tutorial](/integrations/dbt/using-dbt-with-dagster). +For a step-by-step implementation walkthrough, refer to the [Using dbt with Dagster asset definitions tutorial](/integrations/dbt/using-dbt-with-dagster). 
--- ## Relevant APIs -| Name | Description | -| ---------------------------------------------------------------------------------------------- | ----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | -| [`dagster-dbt project scaffold`](/_apidocs/libraries/dagster-dbt#dagster-dbt-project-scaffold) | A CLI command to initialize a new Dagster project for an existing dbt project. | -| | A decorator used to define Dagster assets for dbt models defined in a dbt manifest. | -| | A class that defines a Dagster resource used to execute dbt CLI commands. | -| | A class that defines the representation of an invoked dbt command. | -| | A class that can be overridden to customize how Dagster asset metadata is derived from a dbt manifest. | -| | A class with settings to enable Dagster features for a dbt project. | -| | A class that defines a selection of assets from a dbt manifest and a dbt selection string. | -| | A helper method that builds a from a dbt manifest and dbt selection string. | -| | A helper method that builds a from a dbt manifest, dbt selection string, and cron string. | -| | A helper method that retrieves the for a dbt model. | -| | A helper method that retrieves the for a dbt source with a singular table. | -| | A helper method that retrieves the 's for a dbt source with multiple tables. | -| | Deprecated in favor of , , and . | -| | Deprecated in favor of . | +| Name | Description | +| ---------------------------------------------------------------------------------------------- | ----------------------------------------------------------------------------------------------------------------------------------------------------- | +| [`dagster-dbt project scaffold`](/_apidocs/libraries/dagster-dbt#dagster-dbt-project-scaffold) | A CLI command to initialize a new Dagster project for an existing dbt project. | +| | A decorator used to define Dagster assets for dbt models defined in a dbt manifest. | +| | A class that defines a Dagster resource used to execute dbt CLI commands. | +| | A class that defines the representation of an invoked dbt command. | +| | A class that defines the representation of a dbt project and related settings that assist with managing dependencies and `manifest.json` preparation. | +| | A class that can be overridden to customize how Dagster asset metadata is derived from a dbt manifest. | +| | A class with settings to enable Dagster features for a dbt project. | +| | A class that defines a selection of assets from a dbt manifest and a dbt selection string. | +| | A helper method that builds a from a dbt manifest and dbt selection string. | +| | A helper method that builds a from a dbt manifest, dbt selection string, and cron string. | +| | A helper method that retrieves the for a dbt model. | +| | A helper method that retrieves the for a dbt source with a singular table. | +| | A helper method that retrieves the 's for a dbt source with multiple tables. | --- -## dbt models and Dagster software-defined assets +## dbt models and Dagster asset definitions @@ -48,7 +47,7 @@ For a step-by-step implementation walkthrough, refer to the [Using dbt with Dags Check out{" "} - part two of the dbt + Dagster tutorial + part two of the dbt & Dagster tutorial {" "} to see this concept in context. @@ -68,7 +67,7 @@ This creates a directory called `project_dagster/` inside the current directory. 
Check out{" "} - part two of the dbt + Dagster tutorial + part two of the dbt & Dagster tutorial {" "} to see this concept in context. @@ -82,44 +81,33 @@ The manifest can be created in two ways: When deploying your Dagster project to production, **we recommend generating the manifest at build time** to avoid the overhead of recompiling your dbt project every time your Dagster code is executed. A `manifest.json` should be precompiled and included in the Python package for your Dagster code. -In the Dagster project created by the [`dagster-dbt project scaffold`](/\_apidocs/libraries/dagster-dbt#dagster-dbt-project-scaffold) command line interface, we offer you both ways to load your dbt models: +The easiest way to handle the creation of your manifest file is to use . -```python startafter=start_compile_dbt_manifest endbefore=end_compile_dbt_manifest file=/integrations/dbt/dbt.py dedent=4 -import os -from pathlib import Path +In the Dagster project created by the [`dagster-dbt project scaffold`](/\_apidocs/libraries/dagster-dbt#dagster-dbt-project-scaffold) command, the creation of your manifest is handled at run time during development: -from dagster_dbt import DbtCliResource +```python startafter=start_compile_dbt_manifest_with_dbt_project endbefore=end_compile_dbt_manifest_with_dbt_project file=/integrations/dbt/dbt.py dedent=4 +from pathlib import Path -dbt_project_dir = Path(__file__).joinpath("..", "..", "..").resolve() -dbt = DbtCliResource(project_dir=os.fspath(dbt_project_dir)) +from dagster_dbt import DbtProject -# If DAGSTER_DBT_PARSE_PROJECT_ON_LOAD is set, a manifest will be created at runtime. -# Otherwise, we expect a manifest to be present in the project's target directory. -if os.getenv("DAGSTER_DBT_PARSE_PROJECT_ON_LOAD"): - dbt_manifest_path = ( - dbt.cli( - ["--quiet", "parse"], - target_path=Path("target"), - ) - .wait() - .target_path.joinpath("manifest.json") - ) -else: - dbt_manifest_path = dbt_project_dir.joinpath("target", "manifest.json") +my_dbt_project = DbtProject( + project_dir=Path(__file__).joinpath("..", "..", "..").resolve(), + packaged_project_dir=Path(__file__) + .joinpath("..", "..", "dbt-project") + .resolve(), +) +my_dbt_project.prepare_if_dev() ``` -As the comment explains, the code gives you a choice about how to create this dbt manifest. Based on the `DAGSTER_DBT_PARSE_PROJECT_ON_LOAD` environment variable, either: - -1. **At run time**: This code generates the `manifest.json` for you. This is the easiest option during development because you never need to worry about the file being out-of-date with your dbt project, or -2. **At build time**: This code leaves it up to you to generate the `manifest.json` file on your own, and this code just reads it. +The manifest path can then be accessed with `my_dbt_project.manifest_path`. When developing locally, you can run the following command to generate the manifest at run time for your dbt and Dagster project: ```shell -DAGSTER_DBT_PARSE_PROJECT_ON_LOAD=1 dagster dev +dagster dev ``` -In production, `DAGSTER_DBT_PARSE_PROJECT_ON_LOAD` should be unset so that your project uses the precompiled manifest. +In production, a precompiled manifest should be used. Using , the manifest can be created at build time by running the [`dagster-dbt project prepare-and-package`](/\_apidocs/libraries/dagster-dbt#dagster-dbt-project-prepare-and-package) command in your CI/CD workflow. For more information, see the [Deploying a Dagster project with a dbt project](#deploying-a-dagster-project-with-a-dbt-project) section. 
--- @@ -144,17 +132,19 @@ In your CI/CD workflows for your Dagster project: 1. Include any secrets that are required by your dbt project in your CI/CD environment. 2. Clone the dbt project repository as a subdirectory of your Dagster project. -3. Run `dbt deps` to build your dbt project's dependencies. -4. Run `dbt parse` to create a dbt manifest for your Dagster project. +3. Run `dagster-dbt project prepare-and-package --file path/to/project.py` to + - Build your dbt project's dependencies, + - Create a dbt manifest for your Dagster project, and + - Package your dbt project In the CI/CD workflows for your dbt project, set up a dispatch action to trigger a deployment of your Dagster project when your dbt project changes. ### Deploying a dbt project from a monorepo - With Dagster Cloud, we streamline this - option. As part of our Dagster Cloud onboarding for dbt users, we can - automatically create a Dagster project in an existing dbt project repository. + With Dagster+, we streamline this + option. As part of our Dagster+ onboarding for dbt users, we can automatically + create a Dagster project in an existing dbt project repository. If you are managing your Dagster project in the same git repository as your dbt project, you should include the following steps in your CI/CD workflows. @@ -162,8 +152,59 @@ If you are managing your Dagster project in the same git repository as your dbt In your CI/CD workflows for your Dagster and dbt project: 1. Include any secrets that are required by your dbt project in your CI/CD environment. -2. Run `dbt deps` to build your dbt project's dependencies. -3. Run `dbt parse` to create a dbt manifest for your Dagster project. +2. Run `dagster-dbt project prepare-and-package --file path/to/project.py` to + - Build your dbt project's dependencies, + - Create a dbt manifest for your Dagster project, and + - Package your dbt project + +--- + +## Using config with `@dbt_assets` + +Like a standard \[software-defined asset], `@dbt_assets` can use a config system to enable [run configuration](/concepts/configuration/config-schema). This allows you to provide parameters to jobs at the time they're executed. + +In the context of dbt, this can be useful if you want to run commands with specific flags for certain use cases. For instance, you may want to add [the --full-refresh flag](https://docs.getdbt.com/reference/resource-configs/full_refresh) to your dbt commands in some cases. Using a config system, the `@dbt_assets` object can be easily modified to support this use case. + +```python startafter=start_config_dbt_assets endbefore=end_config_dbt_assets file=/integrations/dbt/dbt.py dedent=4 +from pathlib import Path + +from dagster import AssetExecutionContext, Config +from dagster_dbt import DbtCliResource, DbtProject, dbt_assets + +my_dbt_project = DbtProject(project_dir=Path("path/to/dbt_project")) + +class MyDbtConfig(Config): + full_refresh: bool + +@dbt_assets(manifest=my_dbt_project.manifest_path) +def my_dbt_assets( + context: AssetExecutionContext, dbt: DbtCliResource, config: MyDbtConfig +): + dbt_build_args = ["build"] + if config.full_refresh: + dbt_build_args += ["--full-refresh"] + + yield from dbt.cli(dbt_build_args, context=context).stream() +``` + +Now that the `@dbt_assets` object is updated, the run configuration can be passed to a job.
+ +```python startafter=start_config_dbt_job endbefore=end_config_dbt_job file=/integrations/dbt/dbt.py dedent=4 +from dagster import RunConfig, define_asset_job +from dagster_dbt import build_dbt_asset_selection + +my_job = define_asset_job( + name="all_dbt_assets", + selection=build_dbt_asset_selection( + [my_dbt_assets], + ), + config=RunConfig( + ops={"my_dbt_assets": MyDbtConfig(full_refresh=True)} + ), +) +``` + +In the example above, the job is configured to use the `--full-refresh` flag with the dbt build command when materializing the assets. --- @@ -179,14 +220,15 @@ In this example, we use the AssetKey: return super().get_asset_key(dbt_resource_props).with_prefix("snowflake") @dbt_assets( - manifest=manifest_path, + manifest=my_dbt_project.manifest_path, dagster_dbt_translator=CustomDagsterDbtTranslator(), ) def my_dbt_assets(context: AssetExecutionContext, dbt: DbtCliResource): @@ -295,7 +337,7 @@ For dbt models, seeds, and snapshots, the default Dagster group name will be the | | `None` | `None` | | | finance | finance | -There are two ways to customize the asset keys generated by Dagster for dbt assets: +There are two ways to customize the group names generated by Dagster for dbt assets: 1. Defining [meta config](https://docs.getdbt.com/reference/resource-configs/meta) on your dbt node, or 2. Overriding Dagster's group name generation by implementing a custom @@ -316,10 +358,10 @@ Alternatively, to override the Dagster group name generation for all dbt nodes i ```python startafter=start_custom_group_name_dagster_dbt_translator endbefore=end_custom_group_name_dagster_dbt_translator file=/integrations/dbt/dbt.py dedent=4 from pathlib import Path from dagster import AssetExecutionContext -from dagster_dbt import DagsterDbtTranslator, DbtCliResource, dbt_assets +from dagster_dbt import DagsterDbtTranslator, DbtCliResource, DbtProject, dbt_assets from typing import Any, Mapping, Optional -manifest_path = Path("path/to/dbt_project/target/manifest.json") +my_dbt_project = DbtProject(project_dir=Path("path/to/dbt_project")) class CustomDagsterDbtTranslator(DagsterDbtTranslator): def get_group_name( @@ -328,7 +370,59 @@ class CustomDagsterDbtTranslator(DagsterDbtTranslator): return "snowflake" @dbt_assets( - manifest=manifest_path, + manifest=my_dbt_project.manifest_path, + dagster_dbt_translator=CustomDagsterDbtTranslator(), +) +def my_dbt_assets(context: AssetExecutionContext, dbt: DbtCliResource): + yield from dbt.cli(["build"], context=context).stream() +``` + +### Customizing owners + +For dbt models, seeds, and snapshots, the default Dagster owner will be the email associated with the [dbt group](https://docs.getdbt.com/docs/build/groups) defined for that node. + +| dbt node type | dbt group name | dbt group's email | Resulting Dagster owner | +| --------------------- | -------------- | ------------------- | ----------------------- | +| model, seed, snapshot | `GROUP_NAME` | `OWNER@DOMAIN.COM` | `OWNER@DOMAIN.COM` | +| | `GROUP_NAME` | `None` | `None` | +| | `None` | `None` | `None` | +| | finance | `owner@company.com` | `owner@company.com` | +| | finance | `None` | `None` | + +There are two ways to customize the owners generated by Dagster for dbt assets: + +1. Defining [meta config](https://docs.getdbt.com/reference/resource-configs/meta) on your dbt node, or +2. 
Overriding Dagster's generation of owners by implementing a custom `DagsterDbtTranslator` + +To override the owners generated by Dagster for a dbt node, you can define a `meta` key in your dbt project file, on your dbt node's property file, or on the node's in-file config block. The following example overrides the Dagster owners for the following model as `owner@company.com` and `team:data@company.com`: + +```yaml +models: + - name: customers + config: + meta: + dagster: + owners: ["owner@company.com", "team:data@company.com"] +``` + +Alternatively, to override the Dagster generation of owners for all dbt nodes in your dbt project, you can create a custom `DagsterDbtTranslator` and implement `get_owners`. The following example defines `owner@company.com` and `team:data@company.com` as the owners for all dbt nodes: + +```python startafter=start_custom_owners_dagster_dbt_translator endbefore=end_custom_owners_dagster_dbt_translator file=/integrations/dbt/dbt.py dedent=4 +from pathlib import Path +from dagster import AssetExecutionContext +from dagster_dbt import DagsterDbtTranslator, DbtCliResource, DbtProject, dbt_assets +from typing import Any, Mapping, Optional, Sequence + +my_dbt_project = DbtProject(project_dir=Path("path/to/dbt_project")) + +class CustomDagsterDbtTranslator(DagsterDbtTranslator): + def get_owners( + self, dbt_resource_props: Mapping[str, Any] + ) -> Optional[Sequence[str]]: + return ["owner@company.com", "team:data@company.com"] + +@dbt_assets( + manifest=my_dbt_project.manifest_path, dagster_dbt_translator=CustomDagsterDbtTranslator(), ) def my_dbt_assets(context: AssetExecutionContext, dbt: DbtCliResource): @@ -345,17 +439,17 @@ To override the Dagster description for all dbt nodes in your dbt project, you c import textwrap from pathlib import Path from dagster import AssetExecutionContext -from dagster_dbt import DagsterDbtTranslator, DbtCliResource, dbt_assets +from dagster_dbt import DagsterDbtTranslator, DbtCliResource, DbtProject, dbt_assets from typing import Any, Mapping -manifest_path = Path("path/to/dbt_project/target/manifest.json") +my_dbt_project = DbtProject(project_dir=Path("path/to/dbt_project")) class CustomDagsterDbtTranslator(DagsterDbtTranslator): def get_description(self, dbt_resource_props: Mapping[str, Any]) -> str: return textwrap.indent(dbt_resource_props.get("raw_sql", ""), "\t") @dbt_assets( - manifest=manifest_path, + manifest=my_dbt_project.manifest_path, dagster_dbt_translator=CustomDagsterDbtTranslator(), ) def my_dbt_assets(context: AssetExecutionContext, dbt: DbtCliResource): @@ -364,17 +458,17 @@ def my_dbt_assets(context: AssetExecutionContext, dbt: DbtCliResource): ### Customizing metadata -For dbt models, seeds, and snapshots, the default Dagster metadata will be the dbt node's table schema. +For dbt models, seeds, and snapshots, the default Dagster definition metadata will be the dbt node's declared column schema. -To override the Dagster metadata for all dbt nodes in your dbt project, you can create a custom `DagsterDbtTranslator` and implement `get_metadata`. The following example defines the metadata of the dbt node as the Dagster metadata, using `MetadataValue`: +To override the Dagster definition metadata for all dbt nodes in your dbt project, you can create a custom `DagsterDbtTranslator` and implement `get_metadata`.
The following example defines the metadata of the dbt node as the Dagster metadata, using `MetadataValue`: ```python startafter=start_custom_metadata_dagster_dbt_translator endbefore=end_custom_metadata_dagster_dbt_translator file=/integrations/dbt/dbt.py dedent=4 from pathlib import Path from dagster import MetadataValue, AssetExecutionContext -from dagster_dbt import DagsterDbtTranslator, DbtCliResource, dbt_assets +from dagster_dbt import DagsterDbtTranslator, DbtCliResource, DbtProject, dbt_assets from typing import Any, Mapping -manifest_path = Path("path/to/dbt_project/target/manifest.json") +my_dbt_project = DbtProject(project_dir=Path("path/to/dbt_project")) class CustomDagsterDbtTranslator(DagsterDbtTranslator): def get_metadata( @@ -385,145 +479,192 @@ class CustomDagsterDbtTranslator(DagsterDbtTranslator): } @dbt_assets( - manifest=manifest_path, + manifest=my_dbt_project.manifest_path, dagster_dbt_translator=CustomDagsterDbtTranslator(), ) def my_dbt_assets(context: AssetExecutionContext, dbt: DbtCliResource): yield from dbt.cli(["build"], context=context).stream() ``` -### Customizing auto-materialize policies +Dagster also supports fetching additional metadata at dbt execution time to attach to asset materializations. For more information, see the [Customizing asset materialization metadata](#customizing-asset-materialization-metadata) section. -For dbt models, seeds, and snapshots, the default `AutoMaterializePolicy` will be `None`. +#### Attaching code reference metadata -There are two ways to customize the auto-materialize policies generated by Dagster for dbt assets: +Dagster's dbt integration can automatically attach [code reference](/guides/dagster/code-references) metadata to the SQL files backing your dbt assets. To enable this feature, set the `enable_code_references` parameter to `True` in the `DagsterDbtTranslatorSettings` passed to your `DagsterDbtTranslator`: -1. Defining [meta config](https://docs.getdbt.com/reference/resource-configs/meta) on your dbt node, or -2. Overriding Dagster's auto-materialize policy generation by implementing a custom `DagsterDbtTranslator`. - -To add an `AutoMaterializePolicy` to a dbt node, you can define a `meta` key in your dbt project file, on your dbt node's property file, or on the node's in-file config block. This policy may be one of two types, `eager` or `lazy`. The following example provides an eager `AutoMaterializePolicy` for the following model: +```python file=/guides/dagster/code_references/with_dbt_code_references.py +from pathlib import Path -```yaml -models: - - name: customers - config: - meta: - dagster: - auto_materialize_policy: - type: eager -``` +from dagster_dbt import ( + DagsterDbtTranslator, + DagsterDbtTranslatorSettings, + DbtCliResource, + DbtProject, + dbt_assets, +) -Alternatively, to override the Dagster auto-materialize policy generation for all dbt nodes in your dbt project, you can create a custom `DagsterDbtTranslator` and implement `get_auto_materialize_policy`.
The following example defines `AutoMaterializePolicy.eager()` as the auto-materialize policy for all dbt nodes: +from dagster import AssetExecutionContext, Definitions, with_source_code_references -```python startafter=start_custom_auto_materialize_policy_dagster_dbt_translator endbefore=end_custom_auto_materialize_policy_dagster_dbt_translator file=/integrations/dbt/dbt.py dedent=4 -from pathlib import Path -from dagster import AssetExecutionContext, AutoMaterializePolicy -from dagster_dbt import DagsterDbtTranslator, DbtCliResource, dbt_assets -from typing import Any, Mapping, Optional +my_project = DbtProject(project_dir=Path("path/to/dbt_project")) -manifest_path = Path("path/to/dbt_project/target/manifest.json") +# links to dbt model source code from assets +dagster_dbt_translator = DagsterDbtTranslator( + settings=DagsterDbtTranslatorSettings(enable_code_references=True) +) -class CustomDagsterDbtTranslator(DagsterDbtTranslator): - def get_auto_materialize_policy( - self, dbt_resource_props: Mapping[str, Any] - ) -> Optional[AutoMaterializePolicy]: - return AutoMaterializePolicy.eager() @dbt_assets( - manifest=manifest_path, - dagster_dbt_translator=CustomDagsterDbtTranslator(), + manifest=my_project.manifest_path, + dagster_dbt_translator=dagster_dbt_translator, + project=my_project, ) def my_dbt_assets(context: AssetExecutionContext, dbt: DbtCliResource): yield from dbt.cli(["build"], context=context).stream() + + +defs = Definitions(assets=with_source_code_references([my_dbt_assets])) ``` -### Customizing freshness policies +### Customizing tags -For dbt models, seeds, and snapshots, the default `FreshnessPolicy` will be `None`. + + In Dagster, tags are key-value pairs. However, in dbt, tags are strings. To + bridge this divide, the dbt tag string is used as the Dagster tag key, and the + Dagster tag value is set to the empty string, + "". Any dbt tags that don't match Dagster's supported tag key format + (e.g. they contain unsupported characters) will be ignored by default. + -There are two ways to customize the freshness policies generated by Dagster for dbt assets: +For dbt models, seeds, and snapshots, the default Dagster tags will be the dbt node's configured tags. -1. Defining [meta config](https://docs.getdbt.com/reference/resource-configs/meta) on your dbt node, or -2. Overriding Dagster's freshness policy generation by implementing a custom `DagsterDbtTranslator`. -To add a `FreshnessPolicy` to a dbt node, you can define a `meta` key in your dbt project file, on your dbt node's property file, or on the node's in-file config block. This config accepts identical arguments to the `FreshnessPolicy` class. The following example applies a `FreshnessPolicy` for the following model: +To override the Dagster tags for all dbt nodes in your dbt project, you can create a custom `DagsterDbtTranslator` and implement `get_tags`.
The following converts dbt tags of the form `foo=bar` to key/value pairs: -```yaml -models: - - name: customers - config: - meta: - dagster: - freshness_policy: - maximum_lag_minutes: 10 - cron_schedule: 0 * * * * - cron_schedule_timezone: US/Pacific +```python startafter=start_custom_tags_dagster_dbt_translator endbefore=end_custom_tags_dagster_dbt_translator file=/integrations/dbt/dbt.py dedent=4 +from pathlib import Path +from dagster import AssetExecutionContext +from dagster_dbt import DagsterDbtTranslator, DbtCliResource, DbtProject, dbt_assets +from typing import Any, Mapping + +my_dbt_project = DbtProject(project_dir=Path("path/to/dbt_project")) + +class CustomDagsterDbtTranslator(DagsterDbtTranslator): + def get_tags(self, dbt_resource_props: Mapping[str, Any]) -> Mapping[str, str]: + dbt_tags = dbt_resource_props.get("tags", []) + dagster_tags = {} + for tag in dbt_tags: + key, _, value = tag.partition("=") + + dagster_tags[key] = value if value else "" + + return dagster_tags + +@dbt_assets( + manifest=my_dbt_project.manifest_path, + dagster_dbt_translator=CustomDagsterDbtTranslator(), +) +def my_dbt_assets(context: AssetExecutionContext, dbt: DbtCliResource): + yield from dbt.cli(["build"], context=context).stream() ``` -Alternatively, to override the Dagster freshness policy generation for all dbt nodes in your dbt project, you can create a custom and implement . The following example defines a with `maximum_lag_minutes=60` as the freshness policy for all dbt nodes: +### Customizing automation conditions + +To override the generated for each dbt node in your dbt project, you can create a custom and implement . The following example defines as the condition for all dbt nodes: -```python startafter=start_custom_freshness_policy_dagster_dbt_translator endbefore=end_custom_freshness_policy_dagster_dbt_translator file=/integrations/dbt/dbt.py dedent=4 +```python startafter=start_custom_automation_condition_dagster_dbt_translator endbefore=end_custom_automation_condition_dagster_dbt_translator file=/integrations/dbt/dbt.py dedent=4 from pathlib import Path -from dagster import AssetExecutionContext, FreshnessPolicy -from dagster_dbt import DagsterDbtTranslator, DbtCliResource, dbt_assets +from dagster import AssetExecutionContext, AutomationCondition +from dagster_dbt import DagsterDbtTranslator, DbtCliResource, DbtProject, dbt_assets from typing import Any, Mapping, Optional -manifest_path = Path("path/to/dbt_project/target/manifest.json") +my_dbt_project = DbtProject(project_dir=Path("path/to/dbt_project")) class CustomDagsterDbtTranslator(DagsterDbtTranslator): - def get_freshness_policy( + def get_automation_condition( self, dbt_resource_props: Mapping[str, Any] - ) -> Optional[FreshnessPolicy]: - return FreshnessPolicy(maximum_lag_minutes=60) + ) -> Optional[AutomationCondition]: + return AutomationCondition.eager() @dbt_assets( - manifest=manifest_path, + manifest=my_dbt_project.manifest_path, dagster_dbt_translator=CustomDagsterDbtTranslator(), ) def my_dbt_assets(context: AssetExecutionContext, dbt: DbtCliResource): yield from dbt.cli(["build"], context=context).stream() ``` ---- +\-- -## dbt models, code versions, and staleness +## dbt models, code versions, and "Unsynced" -Note that Dagster allows the optional specification of a [`code_version`](/concepts/assets/software-defined-assets#asset-code-versions) for each software-defined asset, which are used to track changes. 
The `code_version` for an asset arising from a dbt model is defined automatically as the hash of the SQL defining the DBT model. This allows the asset graph in the UI to indicate which dbt models have new SQL since they were last materialized. +Note that Dagster allows the optional specification of a [`code_version`](/concepts/assets/software-defined-assets#asset-code-versions) for each asset definition, which are used to track changes. The `code_version` for an asset arising from a dbt model is defined automatically as the hash of the SQL defining the DBT model. This allows the asset graph in the UI to use the "Unsynced" status to indicate which dbt models have new SQL since they were last materialized. --- -## Loading dbt tests as asset checks +## Loading dbt tests as asset checks - Dagster asset checks are currently an experimental feature. {" "} - To provide feedback, join our{" "} - - GitHub discussion - {" "} - to share your use case with Dagster asset checks and dbt. + Asset checks for dbt have been enabled by default, starting in `dagster-dbt` 0.23.0.
      + `dbt-core` 1.6 or later is required for full functionality. +
      -Dagster allows you to model your existing dbt tests as [asset checks](/concepts/assets/asset-checks). +Dagster loads your dbt tests as [asset checks](/concepts/assets/asset-checks). + +### Indirect selection + +Dagster uses [dbt indirect selection](https://docs.getdbt.com/reference/global-configs/indirect-selection) to select dbt tests. By default, Dagster won't set `DBT_INDIRECT_SELECTION` so that the set of tests selected by Dagster is the same as the selected by dbt. When required, Dagster will override `DBT_INDIRECT_SELECTION` to `empty` in order to explicitly select dbt tests. For example: + +- Materializing dbt assets and excluding their asset checks +- Executing dbt asset checks without materializing their assets + +### Singular tests + +Dagster will load both generic and singular tests as asset checks. In the event that your singular test depends on multiple dbt models, you can use dbt metadata to specify which Dagster asset it should target. These fields can be filled in as they would be for the dbt [ref function](https://docs.getdbt.com/reference/dbt-jinja-functions/ref). The configuration can be supplied in a [config block](https://docs.getdbt.com/reference/data-test-configs) for the singular test. + +```sql +{{ + config( + meta={ + 'dagster': { + 'ref': { + 'name': 'customers', + 'package_name': 'my_dbt_assets' + 'version': 1, + }, + } + } + ) +}} +``` + +`dbt-core` version 1.6 or later is required for Dagster to read this metadata. + +If this metadata isn't provided, Dagster won't ingest the test as an asset check. It will still run the test and emit a events with the test results. -To enable this for your dbt project, you'll need to define a with that have asset checks enabled. The following example enables asset checks when using : +### Disabling asset checks -```python startafter=start_enable_asset_check_dagster_dbt_translator endbefore=end_enable_asset_check_dagster_dbt_translator file=/integrations/dbt/dbt.py dedent=4 +You can disable modeling your dbt tests as asset checks. The tests will still run and will be emitted as events. To do so you'll need to define a with that have asset checks disabled. The following example disables asset checks when using : + +```python startafter=start_disable_asset_check_dagster_dbt_translator endbefore=end_disable_asset_check_dagster_dbt_translator file=/integrations/dbt/dbt.py dedent=4 from pathlib import Path from dagster import AssetExecutionContext from dagster_dbt import ( DagsterDbtTranslator, DagsterDbtTranslatorSettings, DbtCliResource, + DbtProject, dbt_assets, ) -manifest_path = Path("path/to/dbt_project/target/manifest.json") +my_dbt_project = DbtProject(project_dir=Path("path/to/dbt_project")) dagster_dbt_translator = DagsterDbtTranslator( - settings=DagsterDbtTranslatorSettings(enable_asset_checks=True) + settings=DagsterDbtTranslatorSettings(enable_asset_checks=False) ) @dbt_assets( - manifest=manifest_path, + manifest=my_dbt_project.manifest_path, dagster_dbt_translator=dagster_dbt_translator, ) def my_dbt_assets(context: AssetExecutionContext, dbt: DbtCliResource): @@ -532,6 +673,100 @@ def my_dbt_assets(context: AssetExecutionContext, dbt: DbtCliResource): --- +## Customizing asset materialization metadata + +Dagster supports fetching additional metadata at dbt execution time to attach as [materialization metadata](/concepts/metadata-tags/asset-metadata), which is recorded each time your models are rebuilt and displayed in the Dagster UI. 
+
+### Fetching row count data
+
+
+  Emitting row count data for dbt models is currently an experimental feature.{" "}
+  To use this feature, you'll need to be on at least{" "}
+  dagster>=1.7.6 and
+  dagster-dbt>=0.23.6.
+
+
+Dagster can automatically fetch [row counts](/concepts/metadata-tags/asset-metadata/table-metadata#attaching-row-count) for dbt-generated tables and emit them as [materialization metadata](/concepts/metadata-tags/asset-metadata) to be displayed in the Dagster UI.
+
+Row counts are fetched in parallel to the execution of your dbt models. To enable this feature, call `fetch_row_counts()` on the `DbtEventIterator` returned by the `stream()` dbt CLI call:
+
+```python startafter=start_fetch_row_count endbefore=end_fetch_row_count file=/integrations/dbt/dbt.py dedent=4
+from pathlib import Path
+from dagster import AssetExecutionContext
+from dagster_dbt import DbtProject, DbtCliResource, dbt_assets
+
+my_dbt_project = DbtProject(project_dir=Path("path/to/dbt_project"))
+
+@dbt_assets(
+    manifest=my_dbt_project.manifest_path,
+)
+def my_dbt_assets(context: AssetExecutionContext, dbt: DbtCliResource):
+    yield from dbt.cli(["build"], context=context).stream().fetch_row_counts()
+```
+
+Once your dbt models have been materialized, you can view the row count data in the metadata table.
+
+### Fetching column-level metadata
+
+
+  Emitting column-level metadata for dbt models is currently an experimental
+  feature.{" "}
+  To use this feature, you'll need to be on at least dagster>=1.8.0{" "}
+  and dagster-dbt>=0.24.0.
+
+
+Dagster allows you to emit column-level metadata, like [column schema](/concepts/metadata-tags/asset-metadata/table-metadata#attaching-column-schema) and [column lineage](/concepts/metadata-tags/asset-metadata/column-level-lineage), as [materialization metadata](/concepts/metadata-tags/asset-metadata).
+
+With this metadata, you can view documentation in Dagster for all columns, not just columns described in your dbt project.
+
+Column-level metadata is fetched in parallel to the execution of your dbt models. To enable this feature, call `fetch_column_metadata()` on the `DbtEventIterator` returned by the `stream()` dbt CLI call:
+
+```python startafter=start_fetch_column_metadata endbefore=end_fetch_column_metadata file=/integrations/dbt/dbt.py dedent=4
+from pathlib import Path
+from dagster import AssetExecutionContext
+from dagster_dbt import DbtProject, DbtCliResource, dbt_assets
+
+my_dbt_project = DbtProject(project_dir=Path("path/to/dbt_project"))
+
+@dbt_assets(
+    manifest=my_dbt_project.manifest_path,
+)
+def my_dbt_assets(context: AssetExecutionContext, dbt: DbtCliResource):
+    yield from (
+        dbt.cli(["build"], context=context).stream().fetch_column_metadata()
+    )
+```
+
+### Composing metadata fetching methods
+
+Metadata fetching methods such as `fetch_column_metadata()` can be chained with other metadata fetching methods like `fetch_row_counts()`:
+
+```python startafter=start_fetch_column_metadata_chain endbefore=end_fetch_column_metadata_chain file=/integrations/dbt/dbt.py dedent=4
+from pathlib import Path
+from dagster import AssetExecutionContext
+from dagster_dbt import DbtProject, DbtCliResource, dbt_assets
+
+my_dbt_project = DbtProject(project_dir=Path("path/to/dbt_project"))
+
+@dbt_assets(
+    manifest=my_dbt_project.manifest_path,
+)
+def my_dbt_assets(context: AssetExecutionContext, dbt: DbtCliResource):
+    yield from (
+        dbt.cli(["build"], context=context)
+        .stream()
+        .fetch_row_counts()
+        .fetch_column_metadata()
+    )
+```
+
+---
+
## Defining dependencies

- [Upstream dependencies](#upstream-dependencies)
- [Downstream dependencies](#downstream-dependencies)

@@ -547,8 +782,7 @@ Dagster allows you to define existing assets as upstream dependencies of dbt mod
from dagster import asset

@asset
-def upstream():
-    ...
+def upstream(): ...
```

In order to define this asset as an upstream dependency for a dbt model, you'll need to first declare it as a [data source](https://docs.getdbt.com/docs/building-a-dbt-project/using-sources#declaring-a-source) in the `sources.yml` file. Here, you can explicitly provide your asset key to a source table:

@@ -601,8 +835,7 @@ from dagster import asset, AssetExecutionContext
from dagster_dbt import DbtCliResource, get_asset_key_for_source, dbt_assets

@dbt_assets(manifest=MANIFEST_PATH)
-def my_dbt_assets(context: AssetExecutionContext, dbt: DbtCliResource):
-    ...
+def my_dbt_assets(context: AssetExecutionContext, dbt: DbtCliResource): ...

@asset(key=get_asset_key_for_source([my_dbt_assets], "jaffle_shop"))
def orders():
@@ -650,8 +883,7 @@ from dagster_dbt import get_asset_key_for_model
from dagster import asset

@asset(deps=[get_asset_key_for_model([my_dbt_assets], "my_dbt_model")])
-def my_downstream_asset():
-    ...
+def my_downstream_asset(): ...
```

In the downstream asset, you may want direct access to the contents of the dbt model. To do so, you can customize the code within your `@asset`-decorated function to load upstream data.

@@ -683,25 +915,23 @@
Partitioned assets will be able to access the time window of the run. When using the `@dbt_assets` decorator, all assets are defined to operate on the same partitions. With this in mind, we can retrieve any time window from the `context.partition_time_window` property in order to get the current start and end partitions.

-```python
+```python startafter=start_build_incremental_model endbefore=end_build_incremental_model file=/integrations/dbt/dbt.py dedent=4
import json
from pathlib import Path

-from dagster import DailyPartitionDefinition, OpExecutionContext
-from dagster_dbt import DbtCliResource, dbt_assets
+from dagster import DailyPartitionsDefinition, OpExecutionContext
+from dagster_dbt import DbtCliResource, DbtProject, dbt_assets

+my_dbt_project = DbtProject(project_dir=Path("path/to/dbt_project"))

@dbt_assets(
-    manifest=Path("target", "manifest.json"),
-    partitions_def=DailyPartitionsDefinition(start_date="2023-01-01")
+    manifest=my_dbt_project.manifest_path,
+    partitions_def=DailyPartitionsDefinition(start_date="2023-01-01"),
)
def partitionshop_dbt_assets(context: OpExecutionContext, dbt: DbtCliResource):
    start, end = context.partition_time_window

-    dbt_vars = {
-        "min_date": start.isoformat(),
-        "max_date": end.isoformat()
-    }
+    dbt_vars = {"min_date": start.isoformat(), "max_date": end.isoformat()}
    dbt_build_args = ["build", "--vars", json.dumps(dbt_vars)]

    yield from dbt.cli(dbt_build_args, context=context).stream()
@@ -710,8 +940,8 @@ def partitionshop_dbt_assets(context: OpExecutionContext, dbt: DbtCliResource):

With the variables defined, we can now reference `min_date` and `max_date` in our SQL and configure the dbt model as incremental. Here, we define an incremental run to operate on rows with `order_date` that is between our `min_date` and `max_date`.

```sql
--- Configure the model as incremental
-{{ config(materialized='incremental') }}
+-- Configure the model as incremental, use a unique_key and the delete+insert strategy to ensure the pipeline is idempotent.
+{{ config(materialized='incremental', unique_key='order_date', incremental_strategy="delete+insert") }}

select * from {{ ref('my_model') }}
@@ -727,7 +957,7 @@
where order_date >= '{{ var('min_date') }}' and order_date <= '{{ var('max_date'
/project.py ## Replace with the project.py location in the Dagster project folder
+        shell: bash
+   ```
+
+   When you add this step, you'll need to:
+
+   - **Add any [adapters](https://docs.getdbt.com/docs/connect-adapters) and libraries used by dbt to your `setup.py` file**.
+   - **Add the location of your Dagster project directory** to the `dagster-dbt project prepare-and-package` command.
+
+5. Save the changes.
+
+6. Open the `branch_deployments.yml` file and repeat steps 3 - 5.
+
+7. Commit the changes to the repository.
+
+Once the new step is pushed to the remote, your workflow will be updated to prepare your dbt project before building and pushing your Docker image.
+
+---
+
+## Using a local agent
+
+When using a local agent for your Hybrid deployments in Dagster+, your Dagster code and dbt project must be in a Python environment that can be accessed on the same machine as your agent.
+
+When updating the dbt project, it is important to refresh the [manifest file](https://docs.getdbt.com/reference/artifacts/manifest-json) and [project dependencies](https://docs.getdbt.com/docs/collaborate/govern/project-dependencies) to ensure that they are up-to-date when used with your Dagster code. This can easily be done by running the [`dagster-dbt project prepare-and-package`](/\_apidocs/libraries/dagster-dbt#dagster-dbt-project-prepare-and-package) command.
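+For local development, a complementary sketch (assuming your Dagster code already defines a `DbtProject`): `DbtProject.prepare_if_dev()` re-creates the manifest when the code location is loaded with `dagster dev`, while `dagster-dbt project prepare-and-package` remains the way to prepare the project for a deployed agent:
+
+```python
+from pathlib import Path
+
+from dagster_dbt import DbtProject
+
+# Hypothetical path; point this at your own dbt project directory.
+my_dbt_project = DbtProject(project_dir=Path("path/to/dbt_project"))
+
+# During `dagster dev`, recompile the dbt manifest at code-load time so it
+# stays in sync with the dbt project; deployed code should rely on artifacts
+# built by `dagster-dbt project prepare-and-package`.
+my_dbt_project.prepare_if_dev()
+```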
+
+---
+
+## Related
+
diff --git a/docs/content/integrations/dbt/using-dbt-with-dagster-plus/serverless.mdx b/docs/content/integrations/dbt/using-dbt-with-dagster-plus/serverless.mdx
new file mode 100644
index 0000000000000..7d90b5f6c8410
--- /dev/null
+++ b/docs/content/integrations/dbt/using-dbt-with-dagster-plus/serverless.mdx
@@ -0,0 +1,261 @@
+---
+title: "Using dbt with Serverless deployments in Dagster+ | Dagster Docs"
+description: Deploy your dbt & Dagster project with Serverless deployments in Dagster+.
+---
+
+# Using dbt with Serverless deployments in Dagster+
+
+Importing an existing dbt project in Dagster+ allows you to automatically load your dbt models as Dagster assets. This can be done with:
+
+- An existing dbt project that is not already using Dagster, or
+- A Dagster project in which your dbt project is included
+
+In this guide, we'll demonstrate by using an existing dbt project that doesn't use Dagster.
+
+---
+
+## Prerequisites
+
+To follow the steps in this guide, you'll need **Dagster+ Organization Admin**, **Admin**, or **Editor** permissions. This is required to create a code location.
+
+You'll also need **an existing dbt project** that contains the following files in the repository root:
+
+- [`dbt_project.yml`](https://docs.getdbt.com/reference/dbt_project.yml)
+- [`profiles.yml`](https://docs.getdbt.com/docs/core/connect-data-platform/profiles.yml)
+
+---
+
+## Step 1: Import your project in Dagster+
+
+In this section, we'll demonstrate how to import an existing project to Dagster+. Our example imports the project from a GitHub repository, but Dagster+ also supports Gitlab.
+
+1. Sign in to your Dagster+ account.
+
+2. Navigate to **Deployment > Code locations**.
+
+3. Click **Add code location**.
+
+4. Depending on the type of project you imported, this step will vary:
+
+   - **For dbt-only projects**, click **Import a dbt project**, then **Continue**.
+   - **For dbt and Dagster projects,** click **Import a Dagster project**.
+
+5. At this point, you'll be prompted to select either GitHub or Gitlab. For this guide, we'll select **GitHub**.
+
+6. If prompted, sign into your GitHub account and complete the authorization process for the Dagster+ application. **Note**: The profile or organization you're using to authorize Dagster+ must have read and write access to the repository containing the project. After the authorization is complete, you'll be redirected back to Dagster+.
+
+7. In Dagster+, locate and select the repository containing the project by using the dropdowns. **Note**: dbt projects must have `dbt_project.yml` and `profiles.yml` files in the repository root or an error will display.
+
+8. Click **Continue** to begin the import process.
+
+9. The last step of the import process adds a few files, which we'll discuss in the next section, to the project. Depending on the type of project you imported, this step will vary:
+
+   - **For dbt-only projects**, Dagster+ will open a pull request to update the repository. You'll need to review and merge the pull request to complete the process.
+   - **For dbt and Dagster projects,** Dagster+ will directly commit the files to the repository.
+
+Once Dagster+ finishes importing the project, move on to the next step.
+
+---
+
+## Step 2: Review the repository changes
+
+The file structure of the repository will change the first time a project is deployed using Dagster+. For dbt projects, a few things will happen:
+
+- **A [`dagster_cloud.yaml` file](/dagster-plus/managing-deployments/dagster-cloud-yaml) will be created.** This file defines the project as a Dagster+ code location.
+- **A few `.yml` files, used for CI/CD, will be created in `.github/workflows`.** [These files](/dagster-plus/references/ci-cd-file-reference), named `branch_deployments.yml` and `deploy.yml`, manage the deployments of the repository.
+- **For dbt-only projects being deployed for the first time**, Dagster+ will create a new Dagster project in the repository using the [`dagster-dbt scaffold`](/integrations/dbt/reference#scaffolding-a-dagster-project-from-a-dbt-project) command. This will result in a Dagster project that matches the dbt project. For example, a dbt project named `my_dbt_project` will contain a Dagster project in `my_dbt_project/my_dbt_project` after the process completes. Refer to the [Dagster project files reference](/guides/understanding-dagster-project-files) to learn more about the files in a Dagster project.
+
+**Use the following tabs** to see how the repository will change for a [dbt-only project](#dbt-only-projects) and a [dbt and Dagster project](#dbt-and-dagster-projects) being deployed for the first time.
+
+### dbt-only projects
+
+  Looking for dbt and Dagster projects?{" "}
+  Click here!
+
+Before the Dagster+ changes, a typical dbt project would include files like `dbt_project.yml`, `profiles.yml`, dbt models in `.sql` format, and dbt seeds in `.csv` format. As this is a git repository, other files like `.gitignore`, `LICENSE` and `README.md` may also be included:
+
+```shell
+## dbt-only project
+## before Dagster+ deployment
+
+my_dbt_project
+├── models
+│   ├── my_model.sql
+├── seeds
+│   ├── my_seeds.csv
+├── .gitignore
+├── LICENSE
+├── README.md
+├── dbt_project.yml
+└── profiles.yml
+```
+
+When the Dagster+ deployment process completes, the repository will look like the following:
+
+```shell
+## dbt-only project
+## after Dagster+ deployment
+
+my_dbt_project
+├── .github ## CI/CD files
+│   ├── workflows
+│   │   ├── branch_deployments.yml
+│   │   ├── deploy.yml
+├── models
+│   ├── my_model.sql
+├── my_dbt_project ## New Dagster project
+│   ├── my_dbt_project
+│   │   ├── __init__.py
+│   │   ├── assets.py
+│   │   ├── definitions.py
+│   │   ├── project.py
+│   │   ├── schedules.py
+│   ├── pyproject.toml
+│   ├── setup.py
+├── seeds
+│   ├── my_seeds.csv
+├── .gitignore
+├── LICENSE
+├── README.md
+├── dagster_cloud.yaml ## Dagster+ code location file
+├── dbt_project.yml
+└── profiles.yml
+```
+
+### dbt and Dagster projects
+
+  Looking for dbt-only projects?{" "}
+  Click here!
+
+After the Dagster+ changes, a dbt and Dagster project will include the files required for dbt and Dagster, some files related to git, and the newly-added Dagster+ files:
+
+```shell
+## dbt and Dagster project
+## after Dagster+ deployment
+
+my_dbt_and_dagster_project
+├── .github ## CI/CD files
+│   ├── workflows
+│   │   ├── branch_deployments.yml
+│   │   ├── deploy.yml
+├── dbt
+│   ├── models
+│   │   ├── my_model.sql
+│   ├── seeds
+│   │   ├── my_seeds.csv
+│   ├── dbt_project.yml
+│   ├── profiles.yml
+├── my_dbt_and_dagster_project
+│   ├── __init__.py
+│   ├── assets.py
+│   ├── definitions.py
+│   ├── project.py
+│   ├── schedules.py
+├── .gitignore
+├── LICENSE
+├── README.md
+├── dagster_cloud.yaml ## Dagster+ code location file
+├── pyproject.toml
+└── setup.py
```
+
+---
+
+## Step 3: Update the CI/CD files
+
+  **Heads up!** Skip this step if you imported a dbt-only project.
+
+The last step is to update the [CI/CD files](/dagster-plus/references/ci-cd-file-reference) in the repository. When you import a dbt project into Dagster+ using the **Import a Dagster project** option, you'll need to add a few steps to allow the dbt project to deploy successfully.
+
+1. In your Dagster project, locate the `.github/workflows` directory.
+
+2. Open the `deploy.yml` file.
+
+3. Locate the `Checkout for Python Executable Deploy` step, which should be on or near line 38.
+
+4. After this step, add the following:
+
+   ```yaml
+   - name: Prepare DBT project for deployment
+     if: steps.prerun.outputs.result == 'pex-deploy'
+     run: |
+       python -m pip install pip --upgrade
+       cd project-repo
+       pip install . --upgrade --upgrade-strategy eager ## Install the Python dependencies from the setup.py file, ex: dbt-core and dbt-duckdb
+       dagster-dbt project prepare-and-package --file /project.py ## Replace with the project.py location in the Dagster project folder
+     shell: bash
+   ```
+
+   When you add this step, you'll need to:
+
+   - **Add any [adapters](https://docs.getdbt.com/docs/connect-adapters) and libraries used by dbt to your `setup.py` file**. In this example, we're using `dbt-core` and `dbt-duckdb`.
+   - **Add the location of your Dagster project directory** to the `dagster-dbt project prepare-and-package` command. In this example, our project is in the `/my_dbt_and_dagster_project` directory.
+
+5. Save the changes.
+
+6. Open the `branch_deployments.yml` file and repeat steps 3 - 5.
+
+7. Commit the changes to the repository.
+
+Once the new step is pushed to the remote, GitHub will automatically try to run a new job using the updated workflow.
+
+---
+
+## What's next?
+
+For an end-to-end example, from the project creation to the deployment to Dagster+, check out the Dagster & dbt course in [Dagster University](https://courses.dagster.io).
+
+---
+
+## Related
+
diff --git a/docs/content/integrations/dbt/using-dbt-with-dagster.mdx b/docs/content/integrations/dbt/using-dbt-with-dagster.mdx
index b6b6fbe10d827..2780ee713bfb3 100644
--- a/docs/content/integrations/dbt/using-dbt-with-dagster.mdx
+++ b/docs/content/integrations/dbt/using-dbt-with-dagster.mdx
@@ -12,7 +12,7 @@ description: Dagster can orchestrate dbt alongside other technologies.

In this tutorial, we'll walk you through integrating dbt with Dagster using a smaller version of dbt's example [jaffle shop project](https://github.com/dbt-labs/jaffle_shop), the [dagster-dbt library](/\_apidocs/libraries/dagster-dbt), and a data warehouse, such as [DuckDB](https://duckdb.org/).
-By the end of this tutorial, you'll have your dbt models represented in Dagster along with other [Dagster Software-defined Assets](/integrations/dbt/reference#dbt-models-and-dagster-software-defined-assets) upstream and downstream of them:
+By the end of this tutorial, you'll have your dbt models represented in Dagster along with other [Dagster asset definitions](/integrations/dbt/reference#dbt-models-and-dagster-asset-definitions) upstream and downstream of them:
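+A minimal sketch of that end state, borrowing the `get_asset_key_for_model` pattern from the [dagster-dbt reference](/integrations/dbt/reference#downstream-dependencies) (the `my_dbt_assets` and `"customers"` names are placeholders for your own definitions):
+
+```python
+from dagster import asset
+from dagster_dbt import get_asset_key_for_model
+
+# `my_dbt_assets` is assumed to be the @dbt_assets definition built in this
+# tutorial; "customers" stands in for one of your dbt model names.
+@asset(deps=[get_asset_key_for_model([my_dbt_assets], "customers")])
+def order_summary(): ...
+```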
or operator expected",!be)])}F=X.len+1}else return new H("throw",[X.value])}while(!0);return!0}function de(w,S){w=w.rename(S);var y=S.next_free_variable(),F=Be(w.body,y,S);return F.error?F.value:(w.body=F.value,w.head.args=w.head.args.concat([y,F.variable]),w.head=new H(w.head.id,w.head.args),w)}function Be(w,S,y){var F;if(b.type.is_term(w)&&w.indicator==="!/0")return{value:w,variable:S,error:!1};if(b.type.is_term(w)&&w.indicator===",/2"){var J=Be(w.args[0],S,y);if(J.error)return J;var X=Be(w.args[1],J.variable,y);return X.error?X:{value:new H(",",[J.value,X.value]),variable:X.variable,error:!1}}else{if(b.type.is_term(w)&&w.indicator==="{}/1")return{value:w.args[0],variable:S,error:!1};if(b.type.is_empty_list(w))return{value:new H("true",[]),variable:S,error:!1};if(b.type.is_list(w)){F=y.next_free_variable();for(var $=w,ie;$.indicator==="./2";)ie=$,$=$.args[1];return b.type.is_variable($)?{value:b.error.instantiation("DCG"),variable:S,error:!0}:b.type.is_empty_list($)?(ie.args[1]=F,{value:new H("=",[S,w]),variable:F,error:!1}):{value:b.error.type("list",w,"DCG"),variable:S,error:!0}}else return b.type.is_callable(w)?(F=y.next_free_variable(),w.args=w.args.concat([S,F]),w=new H(w.id,w.args),{value:w,variable:F,error:!1}):{value:b.error.type("callable",w,"DCG"),variable:S,error:!0}}}function Ee(w){return b.type.is_variable(w)?new H("call",[w]):b.type.is_term(w)&&[",/2",";/2","->/2"].indexOf(w.indicator)!==-1?new H(w.id,[Ee(w.args[0]),Ee(w.args[1])]):w}function g(w,S){for(var y=S||new b.type.Term("[]",[]),F=w.length-1;F>=0;F--)y=new b.type.Term(".",[w[F],y]);return y}function me(w,S){for(var y=w.length-1;y>=0;y--)w[y]===S&&w.splice(y,1)}function we(w){for(var S={},y=[],F=0;F=0;S--)if(w.charAt(S)==="/")return new H("/",[new H(w.substring(0,S)),new Ne(parseInt(w.substring(S+1)),!1)])}function xe(w){this.id=w}function Ne(w,S){this.is_float=S!==void 0?S:parseInt(w)!==w,this.value=this.is_float?w:parseInt(w)}var ht=0;function H(w,S,y){this.ref=y||++ht,this.id=w,this.args=S||[],this.indicator=w+"/"+this.args.length}var rt=0;function Te(w,S,y,F,J,X){this.id=rt++,this.stream=w,this.mode=S,this.alias=y,this.type=F!==void 0?F:"text",this.reposition=J!==void 0?J:!0,this.eof_action=X!==void 0?X:"eof_code",this.position=this.mode==="append"?"end_of_stream":0,this.output=this.mode==="write"||this.mode==="append",this.input=this.mode==="read"}function Fe(w){w=w||{},this.links=w}function ke(w,S,y){S=S||new Fe,y=y||null,this.goal=w,this.substitution=S,this.parent=y}function Ye(w,S,y){this.head=w,this.body=S,this.dynamic=y||!1}function Se(w){w=w===void 0||w<=0?1e3:w,this.rules={},this.src_predicates={},this.rename=0,this.modules=[],this.thread=new et(this),this.total_threads=1,this.renamed_variables={},this.public_predicates={},this.multifile_predicates={},this.limit=w,this.streams={user_input:new Te(typeof gl<"u"&&gl.exports?nodejs_user_input:tau_user_input,"read","user_input","text",!1,"reset"),user_output:new Te(typeof gl<"u"&&gl.exports?nodejs_user_output:tau_user_output,"write","user_output","text",!1,"eof_code")},this.file_system=typeof gl<"u"&&gl.exports?nodejs_file_system:tau_file_system,this.standard_input=this.streams.user_input,this.standard_output=this.streams.user_output,this.current_input=this.streams.user_input,this.current_output=this.streams.user_output,this.format_success=function(S){return S.substitution},this.format_error=function(S){return 
S.goal},this.flag={bounded:b.flag.bounded.value,max_integer:b.flag.max_integer.value,min_integer:b.flag.min_integer.value,integer_rounding_function:b.flag.integer_rounding_function.value,char_conversion:b.flag.char_conversion.value,debug:b.flag.debug.value,max_arity:b.flag.max_arity.value,unknown:b.flag.unknown.value,double_quotes:b.flag.double_quotes.value,occurs_check:b.flag.occurs_check.value,dialect:b.flag.dialect.value,version_data:b.flag.version_data.value,nodejs:b.flag.nodejs.value},this.__loaded_modules=[],this.__char_conversion={},this.__operators={1200:{":-":["fx","xfx"],"-->":["xfx"],"?-":["fx"]},1100:{";":["xfy"]},1050:{"->":["xfy"]},1e3:{",":["xfy"]},900:{"\\+":["fy"]},700:{"=":["xfx"],"\\=":["xfx"],"==":["xfx"],"\\==":["xfx"],"@<":["xfx"],"@=<":["xfx"],"@>":["xfx"],"@>=":["xfx"],"=..":["xfx"],is:["xfx"],"=:=":["xfx"],"=\\=":["xfx"],"<":["xfx"],"=<":["xfx"],">":["xfx"],">=":["xfx"]},600:{":":["xfy"]},500:{"+":["yfx"],"-":["yfx"],"/\\":["yfx"],"\\/":["yfx"]},400:{"*":["yfx"],"/":["yfx"],"//":["yfx"],rem:["yfx"],mod:["yfx"],"<<":["yfx"],">>":["yfx"]},200:{"**":["xfx"],"^":["xfy"],"-":["fy"],"+":["fy"],"\\":["fy"]}}}function et(w){this.epoch=Date.now(),this.session=w,this.session.total_threads++,this.total_steps=0,this.cpu_time=0,this.cpu_time_last=0,this.points=[],this.debugger=!1,this.debugger_states=[],this.level="top_level/0",this.__calls=[],this.current_limit=this.session.limit,this.warnings=[]}function Ue(w,S,y){this.id=w,this.rules=S,this.exports=y,b.module[w]=this}Ue.prototype.exports_predicate=function(w){return this.exports.indexOf(w)!==-1},xe.prototype.unify=function(w,S){if(S&&e(w.variables(),this.id)!==-1&&!b.type.is_variable(w))return null;var y={};return y[this.id]=w,new Fe(y)},Ne.prototype.unify=function(w,S){return b.type.is_number(w)&&this.value===w.value&&this.is_float===w.is_float?new Fe:null},H.prototype.unify=function(w,S){if(b.type.is_term(w)&&this.indicator===w.indicator){for(var y=new Fe,F=0;F=0){var F=this.args[0].value,J=Math.floor(F/26),X=F%26;return"ABCDEFGHIJKLMNOPQRSTUVWXYZ"[X]+(J!==0?J:"")}switch(this.indicator){case"[]/0":case"{}/0":case"!/0":return this.id;case"{}/1":return"{"+this.args[0].toString(w)+"}";case"./2":for(var $="["+this.args[0].toString(w),ie=this.args[1];ie.indicator==="./2";)$+=", "+ie.args[0].toString(w),ie=ie.args[1];return ie.indicator!=="[]/0"&&($+="|"+ie.toString(w)),$+="]",$;case",/2":return"("+this.args[0].toString(w)+", "+this.args[1].toString(w)+")";default:var be=this.id,Re=w.session?w.session.lookup_operator(this.id,this.args.length):null;if(w.session===void 0||w.ignore_ops||Re===null)return w.quoted&&!/^(!|,|;|[a-z][0-9a-zA-Z_]*)$/.test(be)&&be!=="{}"&&be!=="[]"&&(be="'"+x(be)+"'"),be+(this.args.length?"("+o(this.args,function(tr){return tr.toString(w)}).join(", ")+")":"");var at=Re.priority>S.priority||Re.priority===S.priority&&(Re.class==="xfy"&&this.indicator!==S.indicator||Re.class==="yfx"&&this.indicator!==S.indicator||this.indicator===S.indicator&&Re.class==="yfx"&&y==="right"||this.indicator===S.indicator&&Re.class==="xfy"&&y==="left");Re.indicator=this.indicator;var dt=at?"(":"",jt=at?")":"";return this.args.length===0?"("+this.id+")":["fy","fx"].indexOf(Re.class)!==-1?dt+be+" "+this.args[0].toString(w,Re)+jt:["yf","xf"].indexOf(Re.class)!==-1?dt+this.args[0].toString(w,Re)+" "+be+jt:dt+this.args[0].toString(w,Re,"left")+" "+this.id+" "+this.args[1].toString(w,Re,"right")+jt}},Te.prototype.toString=function(w){return"("+this.id+")"},Fe.prototype.toString=function(w){var S="{";for(var y in 
this.links)this.links.hasOwnProperty(y)&&(S!=="{"&&(S+=", "),S+=y+"/"+this.links[y].toString(w));return S+="}",S},ke.prototype.toString=function(w){return this.goal===null?"<"+this.substitution.toString(w)+">":"<"+this.goal.toString(w)+", "+this.substitution.toString(w)+">"},Ye.prototype.toString=function(w){return this.body?this.head.toString(w)+" :- "+this.body.toString(w)+".":this.head.toString(w)+"."},Se.prototype.toString=function(w){for(var S="",y=0;y=0;J--)F=new H(".",[S[J],F]);return F}return new H(this.id,o(this.args,function(X){return X.apply(w)}),this.ref)},Te.prototype.apply=function(w){return this},Ye.prototype.apply=function(w){return new Ye(this.head.apply(w),this.body!==null?this.body.apply(w):null)},Fe.prototype.apply=function(w){var S,y={};for(S in this.links)this.links.hasOwnProperty(S)&&(y[S]=this.links[S].apply(w));return new Fe(y)},H.prototype.select=function(){for(var w=this;w.indicator===",/2";)w=w.args[0];return w},H.prototype.replace=function(w){return this.indicator===",/2"?this.args[0].indicator===",/2"?new H(",",[this.args[0].replace(w),this.args[1]]):w===null?this.args[1]:new H(",",[w,this.args[1]]):w},H.prototype.search=function(w){if(b.type.is_term(w)&&w.ref!==void 0&&this.ref===w.ref)return!0;for(var S=0;SS&&F0&&(S=this.head_point().substitution.domain());e(S,b.format_variable(this.session.rename))!==-1;)this.session.rename++;if(w.id==="_")return new xe(b.format_variable(this.session.rename));this.session.renamed_variables[w.id]=b.format_variable(this.session.rename)}return new xe(this.session.renamed_variables[w.id])},Se.prototype.next_free_variable=function(){return this.thread.next_free_variable()},et.prototype.next_free_variable=function(){this.session.rename++;var w=[];for(this.points.length>0&&(w=this.head_point().substitution.domain());e(w,b.format_variable(this.session.rename))!==-1;)this.session.rename++;return new xe(b.format_variable(this.session.rename))},Se.prototype.is_public_predicate=function(w){return!this.public_predicates.hasOwnProperty(w)||this.public_predicates[w]===!0},et.prototype.is_public_predicate=function(w){return this.session.is_public_predicate(w)},Se.prototype.is_multifile_predicate=function(w){return this.multifile_predicates.hasOwnProperty(w)&&this.multifile_predicates[w]===!0},et.prototype.is_multifile_predicate=function(w){return this.session.is_multifile_predicate(w)},Se.prototype.prepend=function(w){return this.thread.prepend(w)},et.prototype.prepend=function(w){for(var S=w.length-1;S>=0;S--)this.points.push(w[S])},Se.prototype.success=function(w,S){return this.thread.success(w,S)},et.prototype.success=function(w,y){var y=typeof y>"u"?w:y;this.prepend([new ke(w.goal.replace(null),w.substitution,y)])},Se.prototype.throw_error=function(w){return this.thread.throw_error(w)},et.prototype.throw_error=function(w){this.prepend([new ke(new H("throw",[w]),new Fe,null,null)])},Se.prototype.step_rule=function(w,S){return this.thread.step_rule(w,S)},et.prototype.step_rule=function(w,S){var y=S.indicator;if(w==="user"&&(w=null),w===null&&this.session.rules.hasOwnProperty(y))return this.session.rules[y];for(var F=w===null?this.session.modules:e(this.session.modules,w)===-1?[]:[w],J=0;J1)&&this.again()},Se.prototype.answers=function(w,S,y){return this.thread.answers(w,S,y)},et.prototype.answers=function(w,S,y){var F=S||1e3,J=this;if(S<=0){y&&y();return}this.answer(function(X){w(X),X!==!1?setTimeout(function(){J.answers(w,S-1,y)},1):y&&y()})},Se.prototype.again=function(w){return 
this.thread.again(w)},et.prototype.again=function(w){for(var S,y=Date.now();this.__calls.length>0;){for(this.warnings=[],w!==!1&&(this.current_limit=this.session.limit);this.current_limit>0&&this.points.length>0&&this.head_point().goal!==null&&!b.type.is_error(this.head_point().goal);)if(this.current_limit--,this.step()===!0)return;var F=Date.now();this.cpu_time_last=F-y,this.cpu_time+=this.cpu_time_last;var J=this.__calls.shift();this.current_limit<=0?J(null):this.points.length===0?J(!1):b.type.is_error(this.head_point().goal)?(S=this.session.format_error(this.points.pop()),this.points=[],J(S)):(this.debugger&&this.debugger_states.push(this.head_point()),S=this.session.format_success(this.points.pop()),J(S))}},Se.prototype.unfold=function(w){if(w.body===null)return!1;var S=w.head,y=w.body,F=y.select(),J=new et(this),X=[];J.add_goal(F),J.step();for(var $=J.points.length-1;$>=0;$--){var ie=J.points[$],be=S.apply(ie.substitution),Re=y.replace(ie.goal);Re!==null&&(Re=Re.apply(ie.substitution)),X.push(new Ye(be,Re))}var at=this.rules[S.indicator],dt=e(at,w);return X.length>0&&dt!==-1?(at.splice.apply(at,[dt,1].concat(X)),!0):!1},et.prototype.unfold=function(w){return this.session.unfold(w)},xe.prototype.interpret=function(w){return b.error.instantiation(w.level)},Ne.prototype.interpret=function(w){return this},H.prototype.interpret=function(w){return b.type.is_unitary_list(this)?this.args[0].interpret(w):b.operate(w,this)},xe.prototype.compare=function(w){return this.idw.id?1:0},Ne.prototype.compare=function(w){if(this.value===w.value&&this.is_float===w.is_float)return 0;if(this.valuew.value)return 1},H.prototype.compare=function(w){if(this.args.lengthw.args.length||this.args.length===w.args.length&&this.id>w.id)return 1;for(var S=0;SF)return 1;if(w.constructor===Ne){if(w.is_float&&S.is_float)return 0;if(w.is_float)return-1;if(S.is_float)return 1}return 0},is_substitution:function(w){return w instanceof Fe},is_state:function(w){return w instanceof ke},is_rule:function(w){return w instanceof Ye},is_variable:function(w){return w instanceof xe},is_stream:function(w){return w instanceof Te},is_anonymous_var:function(w){return w instanceof xe&&w.id==="_"},is_callable:function(w){return w instanceof H},is_number:function(w){return w instanceof Ne},is_integer:function(w){return w instanceof Ne&&!w.is_float},is_float:function(w){return w instanceof Ne&&w.is_float},is_term:function(w){return w instanceof H},is_atom:function(w){return w instanceof H&&w.args.length===0},is_ground:function(w){if(w instanceof xe)return!1;if(w instanceof H){for(var S=0;S0},is_list:function(w){return w instanceof H&&(w.indicator==="[]/0"||w.indicator==="./2")},is_empty_list:function(w){return w instanceof H&&w.indicator==="[]/0"},is_non_empty_list:function(w){return w instanceof H&&w.indicator==="./2"},is_fully_list:function(w){for(;w instanceof H&&w.indicator==="./2";)w=w.args[1];return w instanceof xe||w instanceof H&&w.indicator==="[]/0"},is_instantiated_list:function(w){for(;w instanceof H&&w.indicator==="./2";)w=w.args[1];return w instanceof H&&w.indicator==="[]/0"},is_unitary_list:function(w){return w instanceof H&&w.indicator==="./2"&&w.args[1]instanceof H&&w.args[1].indicator==="[]/0"},is_character:function(w){return w instanceof H&&(w.id.length===1||w.id.length>0&&w.id.length<=2&&n(w.id,0)>=65536)},is_character_code:function(w){return w instanceof Ne&&!w.is_float&&w.value>=0&&w.value<=1114111},is_byte:function(w){return w instanceof Ne&&!w.is_float&&w.value>=0&&w.value<=255},is_operator:function(w){return w 
instanceof H&&b.arithmetic.evaluation[w.indicator]},is_directive:function(w){return w instanceof H&&b.directive[w.indicator]!==void 0},is_builtin:function(w){return w instanceof H&&b.predicate[w.indicator]!==void 0},is_error:function(w){return w instanceof H&&w.indicator==="throw/1"},is_predicate_indicator:function(w){return w instanceof H&&w.indicator==="//2"&&w.args[0]instanceof H&&w.args[0].args.length===0&&w.args[1]instanceof Ne&&w.args[1].is_float===!1},is_flag:function(w){return w instanceof H&&w.args.length===0&&b.flag[w.id]!==void 0},is_value_flag:function(w,S){if(!b.type.is_flag(w))return!1;for(var y in b.flag[w.id].allowed)if(b.flag[w.id].allowed.hasOwnProperty(y)&&b.flag[w.id].allowed[y].equals(S))return!0;return!1},is_io_mode:function(w){return b.type.is_atom(w)&&["read","write","append"].indexOf(w.id)!==-1},is_stream_option:function(w){return b.type.is_term(w)&&(w.indicator==="alias/1"&&b.type.is_atom(w.args[0])||w.indicator==="reposition/1"&&b.type.is_atom(w.args[0])&&(w.args[0].id==="true"||w.args[0].id==="false")||w.indicator==="type/1"&&b.type.is_atom(w.args[0])&&(w.args[0].id==="text"||w.args[0].id==="binary")||w.indicator==="eof_action/1"&&b.type.is_atom(w.args[0])&&(w.args[0].id==="error"||w.args[0].id==="eof_code"||w.args[0].id==="reset"))},is_stream_position:function(w){return b.type.is_integer(w)&&w.value>=0||b.type.is_atom(w)&&(w.id==="end_of_stream"||w.id==="past_end_of_stream")},is_stream_property:function(w){return b.type.is_term(w)&&(w.indicator==="input/0"||w.indicator==="output/0"||w.indicator==="alias/1"&&(b.type.is_variable(w.args[0])||b.type.is_atom(w.args[0]))||w.indicator==="file_name/1"&&(b.type.is_variable(w.args[0])||b.type.is_atom(w.args[0]))||w.indicator==="position/1"&&(b.type.is_variable(w.args[0])||b.type.is_stream_position(w.args[0]))||w.indicator==="reposition/1"&&(b.type.is_variable(w.args[0])||b.type.is_atom(w.args[0])&&(w.args[0].id==="true"||w.args[0].id==="false"))||w.indicator==="type/1"&&(b.type.is_variable(w.args[0])||b.type.is_atom(w.args[0])&&(w.args[0].id==="text"||w.args[0].id==="binary"))||w.indicator==="mode/1"&&(b.type.is_variable(w.args[0])||b.type.is_atom(w.args[0])&&(w.args[0].id==="read"||w.args[0].id==="write"||w.args[0].id==="append"))||w.indicator==="eof_action/1"&&(b.type.is_variable(w.args[0])||b.type.is_atom(w.args[0])&&(w.args[0].id==="error"||w.args[0].id==="eof_code"||w.args[0].id==="reset"))||w.indicator==="end_of_stream/1"&&(b.type.is_variable(w.args[0])||b.type.is_atom(w.args[0])&&(w.args[0].id==="at"||w.args[0].id==="past"||w.args[0].id==="not")))},is_streamable:function(w){return w.__proto__.stream!==void 0},is_read_option:function(w){return b.type.is_term(w)&&["variables/1","variable_names/1","singletons/1"].indexOf(w.indicator)!==-1},is_write_option:function(w){return b.type.is_term(w)&&(w.indicator==="quoted/1"&&b.type.is_atom(w.args[0])&&(w.args[0].id==="true"||w.args[0].id==="false")||w.indicator==="ignore_ops/1"&&b.type.is_atom(w.args[0])&&(w.args[0].id==="true"||w.args[0].id==="false")||w.indicator==="numbervars/1"&&b.type.is_atom(w.args[0])&&(w.args[0].id==="true"||w.args[0].id==="false"))},is_close_option:function(w){return b.type.is_term(w)&&w.indicator==="force/1"&&b.type.is_atom(w.args[0])&&(w.args[0].id==="true"||w.args[0].id==="false")},is_modifiable_flag:function(w){return b.type.is_flag(w)&&b.flag[w.id].changeable},is_module:function(w){return w instanceof H&&w.indicator==="library/1"&&w.args[0]instanceof H&&w.args[0].args.length===0&&b.module[w.args[0].id]!==void 
0}},arithmetic:{evaluation:{"e/0":{type_args:null,type_result:!0,fn:function(w){return Math.E}},"pi/0":{type_args:null,type_result:!0,fn:function(w){return Math.PI}},"tau/0":{type_args:null,type_result:!0,fn:function(w){return 2*Math.PI}},"epsilon/0":{type_args:null,type_result:!0,fn:function(w){return Number.EPSILON}},"+/1":{type_args:null,type_result:null,fn:function(w,S){return w}},"-/1":{type_args:null,type_result:null,fn:function(w,S){return-w}},"\\/1":{type_args:!1,type_result:!1,fn:function(w,S){return~w}},"abs/1":{type_args:null,type_result:null,fn:function(w,S){return Math.abs(w)}},"sign/1":{type_args:null,type_result:null,fn:function(w,S){return Math.sign(w)}},"float_integer_part/1":{type_args:!0,type_result:!1,fn:function(w,S){return parseInt(w)}},"float_fractional_part/1":{type_args:!0,type_result:!0,fn:function(w,S){return w-parseInt(w)}},"float/1":{type_args:null,type_result:!0,fn:function(w,S){return parseFloat(w)}},"floor/1":{type_args:!0,type_result:!1,fn:function(w,S){return Math.floor(w)}},"truncate/1":{type_args:!0,type_result:!1,fn:function(w,S){return parseInt(w)}},"round/1":{type_args:!0,type_result:!1,fn:function(w,S){return Math.round(w)}},"ceiling/1":{type_args:!0,type_result:!1,fn:function(w,S){return Math.ceil(w)}},"sin/1":{type_args:null,type_result:!0,fn:function(w,S){return Math.sin(w)}},"cos/1":{type_args:null,type_result:!0,fn:function(w,S){return Math.cos(w)}},"tan/1":{type_args:null,type_result:!0,fn:function(w,S){return Math.tan(w)}},"asin/1":{type_args:null,type_result:!0,fn:function(w,S){return Math.asin(w)}},"acos/1":{type_args:null,type_result:!0,fn:function(w,S){return Math.acos(w)}},"atan/1":{type_args:null,type_result:!0,fn:function(w,S){return Math.atan(w)}},"atan2/2":{type_args:null,type_result:!0,fn:function(w,S,y){return Math.atan2(w,S)}},"exp/1":{type_args:null,type_result:!0,fn:function(w,S){return Math.exp(w)}},"sqrt/1":{type_args:null,type_result:!0,fn:function(w,S){return Math.sqrt(w)}},"log/1":{type_args:null,type_result:!0,fn:function(w,S){return w>0?Math.log(w):b.error.evaluation("undefined",S.__call_indicator)}},"+/2":{type_args:null,type_result:null,fn:function(w,S,y){return w+S}},"-/2":{type_args:null,type_result:null,fn:function(w,S,y){return w-S}},"*/2":{type_args:null,type_result:null,fn:function(w,S,y){return w*S}},"//2":{type_args:null,type_result:!0,fn:function(w,S,y){return S?w/S:b.error.evaluation("zero_division",y.__call_indicator)}},"///2":{type_args:!1,type_result:!1,fn:function(w,S,y){return S?parseInt(w/S):b.error.evaluation("zero_division",y.__call_indicator)}},"**/2":{type_args:null,type_result:!0,fn:function(w,S,y){return Math.pow(w,S)}},"^/2":{type_args:null,type_result:null,fn:function(w,S,y){return Math.pow(w,S)}},"<>/2":{type_args:!1,type_result:!1,fn:function(w,S,y){return w>>S}},"/\\/2":{type_args:!1,type_result:!1,fn:function(w,S,y){return w&S}},"\\//2":{type_args:!1,type_result:!1,fn:function(w,S,y){return w|S}},"xor/2":{type_args:!1,type_result:!1,fn:function(w,S,y){return w^S}},"rem/2":{type_args:!1,type_result:!1,fn:function(w,S,y){return S?w%S:b.error.evaluation("zero_division",y.__call_indicator)}},"mod/2":{type_args:!1,type_result:!1,fn:function(w,S,y){return S?w-parseInt(w/S)*S:b.error.evaluation("zero_division",y.__call_indicator)}},"max/2":{type_args:null,type_result:null,fn:function(w,S,y){return Math.max(w,S)}},"min/2":{type_args:null,type_result:null,fn:function(w,S,y){return Math.min(w,S)}}}},directive:{"dynamic/1":function(w,S){var 
y=S.args[0];if(b.type.is_variable(y))w.throw_error(b.error.instantiation(S.indicator));else if(!b.type.is_compound(y)||y.indicator!=="//2")w.throw_error(b.error.type("predicate_indicator",y,S.indicator));else if(b.type.is_variable(y.args[0])||b.type.is_variable(y.args[1]))w.throw_error(b.error.instantiation(S.indicator));else if(!b.type.is_atom(y.args[0]))w.throw_error(b.error.type("atom",y.args[0],S.indicator));else if(!b.type.is_integer(y.args[1]))w.throw_error(b.error.type("integer",y.args[1],S.indicator));else{var F=S.args[0].args[0].id+"/"+S.args[0].args[1].value;w.session.public_predicates[F]=!0,w.session.rules[F]||(w.session.rules[F]=[])}},"multifile/1":function(w,S){var y=S.args[0];b.type.is_variable(y)?w.throw_error(b.error.instantiation(S.indicator)):!b.type.is_compound(y)||y.indicator!=="//2"?w.throw_error(b.error.type("predicate_indicator",y,S.indicator)):b.type.is_variable(y.args[0])||b.type.is_variable(y.args[1])?w.throw_error(b.error.instantiation(S.indicator)):b.type.is_atom(y.args[0])?b.type.is_integer(y.args[1])?w.session.multifile_predicates[S.args[0].args[0].id+"/"+S.args[0].args[1].value]=!0:w.throw_error(b.error.type("integer",y.args[1],S.indicator)):w.throw_error(b.error.type("atom",y.args[0],S.indicator))},"set_prolog_flag/2":function(w,S){var y=S.args[0],F=S.args[1];b.type.is_variable(y)||b.type.is_variable(F)?w.throw_error(b.error.instantiation(S.indicator)):b.type.is_atom(y)?b.type.is_flag(y)?b.type.is_value_flag(y,F)?b.type.is_modifiable_flag(y)?w.session.flag[y.id]=F:w.throw_error(b.error.permission("modify","flag",y)):w.throw_error(b.error.domain("flag_value",new H("+",[y,F]),S.indicator)):w.throw_error(b.error.domain("prolog_flag",y,S.indicator)):w.throw_error(b.error.type("atom",y,S.indicator))},"use_module/1":function(w,S){var y=S.args[0];if(b.type.is_variable(y))w.throw_error(b.error.instantiation(S.indicator));else if(!b.type.is_term(y))w.throw_error(b.error.type("term",y,S.indicator));else if(b.type.is_module(y)){var F=y.args[0].id;e(w.session.modules,F)===-1&&w.session.modules.push(F)}},"char_conversion/2":function(w,S){var y=S.args[0],F=S.args[1];b.type.is_variable(y)||b.type.is_variable(F)?w.throw_error(b.error.instantiation(S.indicator)):b.type.is_character(y)?b.type.is_character(F)?y.id===F.id?delete w.session.__char_conversion[y.id]:w.session.__char_conversion[y.id]=F.id:w.throw_error(b.error.type("character",F,S.indicator)):w.throw_error(b.error.type("character",y,S.indicator))},"op/3":function(w,S){var y=S.args[0],F=S.args[1],J=S.args[2];if(b.type.is_variable(y)||b.type.is_variable(F)||b.type.is_variable(J))w.throw_error(b.error.instantiation(S.indicator));else if(!b.type.is_integer(y))w.throw_error(b.error.type("integer",y,S.indicator));else if(!b.type.is_atom(F))w.throw_error(b.error.type("atom",F,S.indicator));else if(!b.type.is_atom(J))w.throw_error(b.error.type("atom",J,S.indicator));else if(y.value<0||y.value>1200)w.throw_error(b.error.domain("operator_priority",y,S.indicator));else if(J.id===",")w.throw_error(b.error.permission("modify","operator",J,S.indicator));else if(J.id==="|"&&(y.value<1001||F.id.length!==3))w.throw_error(b.error.permission("modify","operator",J,S.indicator));else if(["fy","fx","yf","xf","xfx","yfx","xfy"].indexOf(F.id)===-1)w.throw_error(b.error.domain("operator_specifier",F,S.indicator));else{var X={prefix:null,infix:null,postfix:null};for(var $ in w.session.__operators)if(w.session.__operators.hasOwnProperty($)){var 
ie=w.session.__operators[$][J.id];ie&&(e(ie,"fx")!==-1&&(X.prefix={priority:$,type:"fx"}),e(ie,"fy")!==-1&&(X.prefix={priority:$,type:"fy"}),e(ie,"xf")!==-1&&(X.postfix={priority:$,type:"xf"}),e(ie,"yf")!==-1&&(X.postfix={priority:$,type:"yf"}),e(ie,"xfx")!==-1&&(X.infix={priority:$,type:"xfx"}),e(ie,"xfy")!==-1&&(X.infix={priority:$,type:"xfy"}),e(ie,"yfx")!==-1&&(X.infix={priority:$,type:"yfx"}))}var be;switch(F.id){case"fy":case"fx":be="prefix";break;case"yf":case"xf":be="postfix";break;default:be="infix";break}if(((X.prefix&&be==="prefix"||X.postfix&&be==="postfix"||X.infix&&be==="infix")&&X[be].type!==F.id||X.infix&&be==="postfix"||X.postfix&&be==="infix")&&y.value!==0)w.throw_error(b.error.permission("create","operator",J,S.indicator));else return X[be]&&(me(w.session.__operators[X[be].priority][J.id],F.id),w.session.__operators[X[be].priority][J.id].length===0&&delete w.session.__operators[X[be].priority][J.id]),y.value>0&&(w.session.__operators[y.value]||(w.session.__operators[y.value.toString()]={}),w.session.__operators[y.value][J.id]||(w.session.__operators[y.value][J.id]=[]),w.session.__operators[y.value][J.id].push(F.id)),!0}}},predicate:{"op/3":function(w,S,y){b.directive["op/3"](w,y)&&w.success(S)},"current_op/3":function(w,S,y){var F=y.args[0],J=y.args[1],X=y.args[2],$=[];for(var ie in w.session.__operators)for(var be in w.session.__operators[ie])for(var Re=0;Re/2"){var F=w.points,J=w.session.format_success,X=w.session.format_error;w.session.format_success=function(Re){return Re.substitution},w.session.format_error=function(Re){return Re.goal},w.points=[new ke(y.args[0].args[0],S.substitution,S)];var $=function(Re){w.points=F,w.session.format_success=J,w.session.format_error=X,Re===!1?w.prepend([new ke(S.goal.replace(y.args[1]),S.substitution,S)]):b.type.is_error(Re)?w.throw_error(Re.args[0]):Re===null?(w.prepend([S]),w.__calls.shift()(null)):w.prepend([new ke(S.goal.replace(y.args[0].args[1]).apply(Re),S.substitution.apply(Re),S)])};w.__calls.unshift($)}else{var ie=new ke(S.goal.replace(y.args[0]),S.substitution,S),be=new ke(S.goal.replace(y.args[1]),S.substitution,S);w.prepend([ie,be])}},"!/0":function(w,S,y){var F,J,X=[];for(F=S,J=null;F.parent!==null&&F.parent.goal.search(y);)if(J=F,F=F.parent,F.goal!==null){var $=F.goal.select();if($&&$.id==="call"&&$.search(y)){F=J;break}}for(var ie=w.points.length-1;ie>=0;ie--){for(var be=w.points[ie],Re=be.parent;Re!==null&&Re!==F.parent;)Re=Re.parent;Re===null&&Re!==F.parent&&X.push(be)}w.points=X.reverse(),w.success(S)},"\\+/1":function(w,S,y){var F=y.args[0];b.type.is_variable(F)?w.throw_error(b.error.instantiation(w.level)):b.type.is_callable(F)?w.prepend([new ke(S.goal.replace(new H(",",[new H(",",[new H("call",[F]),new H("!",[])]),new H("fail",[])])),S.substitution,S),new ke(S.goal.replace(null),S.substitution,S)]):w.throw_error(b.error.type("callable",F,w.level))},"->/2":function(w,S,y){var F=S.goal.replace(new H(",",[y.args[0],new H(",",[new H("!"),y.args[1]])]));w.prepend([new ke(F,S.substitution,S)])},"fail/0":function(w,S,y){},"false/0":function(w,S,y){},"true/0":function(w,S,y){w.success(S)},"call/1":ne(1),"call/2":ne(2),"call/3":ne(3),"call/4":ne(4),"call/5":ne(5),"call/6":ne(6),"call/7":ne(7),"call/8":ne(8),"once/1":function(w,S,y){var F=y.args[0];w.prepend([new ke(S.goal.replace(new H(",",[new H("call",[F]),new H("!",[])])),S.substitution,S)])},"forall/2":function(w,S,y){var F=y.args[0],J=y.args[1];w.prepend([new ke(S.goal.replace(new H("\\+",[new H(",",[new H("call",[F]),new H("\\+",[new 
H("call",[J])])])])),S.substitution,S)])},"repeat/0":function(w,S,y){w.prepend([new ke(S.goal.replace(null),S.substitution,S),S])},"throw/1":function(w,S,y){b.type.is_variable(y.args[0])?w.throw_error(b.error.instantiation(w.level)):w.throw_error(y.args[0])},"catch/3":function(w,S,y){var F=w.points;w.points=[],w.prepend([new ke(y.args[0],S.substitution,S)]);var J=w.session.format_success,X=w.session.format_error;w.session.format_success=function(ie){return ie.substitution},w.session.format_error=function(ie){return ie.goal};var $=function(ie){var be=w.points;if(w.points=F,w.session.format_success=J,w.session.format_error=X,b.type.is_error(ie)){for(var Re=[],at=w.points.length-1;at>=0;at--){for(var tr=w.points[at],dt=tr.parent;dt!==null&&dt!==S.parent;)dt=dt.parent;dt===null&&dt!==S.parent&&Re.push(tr)}w.points=Re;var jt=w.get_flag("occurs_check").indicator==="true/0",tr=new ke,St=b.unify(ie.args[0],y.args[1],jt);St!==null?(tr.substitution=S.substitution.apply(St),tr.goal=S.goal.replace(y.args[2]).apply(St),tr.parent=S,w.prepend([tr])):w.throw_error(ie.args[0])}else if(ie!==!1){for(var ln=ie===null?[]:[new ke(S.goal.apply(ie).replace(null),S.substitution.apply(ie),S)],kr=[],at=be.length-1;at>=0;at--){kr.push(be[at]);var mr=be[at].goal!==null?be[at].goal.select():null;if(b.type.is_term(mr)&&mr.indicator==="!/0")break}var br=o(kr,function(Kr){return Kr.goal===null&&(Kr.goal=new H("true",[])),Kr=new ke(S.goal.replace(new H("catch",[Kr.goal,y.args[1],y.args[2]])),S.substitution.apply(Kr.substitution),Kr.parent),Kr.exclude=y.args[0].variables(),Kr}).reverse();w.prepend(br),w.prepend(ln),ie===null&&(this.current_limit=0,w.__calls.shift()(null))}};w.__calls.unshift($)},"=/2":function(w,S,y){var F=w.get_flag("occurs_check").indicator==="true/0",J=new ke,X=b.unify(y.args[0],y.args[1],F);X!==null&&(J.goal=S.goal.apply(X).replace(null),J.substitution=S.substitution.apply(X),J.parent=S,w.prepend([J]))},"unify_with_occurs_check/2":function(w,S,y){var F=new ke,J=b.unify(y.args[0],y.args[1],!0);J!==null&&(F.goal=S.goal.apply(J).replace(null),F.substitution=S.substitution.apply(J),F.parent=S,w.prepend([F]))},"\\=/2":function(w,S,y){var F=w.get_flag("occurs_check").indicator==="true/0",J=b.unify(y.args[0],y.args[1],F);J===null&&w.success(S)},"subsumes_term/2":function(w,S,y){var F=w.get_flag("occurs_check").indicator==="true/0",J=b.unify(y.args[1],y.args[0],F);J!==null&&y.args[1].apply(J).equals(y.args[1])&&w.success(S)},"findall/3":function(w,S,y){var F=y.args[0],J=y.args[1],X=y.args[2];if(b.type.is_variable(J))w.throw_error(b.error.instantiation(y.indicator));else if(!b.type.is_callable(J))w.throw_error(b.error.type("callable",J,y.indicator));else if(!b.type.is_variable(X)&&!b.type.is_list(X))w.throw_error(b.error.type("list",X,y.indicator));else{var $=w.next_free_variable(),ie=new H(",",[J,new H("=",[$,F])]),be=w.points,Re=w.session.limit,at=w.session.format_success;w.session.format_success=function(tr){return tr.substitution},w.add_goal(ie,!0,S);var dt=[],jt=function(tr){if(tr!==!1&&tr!==null&&!b.type.is_error(tr))w.__calls.unshift(jt),dt.push(tr.links[$.id]),w.session.limit=w.current_limit;else if(w.points=be,w.session.limit=Re,w.session.format_success=at,b.type.is_error(tr))w.throw_error(tr.args[0]);else if(w.current_limit>0){for(var St=new H("[]"),ln=dt.length-1;ln>=0;ln--)St=new H(".",[dt[ln],St]);w.prepend([new ke(S.goal.replace(new H("=",[X,St])),S.substitution,S)])}};w.__calls.unshift(jt)}},"bagof/3":function(w,S,y){var 
F,J=y.args[0],X=y.args[1],$=y.args[2];if(b.type.is_variable(X))w.throw_error(b.error.instantiation(y.indicator));else if(!b.type.is_callable(X))w.throw_error(b.error.type("callable",X,y.indicator));else if(!b.type.is_variable($)&&!b.type.is_list($))w.throw_error(b.error.type("list",$,y.indicator));else{var ie=w.next_free_variable(),be;X.indicator==="^/2"?(be=X.args[0].variables(),X=X.args[1]):be=[],be=be.concat(J.variables());for(var Re=X.variables().filter(function(br){return e(be,br)===-1}),at=new H("[]"),dt=Re.length-1;dt>=0;dt--)at=new H(".",[new xe(Re[dt]),at]);var jt=new H(",",[X,new H("=",[ie,new H(",",[at,J])])]),tr=w.points,St=w.session.limit,ln=w.session.format_success;w.session.format_success=function(br){return br.substitution},w.add_goal(jt,!0,S);var kr=[],mr=function(br){if(br!==!1&&br!==null&&!b.type.is_error(br)){w.__calls.unshift(mr);var Kr=!1,Kn=br.links[ie.id].args[0],Ms=br.links[ie.id].args[1];for(var Ri in kr)if(kr.hasOwnProperty(Ri)){var gs=kr[Ri];if(gs.variables.equals(Kn)){gs.answers.push(Ms),Kr=!0;break}}Kr||kr.push({variables:Kn,answers:[Ms]}),w.session.limit=w.current_limit}else if(w.points=tr,w.session.limit=St,w.session.format_success=ln,b.type.is_error(br))w.throw_error(br.args[0]);else if(w.current_limit>0){for(var io=[],Pi=0;Pi=0;so--)Os=new H(".",[br[so],Os]);io.push(new ke(S.goal.replace(new H(",",[new H("=",[at,kr[Pi].variables]),new H("=",[$,Os])])),S.substitution,S))}w.prepend(io)}};w.__calls.unshift(mr)}},"setof/3":function(w,S,y){var F,J=y.args[0],X=y.args[1],$=y.args[2];if(b.type.is_variable(X))w.throw_error(b.error.instantiation(y.indicator));else if(!b.type.is_callable(X))w.throw_error(b.error.type("callable",X,y.indicator));else if(!b.type.is_variable($)&&!b.type.is_list($))w.throw_error(b.error.type("list",$,y.indicator));else{var ie=w.next_free_variable(),be;X.indicator==="^/2"?(be=X.args[0].variables(),X=X.args[1]):be=[],be=be.concat(J.variables());for(var Re=X.variables().filter(function(br){return e(be,br)===-1}),at=new H("[]"),dt=Re.length-1;dt>=0;dt--)at=new H(".",[new xe(Re[dt]),at]);var jt=new H(",",[X,new H("=",[ie,new H(",",[at,J])])]),tr=w.points,St=w.session.limit,ln=w.session.format_success;w.session.format_success=function(br){return br.substitution},w.add_goal(jt,!0,S);var kr=[],mr=function(br){if(br!==!1&&br!==null&&!b.type.is_error(br)){w.__calls.unshift(mr);var Kr=!1,Kn=br.links[ie.id].args[0],Ms=br.links[ie.id].args[1];for(var Ri in kr)if(kr.hasOwnProperty(Ri)){var gs=kr[Ri];if(gs.variables.equals(Kn)){gs.answers.push(Ms),Kr=!0;break}}Kr||kr.push({variables:Kn,answers:[Ms]}),w.session.limit=w.current_limit}else if(w.points=tr,w.session.limit=St,w.session.format_success=ln,b.type.is_error(br))w.throw_error(br.args[0]);else if(w.current_limit>0){for(var io=[],Pi=0;Pi=0;so--)Os=new H(".",[br[so],Os]);io.push(new ke(S.goal.replace(new H(",",[new H("=",[at,kr[Pi].variables]),new H("=",[$,Os])])),S.substitution,S))}w.prepend(io)}};w.__calls.unshift(mr)}},"functor/3":function(w,S,y){var F,J=y.args[0],X=y.args[1],$=y.args[2];if(b.type.is_variable(J)&&(b.type.is_variable(X)||b.type.is_variable($)))w.throw_error(b.error.instantiation("functor/3"));else if(!b.type.is_variable($)&&!b.type.is_integer($))w.throw_error(b.error.type("integer",y.args[2],"functor/3"));else if(!b.type.is_variable(X)&&!b.type.is_atomic(X))w.throw_error(b.error.type("atomic",y.args[1],"functor/3"));else if(b.type.is_integer(X)&&b.type.is_integer($)&&$.value!==0)w.throw_error(b.error.type("atom",y.args[1],"functor/3"));else 
if(b.type.is_variable(J)){if(y.args[2].value>=0){for(var ie=[],be=0;be<$.value;be++)ie.push(w.next_free_variable());var Re=b.type.is_integer(X)?X:new H(X.id,ie);w.prepend([new ke(S.goal.replace(new H("=",[J,Re])),S.substitution,S)])}}else{var at=b.type.is_integer(J)?J:new H(J.id,[]),dt=b.type.is_integer(J)?new Ne(0,!1):new Ne(J.args.length,!1),jt=new H(",",[new H("=",[at,X]),new H("=",[dt,$])]);w.prepend([new ke(S.goal.replace(jt),S.substitution,S)])}},"arg/3":function(w,S,y){if(b.type.is_variable(y.args[0])||b.type.is_variable(y.args[1]))w.throw_error(b.error.instantiation(y.indicator));else if(y.args[0].value<0)w.throw_error(b.error.domain("not_less_than_zero",y.args[0],y.indicator));else if(!b.type.is_compound(y.args[1]))w.throw_error(b.error.type("compound",y.args[1],y.indicator));else{var F=y.args[0].value;if(F>0&&F<=y.args[1].args.length){var J=new H("=",[y.args[1].args[F-1],y.args[2]]);w.prepend([new ke(S.goal.replace(J),S.substitution,S)])}}},"=../2":function(w,S,y){var F;if(b.type.is_variable(y.args[0])&&(b.type.is_variable(y.args[1])||b.type.is_non_empty_list(y.args[1])&&b.type.is_variable(y.args[1].args[0])))w.throw_error(b.error.instantiation(y.indicator));else if(!b.type.is_fully_list(y.args[1]))w.throw_error(b.error.type("list",y.args[1],y.indicator));else if(b.type.is_variable(y.args[0])){if(!b.type.is_variable(y.args[1])){var X=[];for(F=y.args[1].args[1];F.indicator==="./2";)X.push(F.args[0]),F=F.args[1];b.type.is_variable(y.args[0])&&b.type.is_variable(F)?w.throw_error(b.error.instantiation(y.indicator)):X.length===0&&b.type.is_compound(y.args[1].args[0])?w.throw_error(b.error.type("atomic",y.args[1].args[0],y.indicator)):X.length>0&&(b.type.is_compound(y.args[1].args[0])||b.type.is_number(y.args[1].args[0]))?w.throw_error(b.error.type("atom",y.args[1].args[0],y.indicator)):X.length===0?w.prepend([new ke(S.goal.replace(new H("=",[y.args[1].args[0],y.args[0]],S)),S.substitution,S)]):w.prepend([new ke(S.goal.replace(new H("=",[new H(y.args[1].args[0].id,X),y.args[0]])),S.substitution,S)])}}else{if(b.type.is_atomic(y.args[0]))F=new H(".",[y.args[0],new H("[]")]);else{F=new H("[]");for(var J=y.args[0].args.length-1;J>=0;J--)F=new H(".",[y.args[0].args[J],F]);F=new H(".",[new H(y.args[0].id),F])}w.prepend([new ke(S.goal.replace(new H("=",[F,y.args[1]])),S.substitution,S)])}},"copy_term/2":function(w,S,y){var F=y.args[0].rename(w);w.prepend([new ke(S.goal.replace(new H("=",[F,y.args[1]])),S.substitution,S.parent)])},"term_variables/2":function(w,S,y){var F=y.args[0],J=y.args[1];if(!b.type.is_fully_list(J))w.throw_error(b.error.type("list",J,y.indicator));else{var X=g(o(we(F.variables()),function($){return new xe($)}));w.prepend([new ke(S.goal.replace(new H("=",[J,X])),S.substitution,S)])}},"clause/2":function(w,S,y){if(b.type.is_variable(y.args[0]))w.throw_error(b.error.instantiation(y.indicator));else if(!b.type.is_callable(y.args[0]))w.throw_error(b.error.type("callable",y.args[0],y.indicator));else if(!b.type.is_variable(y.args[1])&&!b.type.is_callable(y.args[1]))w.throw_error(b.error.type("callable",y.args[1],y.indicator));else if(w.session.rules[y.args[0].indicator]!==void 0)if(w.is_public_predicate(y.args[0].indicator)){var F=[];for(var J in w.session.rules[y.args[0].indicator])if(w.session.rules[y.args[0].indicator].hasOwnProperty(J)){var X=w.session.rules[y.args[0].indicator][J];w.session.renamed_variables={},X=X.rename(w),X.body===null&&(X.body=new H("true"));var $=new H(",",[new H("=",[X.head,y.args[0]]),new H("=",[X.body,y.args[1]])]);F.push(new 
ke(S.goal.replace($),S.substitution,S))}w.prepend(F)}else w.throw_error(b.error.permission("access","private_procedure",y.args[0].indicator,y.indicator))},"current_predicate/1":function(w,S,y){var F=y.args[0];if(!b.type.is_variable(F)&&(!b.type.is_compound(F)||F.indicator!=="//2"))w.throw_error(b.error.type("predicate_indicator",F,y.indicator));else if(!b.type.is_variable(F)&&!b.type.is_variable(F.args[0])&&!b.type.is_atom(F.args[0]))w.throw_error(b.error.type("atom",F.args[0],y.indicator));else if(!b.type.is_variable(F)&&!b.type.is_variable(F.args[1])&&!b.type.is_integer(F.args[1]))w.throw_error(b.error.type("integer",F.args[1],y.indicator));else{var J=[];for(var X in w.session.rules)if(w.session.rules.hasOwnProperty(X)){var $=X.lastIndexOf("/"),ie=X.substr(0,$),be=parseInt(X.substr($+1,X.length-($+1))),Re=new H("/",[new H(ie),new Ne(be,!1)]),at=new H("=",[Re,F]);J.push(new ke(S.goal.replace(at),S.substitution,S))}w.prepend(J)}},"asserta/1":function(w,S,y){if(b.type.is_variable(y.args[0]))w.throw_error(b.error.instantiation(y.indicator));else if(!b.type.is_callable(y.args[0]))w.throw_error(b.error.type("callable",y.args[0],y.indicator));else{var F,J;y.args[0].indicator===":-/2"?(F=y.args[0].args[0],J=Ee(y.args[0].args[1])):(F=y.args[0],J=null),b.type.is_callable(F)?J!==null&&!b.type.is_callable(J)?w.throw_error(b.error.type("callable",J,y.indicator)):w.is_public_predicate(F.indicator)?(w.session.rules[F.indicator]===void 0&&(w.session.rules[F.indicator]=[]),w.session.public_predicates[F.indicator]=!0,w.session.rules[F.indicator]=[new Ye(F,J,!0)].concat(w.session.rules[F.indicator]),w.success(S)):w.throw_error(b.error.permission("modify","static_procedure",F.indicator,y.indicator)):w.throw_error(b.error.type("callable",F,y.indicator))}},"assertz/1":function(w,S,y){if(b.type.is_variable(y.args[0]))w.throw_error(b.error.instantiation(y.indicator));else if(!b.type.is_callable(y.args[0]))w.throw_error(b.error.type("callable",y.args[0],y.indicator));else{var F,J;y.args[0].indicator===":-/2"?(F=y.args[0].args[0],J=Ee(y.args[0].args[1])):(F=y.args[0],J=null),b.type.is_callable(F)?J!==null&&!b.type.is_callable(J)?w.throw_error(b.error.type("callable",J,y.indicator)):w.is_public_predicate(F.indicator)?(w.session.rules[F.indicator]===void 0&&(w.session.rules[F.indicator]=[]),w.session.public_predicates[F.indicator]=!0,w.session.rules[F.indicator].push(new Ye(F,J,!0)),w.success(S)):w.throw_error(b.error.permission("modify","static_procedure",F.indicator,y.indicator)):w.throw_error(b.error.type("callable",F,y.indicator))}},"retract/1":function(w,S,y){if(b.type.is_variable(y.args[0]))w.throw_error(b.error.instantiation(y.indicator));else if(!b.type.is_callable(y.args[0]))w.throw_error(b.error.type("callable",y.args[0],y.indicator));else{var F,J;if(y.args[0].indicator===":-/2"?(F=y.args[0].args[0],J=y.args[0].args[1]):(F=y.args[0],J=new H("true")),typeof S.retract>"u")if(w.is_public_predicate(F.indicator)){if(w.session.rules[F.indicator]!==void 0){for(var X=[],$=0;$w.get_flag("max_arity").value)w.throw_error(b.error.representation("max_arity",y.indicator));else{var F=y.args[0].args[0].id+"/"+y.args[0].args[1].value;w.is_public_predicate(F)?(delete w.session.rules[F],w.success(S)):w.throw_error(b.error.permission("modify","static_procedure",F,y.indicator))}},"atom_length/2":function(w,S,y){if(b.type.is_variable(y.args[0]))w.throw_error(b.error.instantiation(y.indicator));else if(!b.type.is_atom(y.args[0]))w.throw_error(b.error.type("atom",y.args[0],y.indicator));else 
if(!b.type.is_variable(y.args[1])&&!b.type.is_integer(y.args[1]))w.throw_error(b.error.type("integer",y.args[1],y.indicator));else if(b.type.is_integer(y.args[1])&&y.args[1].value<0)w.throw_error(b.error.domain("not_less_than_zero",y.args[1],y.indicator));else{var F=new Ne(y.args[0].id.length,!1);w.prepend([new ke(S.goal.replace(new H("=",[F,y.args[1]])),S.substitution,S)])}},"atom_concat/3":function(w,S,y){var F,J,X=y.args[0],$=y.args[1],ie=y.args[2];if(b.type.is_variable(ie)&&(b.type.is_variable(X)||b.type.is_variable($)))w.throw_error(b.error.instantiation(y.indicator));else if(!b.type.is_variable(X)&&!b.type.is_atom(X))w.throw_error(b.error.type("atom",X,y.indicator));else if(!b.type.is_variable($)&&!b.type.is_atom($))w.throw_error(b.error.type("atom",$,y.indicator));else if(!b.type.is_variable(ie)&&!b.type.is_atom(ie))w.throw_error(b.error.type("atom",ie,y.indicator));else{var be=b.type.is_variable(X),Re=b.type.is_variable($);if(!be&&!Re)J=new H("=",[ie,new H(X.id+$.id)]),w.prepend([new ke(S.goal.replace(J),S.substitution,S)]);else if(be&&!Re)F=ie.id.substr(0,ie.id.length-$.id.length),F+$.id===ie.id&&(J=new H("=",[X,new H(F)]),w.prepend([new ke(S.goal.replace(J),S.substitution,S)]));else if(Re&&!be)F=ie.id.substr(X.id.length),X.id+F===ie.id&&(J=new H("=",[$,new H(F)]),w.prepend([new ke(S.goal.replace(J),S.substitution,S)]));else{for(var at=[],dt=0;dt<=ie.id.length;dt++){var jt=new H(ie.id.substr(0,dt)),tr=new H(ie.id.substr(dt));J=new H(",",[new H("=",[jt,X]),new H("=",[tr,$])]),at.push(new ke(S.goal.replace(J),S.substitution,S))}w.prepend(at)}}},"sub_atom/5":function(w,S,y){var F,J=y.args[0],X=y.args[1],$=y.args[2],ie=y.args[3],be=y.args[4];if(b.type.is_variable(J))w.throw_error(b.error.instantiation(y.indicator));else if(!b.type.is_variable(X)&&!b.type.is_integer(X))w.throw_error(b.error.type("integer",X,y.indicator));else if(!b.type.is_variable($)&&!b.type.is_integer($))w.throw_error(b.error.type("integer",$,y.indicator));else if(!b.type.is_variable(ie)&&!b.type.is_integer(ie))w.throw_error(b.error.type("integer",ie,y.indicator));else if(b.type.is_integer(X)&&X.value<0)w.throw_error(b.error.domain("not_less_than_zero",X,y.indicator));else if(b.type.is_integer($)&&$.value<0)w.throw_error(b.error.domain("not_less_than_zero",$,y.indicator));else if(b.type.is_integer(ie)&&ie.value<0)w.throw_error(b.error.domain("not_less_than_zero",ie,y.indicator));else{var Re=[],at=[],dt=[];if(b.type.is_variable(X))for(F=0;F<=J.id.length;F++)Re.push(F);else Re.push(X.value);if(b.type.is_variable($))for(F=0;F<=J.id.length;F++)at.push(F);else at.push($.value);if(b.type.is_variable(ie))for(F=0;F<=J.id.length;F++)dt.push(F);else dt.push(ie.value);var jt=[];for(var tr in Re)if(Re.hasOwnProperty(tr)){F=Re[tr];for(var St in at)if(at.hasOwnProperty(St)){var ln=at[St],kr=J.id.length-F-ln;if(e(dt,kr)!==-1&&F+ln+kr===J.id.length){var mr=J.id.substr(F,ln);if(J.id===J.id.substr(0,F)+mr+J.id.substr(F+ln,kr)){var br=new H("=",[new H(mr),be]),Kr=new H("=",[X,new Ne(F)]),Kn=new H("=",[$,new Ne(ln)]),Ms=new H("=",[ie,new Ne(kr)]),Ri=new H(",",[new H(",",[new H(",",[Kr,Kn]),Ms]),br]);jt.push(new ke(S.goal.replace(Ri),S.substitution,S))}}}}w.prepend(jt)}},"atom_chars/2":function(w,S,y){var F=y.args[0],J=y.args[1];if(b.type.is_variable(F)&&b.type.is_variable(J))w.throw_error(b.error.instantiation(y.indicator));else if(!b.type.is_variable(F)&&!b.type.is_atom(F))w.throw_error(b.error.type("atom",F,y.indicator));else if(b.type.is_variable(F)){for(var 
ie=J,be=b.type.is_variable(F),Re="";ie.indicator==="./2";){if(b.type.is_character(ie.args[0]))Re+=ie.args[0].id;else if(b.type.is_variable(ie.args[0])&&be){w.throw_error(b.error.instantiation(y.indicator));return}else if(!b.type.is_variable(ie.args[0])){w.throw_error(b.error.type("character",ie.args[0],y.indicator));return}ie=ie.args[1]}b.type.is_variable(ie)&&be?w.throw_error(b.error.instantiation(y.indicator)):!b.type.is_empty_list(ie)&&!b.type.is_variable(ie)?w.throw_error(b.error.type("list",J,y.indicator)):w.prepend([new ke(S.goal.replace(new H("=",[new H(Re),F])),S.substitution,S)])}else{for(var X=new H("[]"),$=F.id.length-1;$>=0;$--)X=new H(".",[new H(F.id.charAt($)),X]);w.prepend([new ke(S.goal.replace(new H("=",[J,X])),S.substitution,S)])}},"atom_codes/2":function(w,S,y){var F=y.args[0],J=y.args[1];if(b.type.is_variable(F)&&b.type.is_variable(J))w.throw_error(b.error.instantiation(y.indicator));else if(!b.type.is_variable(F)&&!b.type.is_atom(F))w.throw_error(b.error.type("atom",F,y.indicator));else if(b.type.is_variable(F)){for(var ie=J,be=b.type.is_variable(F),Re="";ie.indicator==="./2";){if(b.type.is_character_code(ie.args[0]))Re+=u(ie.args[0].value);else if(b.type.is_variable(ie.args[0])&&be){w.throw_error(b.error.instantiation(y.indicator));return}else if(!b.type.is_variable(ie.args[0])){w.throw_error(b.error.representation("character_code",y.indicator));return}ie=ie.args[1]}b.type.is_variable(ie)&&be?w.throw_error(b.error.instantiation(y.indicator)):!b.type.is_empty_list(ie)&&!b.type.is_variable(ie)?w.throw_error(b.error.type("list",J,y.indicator)):w.prepend([new ke(S.goal.replace(new H("=",[new H(Re),F])),S.substitution,S)])}else{for(var X=new H("[]"),$=F.id.length-1;$>=0;$--)X=new H(".",[new Ne(n(F.id,$),!1),X]);w.prepend([new ke(S.goal.replace(new H("=",[J,X])),S.substitution,S)])}},"char_code/2":function(w,S,y){var F=y.args[0],J=y.args[1];if(b.type.is_variable(F)&&b.type.is_variable(J))w.throw_error(b.error.instantiation(y.indicator));else if(!b.type.is_variable(F)&&!b.type.is_character(F))w.throw_error(b.error.type("character",F,y.indicator));else if(!b.type.is_variable(J)&&!b.type.is_integer(J))w.throw_error(b.error.type("integer",J,y.indicator));else if(!b.type.is_variable(J)&&!b.type.is_character_code(J))w.throw_error(b.error.representation("character_code",y.indicator));else if(b.type.is_variable(J)){var X=new Ne(n(F.id,0),!1);w.prepend([new ke(S.goal.replace(new H("=",[X,J])),S.substitution,S)])}else{var $=new H(u(J.value));w.prepend([new ke(S.goal.replace(new H("=",[$,F])),S.substitution,S)])}},"number_chars/2":function(w,S,y){var F,J=y.args[0],X=y.args[1];if(b.type.is_variable(J)&&b.type.is_variable(X))w.throw_error(b.error.instantiation(y.indicator));else if(!b.type.is_variable(J)&&!b.type.is_number(J))w.throw_error(b.error.type("number",J,y.indicator));else if(!b.type.is_variable(X)&&!b.type.is_list(X))w.throw_error(b.error.type("list",X,y.indicator));else{var $=b.type.is_variable(J);if(!b.type.is_variable(X)){var ie=X,be=!0;for(F="";ie.indicator==="./2";){if(b.type.is_character(ie.args[0]))F+=ie.args[0].id;else if(b.type.is_variable(ie.args[0]))be=!1;else if(!b.type.is_variable(ie.args[0])){w.throw_error(b.error.type("character",ie.args[0],y.indicator));return}ie=ie.args[1]}if(be=be&&b.type.is_empty_list(ie),!b.type.is_empty_list(ie)&&!b.type.is_variable(ie)){w.throw_error(b.error.type("list",X,y.indicator));return}if(!be&&$){w.throw_error(b.error.instantiation(y.indicator));return}else 
if(be)if(b.type.is_variable(ie)&&$){w.throw_error(b.error.instantiation(y.indicator));return}else{var Re=w.parse(F),at=Re.value;!b.type.is_number(at)||Re.tokens[Re.tokens.length-1].space?w.throw_error(b.error.syntax_by_predicate("parseable_number",y.indicator)):w.prepend([new ke(S.goal.replace(new H("=",[J,at])),S.substitution,S)]);return}}if(!$){F=J.toString();for(var dt=new H("[]"),jt=F.length-1;jt>=0;jt--)dt=new H(".",[new H(F.charAt(jt)),dt]);w.prepend([new ke(S.goal.replace(new H("=",[X,dt])),S.substitution,S)])}}},"number_codes/2":function(w,S,y){var F,J=y.args[0],X=y.args[1];if(b.type.is_variable(J)&&b.type.is_variable(X))w.throw_error(b.error.instantiation(y.indicator));else if(!b.type.is_variable(J)&&!b.type.is_number(J))w.throw_error(b.error.type("number",J,y.indicator));else if(!b.type.is_variable(X)&&!b.type.is_list(X))w.throw_error(b.error.type("list",X,y.indicator));else{var $=b.type.is_variable(J);if(!b.type.is_variable(X)){var ie=X,be=!0;for(F="";ie.indicator==="./2";){if(b.type.is_character_code(ie.args[0]))F+=u(ie.args[0].value);else if(b.type.is_variable(ie.args[0]))be=!1;else if(!b.type.is_variable(ie.args[0])){w.throw_error(b.error.type("character_code",ie.args[0],y.indicator));return}ie=ie.args[1]}if(be=be&&b.type.is_empty_list(ie),!b.type.is_empty_list(ie)&&!b.type.is_variable(ie)){w.throw_error(b.error.type("list",X,y.indicator));return}if(!be&&$){w.throw_error(b.error.instantiation(y.indicator));return}else if(be)if(b.type.is_variable(ie)&&$){w.throw_error(b.error.instantiation(y.indicator));return}else{var Re=w.parse(F),at=Re.value;!b.type.is_number(at)||Re.tokens[Re.tokens.length-1].space?w.throw_error(b.error.syntax_by_predicate("parseable_number",y.indicator)):w.prepend([new ke(S.goal.replace(new H("=",[J,at])),S.substitution,S)]);return}}if(!$){F=J.toString();for(var dt=new H("[]"),jt=F.length-1;jt>=0;jt--)dt=new H(".",[new Ne(n(F,jt),!1),dt]);w.prepend([new ke(S.goal.replace(new H("=",[X,dt])),S.substitution,S)])}}},"upcase_atom/2":function(w,S,y){var F=y.args[0],J=y.args[1];b.type.is_variable(F)?w.throw_error(b.error.instantiation(y.indicator)):b.type.is_atom(F)?!b.type.is_variable(J)&&!b.type.is_atom(J)?w.throw_error(b.error.type("atom",J,y.indicator)):w.prepend([new ke(S.goal.replace(new H("=",[J,new H(F.id.toUpperCase(),[])])),S.substitution,S)]):w.throw_error(b.error.type("atom",F,y.indicator))},"downcase_atom/2":function(w,S,y){var F=y.args[0],J=y.args[1];b.type.is_variable(F)?w.throw_error(b.error.instantiation(y.indicator)):b.type.is_atom(F)?!b.type.is_variable(J)&&!b.type.is_atom(J)?w.throw_error(b.error.type("atom",J,y.indicator)):w.prepend([new ke(S.goal.replace(new H("=",[J,new H(F.id.toLowerCase(),[])])),S.substitution,S)]):w.throw_error(b.error.type("atom",F,y.indicator))},"atomic_list_concat/2":function(w,S,y){var F=y.args[0],J=y.args[1];w.prepend([new ke(S.goal.replace(new H("atomic_list_concat",[F,new H("",[]),J])),S.substitution,S)])},"atomic_list_concat/3":function(w,S,y){var F=y.args[0],J=y.args[1],X=y.args[2];if(b.type.is_variable(J)||b.type.is_variable(F)&&b.type.is_variable(X))w.throw_error(b.error.instantiation(y.indicator));else if(!b.type.is_variable(F)&&!b.type.is_list(F))w.throw_error(b.error.type("list",F,y.indicator));else if(!b.type.is_variable(X)&&!b.type.is_atom(X))w.throw_error(b.error.type("atom",X,y.indicator));else if(b.type.is_variable(X)){for(var 
ie="",be=F;b.type.is_term(be)&&be.indicator==="./2";){if(!b.type.is_atom(be.args[0])&&!b.type.is_number(be.args[0])){w.throw_error(b.error.type("atomic",be.args[0],y.indicator));return}ie!==""&&(ie+=J.id),b.type.is_atom(be.args[0])?ie+=be.args[0].id:ie+=""+be.args[0].value,be=be.args[1]}ie=new H(ie,[]),b.type.is_variable(be)?w.throw_error(b.error.instantiation(y.indicator)):!b.type.is_term(be)||be.indicator!=="[]/0"?w.throw_error(b.error.type("list",F,y.indicator)):w.prepend([new ke(S.goal.replace(new H("=",[ie,X])),S.substitution,S)])}else{var $=g(o(X.id.split(J.id),function(Re){return new H(Re,[])}));w.prepend([new ke(S.goal.replace(new H("=",[$,F])),S.substitution,S)])}},"@=/2":function(w,S,y){b.compare(y.args[0],y.args[1])>0&&w.success(S)},"@>=/2":function(w,S,y){b.compare(y.args[0],y.args[1])>=0&&w.success(S)},"compare/3":function(w,S,y){var F=y.args[0],J=y.args[1],X=y.args[2];if(!b.type.is_variable(F)&&!b.type.is_atom(F))w.throw_error(b.error.type("atom",F,y.indicator));else if(b.type.is_atom(F)&&["<",">","="].indexOf(F.id)===-1)w.throw_error(b.type.domain("order",F,y.indicator));else{var $=b.compare(J,X);$=$===0?"=":$===-1?"<":">",w.prepend([new ke(S.goal.replace(new H("=",[F,new H($,[])])),S.substitution,S)])}},"is/2":function(w,S,y){var F=y.args[1].interpret(w);b.type.is_number(F)?w.prepend([new ke(S.goal.replace(new H("=",[y.args[0],F],w.level)),S.substitution,S)]):w.throw_error(F)},"between/3":function(w,S,y){var F=y.args[0],J=y.args[1],X=y.args[2];if(b.type.is_variable(F)||b.type.is_variable(J))w.throw_error(b.error.instantiation(y.indicator));else if(!b.type.is_integer(F))w.throw_error(b.error.type("integer",F,y.indicator));else if(!b.type.is_integer(J))w.throw_error(b.error.type("integer",J,y.indicator));else if(!b.type.is_variable(X)&&!b.type.is_integer(X))w.throw_error(b.error.type("integer",X,y.indicator));else if(b.type.is_variable(X)){var $=[new ke(S.goal.replace(new H("=",[X,F])),S.substitution,S)];F.value=X.value&&w.success(S)},"succ/2":function(w,S,y){var F=y.args[0],J=y.args[1];b.type.is_variable(F)&&b.type.is_variable(J)?w.throw_error(b.error.instantiation(y.indicator)):!b.type.is_variable(F)&&!b.type.is_integer(F)?w.throw_error(b.error.type("integer",F,y.indicator)):!b.type.is_variable(J)&&!b.type.is_integer(J)?w.throw_error(b.error.type("integer",J,y.indicator)):!b.type.is_variable(F)&&F.value<0?w.throw_error(b.error.domain("not_less_than_zero",F,y.indicator)):!b.type.is_variable(J)&&J.value<0?w.throw_error(b.error.domain("not_less_than_zero",J,y.indicator)):(b.type.is_variable(J)||J.value>0)&&(b.type.is_variable(F)?w.prepend([new ke(S.goal.replace(new H("=",[F,new Ne(J.value-1,!1)])),S.substitution,S)]):w.prepend([new ke(S.goal.replace(new H("=",[J,new Ne(F.value+1,!1)])),S.substitution,S)]))},"=:=/2":function(w,S,y){var F=b.arithmetic_compare(w,y.args[0],y.args[1]);b.type.is_term(F)?w.throw_error(F):F===0&&w.success(S)},"=\\=/2":function(w,S,y){var F=b.arithmetic_compare(w,y.args[0],y.args[1]);b.type.is_term(F)?w.throw_error(F):F!==0&&w.success(S)},"/2":function(w,S,y){var F=b.arithmetic_compare(w,y.args[0],y.args[1]);b.type.is_term(F)?w.throw_error(F):F>0&&w.success(S)},">=/2":function(w,S,y){var 
F=b.arithmetic_compare(w,y.args[0],y.args[1]);b.type.is_term(F)?w.throw_error(F):F>=0&&w.success(S)},"var/1":function(w,S,y){b.type.is_variable(y.args[0])&&w.success(S)},"atom/1":function(w,S,y){b.type.is_atom(y.args[0])&&w.success(S)},"atomic/1":function(w,S,y){b.type.is_atomic(y.args[0])&&w.success(S)},"compound/1":function(w,S,y){b.type.is_compound(y.args[0])&&w.success(S)},"integer/1":function(w,S,y){b.type.is_integer(y.args[0])&&w.success(S)},"float/1":function(w,S,y){b.type.is_float(y.args[0])&&w.success(S)},"number/1":function(w,S,y){b.type.is_number(y.args[0])&&w.success(S)},"nonvar/1":function(w,S,y){b.type.is_variable(y.args[0])||w.success(S)},"ground/1":function(w,S,y){y.variables().length===0&&w.success(S)},"acyclic_term/1":function(w,S,y){for(var F=S.substitution.apply(S.substitution),J=y.args[0].variables(),X=0;X0?St[St.length-1]:null,St!==null&&(jt=z(w,St,0,w.__get_max_priority(),!1))}if(jt.type===p&&jt.len===St.length-1&&ln.value==="."){jt=jt.value.rename(w);var kr=new H("=",[J,jt]);if(ie.variables){var mr=g(o(we(jt.variables()),function(br){return new xe(br)}));kr=new H(",",[kr,new H("=",[ie.variables,mr])])}if(ie.variable_names){var mr=g(o(we(jt.variables()),function(Kr){var Kn;for(Kn in w.session.renamed_variables)if(w.session.renamed_variables.hasOwnProperty(Kn)&&w.session.renamed_variables[Kn]===Kr)break;return new H("=",[new H(Kn,[]),new xe(Kr)])}));kr=new H(",",[kr,new H("=",[ie.variable_names,mr])])}if(ie.singletons){var mr=g(o(new Ye(jt,null).singleton_variables(),function(Kr){var Kn;for(Kn in w.session.renamed_variables)if(w.session.renamed_variables.hasOwnProperty(Kn)&&w.session.renamed_variables[Kn]===Kr)break;return new H("=",[new H(Kn,[]),new xe(Kr)])}));kr=new H(",",[kr,new H("=",[ie.singletons,mr])])}w.prepend([new ke(S.goal.replace(kr),S.substitution,S)])}else jt.type===p?w.throw_error(b.error.syntax(St[jt.len],"unexpected token",!1)):w.throw_error(jt.value)}}},"write/1":function(w,S,y){var F=y.args[0];w.prepend([new ke(S.goal.replace(new H(",",[new H("current_output",[new xe("S")]),new H("write",[new xe("S"),F])])),S.substitution,S)])},"write/2":function(w,S,y){var F=y.args[0],J=y.args[1];w.prepend([new ke(S.goal.replace(new H("write_term",[F,J,new H(".",[new H("quoted",[new H("false",[])]),new H(".",[new H("ignore_ops",[new H("false")]),new H(".",[new H("numbervars",[new H("true")]),new H("[]",[])])])])])),S.substitution,S)])},"writeq/1":function(w,S,y){var F=y.args[0];w.prepend([new ke(S.goal.replace(new H(",",[new H("current_output",[new xe("S")]),new H("writeq",[new xe("S"),F])])),S.substitution,S)])},"writeq/2":function(w,S,y){var F=y.args[0],J=y.args[1];w.prepend([new ke(S.goal.replace(new H("write_term",[F,J,new H(".",[new H("quoted",[new H("true",[])]),new H(".",[new H("ignore_ops",[new H("false")]),new H(".",[new H("numbervars",[new H("true")]),new H("[]",[])])])])])),S.substitution,S)])},"write_canonical/1":function(w,S,y){var F=y.args[0];w.prepend([new ke(S.goal.replace(new H(",",[new H("current_output",[new xe("S")]),new H("write_canonical",[new xe("S"),F])])),S.substitution,S)])},"write_canonical/2":function(w,S,y){var F=y.args[0],J=y.args[1];w.prepend([new ke(S.goal.replace(new H("write_term",[F,J,new H(".",[new H("quoted",[new H("true",[])]),new H(".",[new H("ignore_ops",[new H("true")]),new H(".",[new H("numbervars",[new H("false")]),new H("[]",[])])])])])),S.substitution,S)])},"write_term/2":function(w,S,y){var F=y.args[0],J=y.args[1];w.prepend([new ke(S.goal.replace(new H(",",[new H("current_output",[new xe("S")]),new H("write_term",[new 
xe("S"),F,J])])),S.substitution,S)])},"write_term/3":function(w,S,y){var F=y.args[0],J=y.args[1],X=y.args[2],$=b.type.is_stream(F)?F:w.get_stream_by_alias(F.id);if(b.type.is_variable(F)||b.type.is_variable(X))w.throw_error(b.error.instantiation(y.indicator));else if(!b.type.is_list(X))w.throw_error(b.error.type("list",X,y.indicator));else if(!b.type.is_stream(F)&&!b.type.is_atom(F))w.throw_error(b.error.domain("stream_or_alias",F,y.indicator));else if(!b.type.is_stream($)||$.stream===null)w.throw_error(b.error.existence("stream",F,y.indicator));else if($.input)w.throw_error(b.error.permission("output","stream",F,y.indicator));else if($.type==="binary")w.throw_error(b.error.permission("output","binary_stream",F,y.indicator));else if($.position==="past_end_of_stream"&&$.eof_action==="error")w.throw_error(b.error.permission("output","past_end_of_stream",F,y.indicator));else{for(var ie={},be=X,Re;b.type.is_term(be)&&be.indicator==="./2";){if(Re=be.args[0],b.type.is_variable(Re)){w.throw_error(b.error.instantiation(y.indicator));return}else if(!b.type.is_write_option(Re)){w.throw_error(b.error.domain("write_option",Re,y.indicator));return}ie[Re.id]=Re.args[0].id==="true",be=be.args[1]}if(be.indicator!=="[]/0"){b.type.is_variable(be)?w.throw_error(b.error.instantiation(y.indicator)):w.throw_error(b.error.type("list",X,y.indicator));return}else{ie.session=w.session;var at=J.toString(ie);$.stream.put(at,$.position),typeof $.position=="number"&&($.position+=at.length),w.success(S)}}},"halt/0":function(w,S,y){w.points=[]},"halt/1":function(w,S,y){var F=y.args[0];b.type.is_variable(F)?w.throw_error(b.error.instantiation(y.indicator)):b.type.is_integer(F)?w.points=[]:w.throw_error(b.error.type("integer",F,y.indicator))},"current_prolog_flag/2":function(w,S,y){var F=y.args[0],J=y.args[1];if(!b.type.is_variable(F)&&!b.type.is_atom(F))w.throw_error(b.error.type("atom",F,y.indicator));else if(!b.type.is_variable(F)&&!b.type.is_flag(F))w.throw_error(b.error.domain("prolog_flag",F,y.indicator));else{var X=[];for(var $ in b.flag)if(b.flag.hasOwnProperty($)){var ie=new H(",",[new H("=",[new H($),F]),new H("=",[w.get_flag($),J])]);X.push(new ke(S.goal.replace(ie),S.substitution,S))}w.prepend(X)}},"set_prolog_flag/2":function(w,S,y){var F=y.args[0],J=y.args[1];b.type.is_variable(F)||b.type.is_variable(J)?w.throw_error(b.error.instantiation(y.indicator)):b.type.is_atom(F)?b.type.is_flag(F)?b.type.is_value_flag(F,J)?b.type.is_modifiable_flag(F)?(w.session.flag[F.id]=J,w.success(S)):w.throw_error(b.error.permission("modify","flag",F)):w.throw_error(b.error.domain("flag_value",new H("+",[F,J]),y.indicator)):w.throw_error(b.error.domain("prolog_flag",F,y.indicator)):w.throw_error(b.error.type("atom",F,y.indicator))}},flag:{bounded:{allowed:[new H("true"),new H("false")],value:new H("true"),changeable:!1},max_integer:{allowed:[new Ne(Number.MAX_SAFE_INTEGER)],value:new Ne(Number.MAX_SAFE_INTEGER),changeable:!1},min_integer:{allowed:[new Ne(Number.MIN_SAFE_INTEGER)],value:new Ne(Number.MIN_SAFE_INTEGER),changeable:!1},integer_rounding_function:{allowed:[new H("down"),new H("toward_zero")],value:new H("toward_zero"),changeable:!1},char_conversion:{allowed:[new H("on"),new H("off")],value:new H("on"),changeable:!0},debug:{allowed:[new H("on"),new H("off")],value:new H("off"),changeable:!0},max_arity:{allowed:[new H("unbounded")],value:new H("unbounded"),changeable:!1},unknown:{allowed:[new H("error"),new H("fail"),new H("warning")],value:new H("error"),changeable:!0},double_quotes:{allowed:[new H("chars"),new 
H("codes"),new H("atom")],value:new H("codes"),changeable:!0},occurs_check:{allowed:[new H("false"),new H("true")],value:new H("false"),changeable:!0},dialect:{allowed:[new H("tau")],value:new H("tau"),changeable:!1},version_data:{allowed:[new H("tau",[new Ne(t.major,!1),new Ne(t.minor,!1),new Ne(t.patch,!1),new H(t.status)])],value:new H("tau",[new Ne(t.major,!1),new Ne(t.minor,!1),new Ne(t.patch,!1),new H(t.status)]),changeable:!1},nodejs:{allowed:[new H("yes"),new H("no")],value:new H(typeof gl<"u"&&gl.exports?"yes":"no"),changeable:!1}},unify:function(w,S,y){y=y===void 0?!1:y;for(var F=[{left:w,right:S}],J={};F.length!==0;){var X=F.pop();if(w=X.left,S=X.right,b.type.is_term(w)&&b.type.is_term(S)){if(w.indicator!==S.indicator)return null;for(var $=0;$J.value?1:0:J}else return F},operate:function(w,S){if(b.type.is_operator(S)){for(var y=b.type.is_operator(S),F=[],J,X=!1,$=0;$w.get_flag("max_integer").value||J0?w.start+w.matches[0].length:w.start,J=y?new H("token_not_found"):new H("found",[new H(w.value.toString())]),X=new H(".",[new H("line",[new Ne(w.line+1)]),new H(".",[new H("column",[new Ne(F+1)]),new H(".",[J,new H("[]",[])])])]);return new H("error",[new H("syntax_error",[new H(S)]),X])},syntax_by_predicate:function(w,S){return new H("error",[new H("syntax_error",[new H(w)]),Z(S)])}},warning:{singleton:function(w,S,y){for(var F=new H("[]"),J=w.length-1;J>=0;J--)F=new H(".",[new xe(w[J]),F]);return new H("warning",[new H("singleton_variables",[F,Z(S)]),new H(".",[new H("line",[new Ne(y,!1)]),new H("[]")])])},failed_goal:function(w,S){return new H("warning",[new H("failed_goal",[w]),new H(".",[new H("line",[new Ne(S,!1)]),new H("[]")])])}},format_variable:function(w){return"_"+w},format_answer:function(w,S,F){S instanceof Se&&(S=S.thread);var F=F||{};if(F.session=S?S.session:void 0,b.type.is_error(w))return"uncaught exception: "+w.args[0].toString();if(w===!1)return"false.";if(w===null)return"limit exceeded ;";var J=0,X="";if(b.type.is_substitution(w)){var $=w.domain(!0);w=w.filter(function(Re,at){return!b.type.is_variable(at)||$.indexOf(at.id)!==-1&&Re!==at.id})}for(var ie in w.links)w.links.hasOwnProperty(ie)&&(J++,X!==""&&(X+=", "),X+=ie.toString(F)+" = "+w.links[ie].toString(F));var be=typeof S>"u"||S.points.length>0?" 
;":".";return J===0?"true"+be:X+be},flatten_error:function(w){if(!b.type.is_error(w))return null;w=w.args[0];var S={};return S.type=w.args[0].id,S.thrown=S.type==="syntax_error"?null:w.args[1].id,S.expected=null,S.found=null,S.representation=null,S.existence=null,S.existence_type=null,S.line=null,S.column=null,S.permission_operation=null,S.permission_type=null,S.evaluation_type=null,S.type==="type_error"||S.type==="domain_error"?(S.expected=w.args[0].args[0].id,S.found=w.args[0].args[1].toString()):S.type==="syntax_error"?w.args[1].indicator==="./2"?(S.expected=w.args[0].args[0].id,S.found=w.args[1].args[1].args[1].args[0],S.found=S.found.id==="token_not_found"?S.found.id:S.found.args[0].id,S.line=w.args[1].args[0].args[0].value,S.column=w.args[1].args[1].args[0].args[0].value):S.thrown=w.args[1].id:S.type==="permission_error"?(S.found=w.args[0].args[2].toString(),S.permission_operation=w.args[0].args[0].id,S.permission_type=w.args[0].args[1].id):S.type==="evaluation_error"?S.evaluation_type=w.args[0].args[0].id:S.type==="representation_error"?S.representation=w.args[0].args[0].id:S.type==="existence_error"&&(S.existence=w.args[0].args[1].toString(),S.existence_type=w.args[0].args[0].id),S},create:function(w){return new b.type.Session(w)}};typeof gl<"u"?gl.exports=b:window.pl=b})()});function cme(t,e,r){t.prepend(r.map(o=>new La.default.type.State(e.goal.replace(o),e.substitution,e)))}function fH(t){let e=Ame.get(t.session);if(e==null)throw new Error("Assertion failed: A project should have been registered for the active session");return e}function fme(t,e){Ame.set(t,e),t.consult(`:- use_module(library(${rdt.id})).`)}var pH,La,ume,Qh,edt,tdt,Ame,rdt,pme=Et(()=>{Ge();pH=Ze(e2()),La=Ze(AH()),ume=Ze(ve("vm")),{is_atom:Qh,is_variable:edt,is_instantiated_list:tdt}=La.default.type;Ame=new WeakMap;rdt=new La.default.type.Module("constraints",{"project_workspaces_by_descriptor/3":(t,e,r)=>{let[o,a,n]=r.args;if(!Qh(o)||!Qh(a)){t.throw_error(La.default.error.instantiation(r.indicator));return}let u=G.parseIdent(o.id),A=G.makeDescriptor(u,a.id),h=fH(t).tryWorkspaceByDescriptor(A);edt(n)&&h!==null&&cme(t,e,[new La.default.type.Term("=",[n,new La.default.type.Term(String(h.relativeCwd))])]),Qh(n)&&h!==null&&h.relativeCwd===n.id&&t.success(e)},"workspace_field/3":(t,e,r)=>{let[o,a,n]=r.args;if(!Qh(o)||!Qh(a)){t.throw_error(La.default.error.instantiation(r.indicator));return}let A=fH(t).tryWorkspaceByCwd(o.id);if(A==null)return;let p=(0,pH.default)(A.manifest.raw,a.id);typeof p>"u"||cme(t,e,[new La.default.type.Term("=",[n,new La.default.type.Term(typeof p=="object"?JSON.stringify(p):p)])])},"workspace_field_test/3":(t,e,r)=>{let[o,a,n]=r.args;t.prepend([new La.default.type.State(e.goal.replace(new La.default.type.Term("workspace_field_test",[o,a,n,new La.default.type.Term("[]",[])])),e.substitution,e)])},"workspace_field_test/4":(t,e,r)=>{let[o,a,n,u]=r.args;if(!Qh(o)||!Qh(a)||!Qh(n)||!tdt(u)){t.throw_error(La.default.error.instantiation(r.indicator));return}let p=fH(t).tryWorkspaceByCwd(o.id);if(p==null)return;let h=(0,pH.default)(p.manifest.raw,a.id);if(typeof h>"u")return;let E={$$:h};for(let[v,x]of u.toJavaScript().entries())E[`$${v}`]=x;ume.default.runInNewContext(n.id,E)&&t.success(e)}},["project_workspaces_by_descriptor/3","workspace_field/3","workspace_field_test/3","workspace_field_test/4"])});var A2={};Vt(A2,{Constraints:()=>gH,DependencyType:()=>mme});function to(t){if(t instanceof NE.default.type.Num)return t.value;if(t instanceof NE.default.type.Term)switch(t.indicator){case"throw/1":return 
to(t.args[0]);case"error/1":return to(t.args[0]);case"error/2":if(t.args[0]instanceof NE.default.type.Term&&t.args[0].indicator==="syntax_error/1")return Object.assign(to(t.args[0]),...to(t.args[1]));{let e=to(t.args[0]);return e.message+=` (in ${to(t.args[1])})`,e}case"syntax_error/1":return new Jt(43,`Syntax error: ${to(t.args[0])}`);case"existence_error/2":return new Jt(44,`Existence error: ${to(t.args[0])} ${to(t.args[1])} not found`);case"instantiation_error/0":return new Jt(75,"Instantiation error: an argument is variable when an instantiated argument was expected");case"line/1":return{line:to(t.args[0])};case"column/1":return{column:to(t.args[0])};case"found/1":return{found:to(t.args[0])};case"./2":return[to(t.args[0])].concat(to(t.args[1]));case"//2":return`${to(t.args[0])}/${to(t.args[1])}`;default:return t.id}throw`couldn't pretty print because of unsupported node ${t}`}function gme(t){let e;try{e=to(t)}catch(r){throw typeof r=="string"?new Jt(42,`Unknown error: ${t} (note: ${r})`):r}return typeof e.line<"u"&&typeof e.column<"u"&&(e.message+=` at line ${e.line}, column ${e.column}`),e}function Gg(t){return t.id==="null"?null:`${t.toJavaScript()}`}function ndt(t){if(t.id==="null")return null;{let e=t.toJavaScript();if(typeof e!="string")return JSON.stringify(e);try{return JSON.stringify(JSON.parse(e))}catch{return JSON.stringify(e)}}}function Fh(t){return typeof t=="string"?`'${t}'`:"[]"}var dme,NE,mme,hme,hH,gH,f2=Et(()=>{Ge();Ge();Pt();dme=Ze(Kde()),NE=Ze(AH());l2();pme();(0,dme.default)(NE.default);mme=(o=>(o.Dependencies="dependencies",o.DevDependencies="devDependencies",o.PeerDependencies="peerDependencies",o))(mme||{}),hme=["dependencies","devDependencies","peerDependencies"];hH=class{constructor(e,r){let o=1e3*e.workspaces.length;this.session=NE.default.create(o),fme(this.session,e),this.session.consult(":- use_module(library(lists))."),this.session.consult(r)}fetchNextAnswer(){return new Promise(e=>{this.session.answer(r=>{e(r)})})}async*makeQuery(e){let r=this.session.query(e);if(r!==!0)throw gme(r);for(;;){let o=await this.fetchNextAnswer();if(o===null)throw new Jt(79,"Resolution limit exceeded");if(!o)break;if(o.id==="throw")throw gme(o);yield o}}};gH=class t{constructor(e){this.source="";this.project=e;let r=e.configuration.get("constraintsPath");oe.existsSync(r)&&(this.source=oe.readFileSync(r,"utf8"))}static async find(e){return new t(e)}getProjectDatabase(){let e="";for(let r of hme)e+=`dependency_type(${r}). +`;for(let r of this.project.workspacesByCwd.values()){let o=r.relativeCwd;e+=`workspace(${Fh(o)}). +`,e+=`workspace_ident(${Fh(o)}, ${Fh(G.stringifyIdent(r.anchoredLocator))}). +`,e+=`workspace_version(${Fh(o)}, ${Fh(r.manifest.version)}). +`;for(let a of hme)for(let n of r.manifest[a].values())e+=`workspace_has_dependency(${Fh(o)}, ${Fh(G.stringifyIdent(n))}, ${Fh(n.range)}, ${a}). +`}return e+=`workspace(_) :- false. +`,e+=`workspace_ident(_, _) :- false. +`,e+=`workspace_version(_, _) :- false. +`,e+=`workspace_has_dependency(_, _, _, _) :- false. +`,e}getDeclarations(){let e="";return e+=`gen_enforced_dependency(_, _, _, _) :- false. +`,e+=`gen_enforced_field(_, _, _) :- false. 
+`,e}get fullSource(){return`${this.getProjectDatabase()} +${this.source} +${this.getDeclarations()}`}createSession(){return new hH(this.project,this.fullSource)}async processClassic(){let e=this.createSession();return{enforcedDependencies:await this.genEnforcedDependencies(e),enforcedFields:await this.genEnforcedFields(e)}}async process(){let{enforcedDependencies:e,enforcedFields:r}=await this.processClassic(),o=new Map;for(let{workspace:a,dependencyIdent:n,dependencyRange:u,dependencyType:A}of e){let p=a2([A,G.stringifyIdent(n)]),h=He.getMapWithDefault(o,a.cwd);He.getMapWithDefault(h,p).set(u??void 0,new Set)}for(let{workspace:a,fieldPath:n,fieldValue:u}of r){let A=a2(n),p=He.getMapWithDefault(o,a.cwd);He.getMapWithDefault(p,A).set(JSON.parse(u)??void 0,new Set)}return{manifestUpdates:o,reportedErrors:new Map}}async genEnforcedDependencies(e){let r=[];for await(let o of e.makeQuery("workspace(WorkspaceCwd), dependency_type(DependencyType), gen_enforced_dependency(WorkspaceCwd, DependencyIdent, DependencyRange, DependencyType).")){let a=K.resolve(this.project.cwd,Gg(o.links.WorkspaceCwd)),n=Gg(o.links.DependencyIdent),u=Gg(o.links.DependencyRange),A=Gg(o.links.DependencyType);if(a===null||n===null)throw new Error("Invalid rule");let p=this.project.getWorkspaceByCwd(a),h=G.parseIdent(n);r.push({workspace:p,dependencyIdent:h,dependencyRange:u,dependencyType:A})}return He.sortMap(r,[({dependencyRange:o})=>o!==null?"0":"1",({workspace:o})=>G.stringifyIdent(o.anchoredLocator),({dependencyIdent:o})=>G.stringifyIdent(o)])}async genEnforcedFields(e){let r=[];for await(let o of e.makeQuery("workspace(WorkspaceCwd), gen_enforced_field(WorkspaceCwd, FieldPath, FieldValue).")){let a=K.resolve(this.project.cwd,Gg(o.links.WorkspaceCwd)),n=Gg(o.links.FieldPath),u=ndt(o.links.FieldValue);if(a===null||n===null)throw new Error("Invalid rule");let A=this.project.getWorkspaceByCwd(a);r.push({workspace:A,fieldPath:n,fieldValue:u})}return He.sortMap(r,[({workspace:o})=>G.stringifyIdent(o.anchoredLocator),({fieldPath:o})=>o])}async*query(e){let r=this.createSession();for await(let o of r.makeQuery(e)){let a={};for(let[n,u]of Object.entries(o.links))n!=="_"&&(a[n]=Gg(u));yield a}}}});var Pme=_(Ak=>{"use strict";Object.defineProperty(Ak,"__esModule",{value:!0});function S2(t){let e=[...t.caches],r=e.shift();return r===void 0?Dme():{get(o,a,n={miss:()=>Promise.resolve()}){return r.get(o,a,n).catch(()=>S2({caches:e}).get(o,a,n))},set(o,a){return r.set(o,a).catch(()=>S2({caches:e}).set(o,a))},delete(o){return r.delete(o).catch(()=>S2({caches:e}).delete(o))},clear(){return r.clear().catch(()=>S2({caches:e}).clear())}}}function Dme(){return{get(t,e,r={miss:()=>Promise.resolve()}){return e().then(a=>Promise.all([a,r.miss(a)])).then(([a])=>a)},set(t,e){return Promise.resolve(e)},delete(t){return Promise.resolve()},clear(){return Promise.resolve()}}}Ak.createFallbackableCache=S2;Ak.createNullCache=Dme});var bme=_((MWt,Sme)=>{Sme.exports=Pme()});var xme=_(xH=>{"use strict";Object.defineProperty(xH,"__esModule",{value:!0});function Idt(t={serializable:!0}){let e={};return{get(r,o,a={miss:()=>Promise.resolve()}){let n=JSON.stringify(r);if(n in e)return Promise.resolve(t.serializable?JSON.parse(e[n]):e[n]);let u=o(),A=a&&a.miss||(()=>Promise.resolve());return u.then(p=>A(p)).then(()=>u)},set(r,o){return e[JSON.stringify(r)]=t.serializable?JSON.stringify(o):o,Promise.resolve(o)},delete(r){return delete e[JSON.stringify(r)],Promise.resolve()},clear(){return e={},Promise.resolve()}}}xH.createInMemoryCache=Idt});var 
Qme=_((UWt,kme)=>{kme.exports=xme()});var Rme=_(eu=>{"use strict";Object.defineProperty(eu,"__esModule",{value:!0});function Bdt(t,e,r){let o={"x-algolia-api-key":r,"x-algolia-application-id":e};return{headers(){return t===kH.WithinHeaders?o:{}},queryParameters(){return t===kH.WithinQueryParameters?o:{}}}}function vdt(t){let e=0,r=()=>(e++,new Promise(o=>{setTimeout(()=>{o(t(r))},Math.min(100*e,1e3))}));return t(r)}function Fme(t,e=(r,o)=>Promise.resolve()){return Object.assign(t,{wait(r){return Fme(t.then(o=>Promise.all([e(o,r),o])).then(o=>o[1]))}})}function Ddt(t){let e=t.length-1;for(e;e>0;e--){let r=Math.floor(Math.random()*(e+1)),o=t[e];t[e]=t[r],t[r]=o}return t}function Pdt(t,e){return e&&Object.keys(e).forEach(r=>{t[r]=e[r](t)}),t}function Sdt(t,...e){let r=0;return t.replace(/%s/g,()=>encodeURIComponent(e[r++]))}var bdt="4.22.1",xdt=t=>()=>t.transporter.requester.destroy(),kH={WithinQueryParameters:0,WithinHeaders:1};eu.AuthMode=kH;eu.addMethods=Pdt;eu.createAuth=Bdt;eu.createRetryablePromise=vdt;eu.createWaitablePromise=Fme;eu.destroy=xdt;eu.encode=Sdt;eu.shuffle=Ddt;eu.version=bdt});var b2=_((HWt,Tme)=>{Tme.exports=Rme()});var Nme=_(QH=>{"use strict";Object.defineProperty(QH,"__esModule",{value:!0});var kdt={Delete:"DELETE",Get:"GET",Post:"POST",Put:"PUT"};QH.MethodEnum=kdt});var x2=_((jWt,Lme)=>{Lme.exports=Nme()});var Xme=_(Qi=>{"use strict";Object.defineProperty(Qi,"__esModule",{value:!0});var Ome=x2();function FH(t,e){let r=t||{},o=r.data||{};return Object.keys(r).forEach(a=>{["timeout","headers","queryParameters","data","cacheable"].indexOf(a)===-1&&(o[a]=r[a])}),{data:Object.entries(o).length>0?o:void 0,timeout:r.timeout||e,headers:r.headers||{},queryParameters:r.queryParameters||{},cacheable:r.cacheable}}var k2={Read:1,Write:2,Any:3},YE={Up:1,Down:2,Timeouted:3},Ume=2*60*1e3;function TH(t,e=YE.Up){return{...t,status:e,lastUpdate:Date.now()}}function _me(t){return t.status===YE.Up||Date.now()-t.lastUpdate>Ume}function Hme(t){return t.status===YE.Timeouted&&Date.now()-t.lastUpdate<=Ume}function NH(t){return typeof t=="string"?{protocol:"https",url:t,accept:k2.Any}:{protocol:t.protocol||"https",url:t.url,accept:t.accept||k2.Any}}function Qdt(t,e){return Promise.all(e.map(r=>t.get(r,()=>Promise.resolve(TH(r))))).then(r=>{let o=r.filter(A=>_me(A)),a=r.filter(A=>Hme(A)),n=[...o,...a],u=n.length>0?n.map(A=>NH(A)):e;return{getTimeout(A,p){return(a.length===0&&A===0?1:a.length+3+A)*p},statelessHosts:u}})}var Fdt=({isTimedOut:t,status:e})=>!t&&~~e===0,Rdt=t=>{let e=t.status;return t.isTimedOut||Fdt(t)||~~(e/100)!==2&&~~(e/100)!==4},Tdt=({status:t})=>~~(t/100)===2,Ndt=(t,e)=>Rdt(t)?e.onRetry(t):Tdt(t)?e.onSuccess(t):e.onFail(t);function Mme(t,e,r,o){let a=[],n=Wme(r,o),u=Kme(t,o),A=r.method,p=r.method!==Ome.MethodEnum.Get?{}:{...r.data,...o.data},h={"x-algolia-agent":t.userAgent.value,...t.queryParameters,...p,...o.queryParameters},E=0,I=(v,x)=>{let C=v.pop();if(C===void 0)throw Jme(RH(a));let R={data:n,headers:u,method:A,url:Gme(C,r.path,h),connectTimeout:x(E,t.timeouts.connect),responseTimeout:x(E,o.timeout)},L=z=>{let te={request:R,response:z,host:C,triesLeft:v.length};return a.push(te),te},U={onSuccess:z=>qme(z),onRetry(z){let te=L(z);return z.isTimedOut&&E++,Promise.all([t.logger.info("Retryable failure",LH(te)),t.hostsCache.set(C,TH(C,z.isTimedOut?YE.Timeouted:YE.Down))]).then(()=>I(v,x))},onFail(z){throw L(z),jme(z,RH(a))}};return t.requester.send(R).then(z=>Ndt(z,U))};return Qdt(t.hostsCache,e).then(v=>I([...v.statelessHosts].reverse(),v.getTimeout))}function 
Ldt(t){let{hostsCache:e,logger:r,requester:o,requestsCache:a,responsesCache:n,timeouts:u,userAgent:A,hosts:p,queryParameters:h,headers:E}=t,I={hostsCache:e,logger:r,requester:o,requestsCache:a,responsesCache:n,timeouts:u,userAgent:A,headers:E,queryParameters:h,hosts:p.map(v=>NH(v)),read(v,x){let C=FH(x,I.timeouts.read),R=()=>Mme(I,I.hosts.filter(z=>(z.accept&k2.Read)!==0),v,C);if((C.cacheable!==void 0?C.cacheable:v.cacheable)!==!0)return R();let U={request:v,mappedRequestOptions:C,transporter:{queryParameters:I.queryParameters,headers:I.headers}};return I.responsesCache.get(U,()=>I.requestsCache.get(U,()=>I.requestsCache.set(U,R()).then(z=>Promise.all([I.requestsCache.delete(U),z]),z=>Promise.all([I.requestsCache.delete(U),Promise.reject(z)])).then(([z,te])=>te)),{miss:z=>I.responsesCache.set(U,z)})},write(v,x){return Mme(I,I.hosts.filter(C=>(C.accept&k2.Write)!==0),v,FH(x,I.timeouts.write))}};return I}function Mdt(t){let e={value:`Algolia for JavaScript (${t})`,add(r){let o=`; ${r.segment}${r.version!==void 0?` (${r.version})`:""}`;return e.value.indexOf(o)===-1&&(e.value=`${e.value}${o}`),e}};return e}function qme(t){try{return JSON.parse(t.content)}catch(e){throw zme(e.message,t)}}function jme({content:t,status:e},r){let o=t;try{o=JSON.parse(t).message}catch{}return Vme(o,e,r)}function Odt(t,...e){let r=0;return t.replace(/%s/g,()=>encodeURIComponent(e[r++]))}function Gme(t,e,r){let o=Yme(r),a=`${t.protocol}://${t.url}/${e.charAt(0)==="/"?e.substr(1):e}`;return o.length&&(a+=`?${o}`),a}function Yme(t){let e=r=>Object.prototype.toString.call(r)==="[object Object]"||Object.prototype.toString.call(r)==="[object Array]";return Object.keys(t).map(r=>Odt("%s=%s",r,e(t[r])?JSON.stringify(t[r]):t[r])).join("&")}function Wme(t,e){if(t.method===Ome.MethodEnum.Get||t.data===void 0&&e.data===void 0)return;let r=Array.isArray(t.data)?t.data:{...t.data,...e.data};return JSON.stringify(r)}function Kme(t,e){let r={...t.headers,...e.headers},o={};return Object.keys(r).forEach(a=>{let n=r[a];o[a.toLowerCase()]=n}),o}function RH(t){return t.map(e=>LH(e))}function LH(t){let e=t.request.headers["x-algolia-api-key"]?{"x-algolia-api-key":"*****"}:{};return{...t,request:{...t.request,headers:{...t.request.headers,...e}}}}function Vme(t,e,r){return{name:"ApiError",message:t,status:e,transporterStackTrace:r}}function zme(t,e){return{name:"DeserializationError",message:t,response:e}}function Jme(t){return{name:"RetryError",message:"Unreachable hosts - your application id may be incorrect. 
If the error persists, contact support@algolia.com.",transporterStackTrace:t}}Qi.CallEnum=k2;Qi.HostStatusEnum=YE;Qi.createApiError=Vme;Qi.createDeserializationError=zme;Qi.createMappedRequestOptions=FH;Qi.createRetryError=Jme;Qi.createStatefulHost=TH;Qi.createStatelessHost=NH;Qi.createTransporter=Ldt;Qi.createUserAgent=Mdt;Qi.deserializeFailure=jme;Qi.deserializeSuccess=qme;Qi.isStatefulHostTimeouted=Hme;Qi.isStatefulHostUp=_me;Qi.serializeData=Wme;Qi.serializeHeaders=Kme;Qi.serializeQueryParameters=Yme;Qi.serializeUrl=Gme;Qi.stackFrameWithoutCredentials=LH;Qi.stackTraceWithoutCredentials=RH});var Q2=_((YWt,Zme)=>{Zme.exports=Xme()});var $me=_(Rh=>{"use strict";Object.defineProperty(Rh,"__esModule",{value:!0});var WE=b2(),Udt=Q2(),F2=x2(),_dt=t=>{let e=t.region||"us",r=WE.createAuth(WE.AuthMode.WithinHeaders,t.appId,t.apiKey),o=Udt.createTransporter({hosts:[{url:`analytics.${e}.algolia.com`}],...t,headers:{...r.headers(),"content-type":"application/json",...t.headers},queryParameters:{...r.queryParameters(),...t.queryParameters}}),a=t.appId;return WE.addMethods({appId:a,transporter:o},t.methods)},Hdt=t=>(e,r)=>t.transporter.write({method:F2.MethodEnum.Post,path:"2/abtests",data:e},r),qdt=t=>(e,r)=>t.transporter.write({method:F2.MethodEnum.Delete,path:WE.encode("2/abtests/%s",e)},r),jdt=t=>(e,r)=>t.transporter.read({method:F2.MethodEnum.Get,path:WE.encode("2/abtests/%s",e)},r),Gdt=t=>e=>t.transporter.read({method:F2.MethodEnum.Get,path:"2/abtests"},e),Ydt=t=>(e,r)=>t.transporter.write({method:F2.MethodEnum.Post,path:WE.encode("2/abtests/%s/stop",e)},r);Rh.addABTest=Hdt;Rh.createAnalyticsClient=_dt;Rh.deleteABTest=qdt;Rh.getABTest=jdt;Rh.getABTests=Gdt;Rh.stopABTest=Ydt});var tye=_((KWt,eye)=>{eye.exports=$me()});var nye=_(R2=>{"use strict";Object.defineProperty(R2,"__esModule",{value:!0});var MH=b2(),Wdt=Q2(),rye=x2(),Kdt=t=>{let e=t.region||"us",r=MH.createAuth(MH.AuthMode.WithinHeaders,t.appId,t.apiKey),o=Wdt.createTransporter({hosts:[{url:`personalization.${e}.algolia.com`}],...t,headers:{...r.headers(),"content-type":"application/json",...t.headers},queryParameters:{...r.queryParameters(),...t.queryParameters}});return MH.addMethods({appId:t.appId,transporter:o},t.methods)},Vdt=t=>e=>t.transporter.read({method:rye.MethodEnum.Get,path:"1/strategies/personalization"},e),zdt=t=>(e,r)=>t.transporter.write({method:rye.MethodEnum.Post,path:"1/strategies/personalization",data:e},r);R2.createPersonalizationClient=Kdt;R2.getPersonalizationStrategy=Vdt;R2.setPersonalizationStrategy=zdt});var sye=_((zWt,iye)=>{iye.exports=nye()});var Eye=_(Ft=>{"use strict";Object.defineProperty(Ft,"__esModule",{value:!0});var Gt=b2(),Ma=Q2(),Ir=x2(),Jdt=ve("crypto");function fk(t){let e=r=>t.request(r).then(o=>{if(t.batch!==void 0&&t.batch(o.hits),!t.shouldStop(o))return o.cursor?e({cursor:o.cursor}):e({page:(r.page||0)+1})});return e({})}var Xdt=t=>{let e=t.appId,r=Gt.createAuth(t.authMode!==void 0?t.authMode:Gt.AuthMode.WithinHeaders,e,t.apiKey),o=Ma.createTransporter({hosts:[{url:`${e}-dsn.algolia.net`,accept:Ma.CallEnum.Read},{url:`${e}.algolia.net`,accept:Ma.CallEnum.Write}].concat(Gt.shuffle([{url:`${e}-1.algolianet.com`},{url:`${e}-2.algolianet.com`},{url:`${e}-3.algolianet.com`}])),...t,headers:{...r.headers(),"content-type":"application/x-www-form-urlencoded",...t.headers},queryParameters:{...r.queryParameters(),...t.queryParameters}}),a={transporter:o,appId:e,addAlgoliaAgent(n,u){o.userAgent.add({segment:n,version:u})},clearCache(){return 
Promise.all([o.requestsCache.clear(),o.responsesCache.clear()]).then(()=>{})}};return Gt.addMethods(a,t.methods)};function oye(){return{name:"MissingObjectIDError",message:"All objects must have an unique objectID (like a primary key) to be valid. Algolia is also able to generate objectIDs automatically but *it's not recommended*. To do it, use the `{'autoGenerateObjectIDIfNotExist': true}` option."}}function aye(){return{name:"ObjectNotFoundError",message:"Object not found."}}function lye(){return{name:"ValidUntilNotFoundError",message:"ValidUntil not found in given secured api key."}}var Zdt=t=>(e,r)=>{let{queryParameters:o,...a}=r||{},n={acl:e,...o!==void 0?{queryParameters:o}:{}},u=(A,p)=>Gt.createRetryablePromise(h=>T2(t)(A.key,p).catch(E=>{if(E.status!==404)throw E;return h()}));return Gt.createWaitablePromise(t.transporter.write({method:Ir.MethodEnum.Post,path:"1/keys",data:n},a),u)},$dt=t=>(e,r,o)=>{let a=Ma.createMappedRequestOptions(o);return a.queryParameters["X-Algolia-User-ID"]=e,t.transporter.write({method:Ir.MethodEnum.Post,path:"1/clusters/mapping",data:{cluster:r}},a)},emt=t=>(e,r,o)=>t.transporter.write({method:Ir.MethodEnum.Post,path:"1/clusters/mapping/batch",data:{users:e,cluster:r}},o),tmt=t=>(e,r)=>Gt.createWaitablePromise(t.transporter.write({method:Ir.MethodEnum.Post,path:Gt.encode("/1/dictionaries/%s/batch",e),data:{clearExistingDictionaryEntries:!0,requests:{action:"addEntry",body:[]}}},r),(o,a)=>KE(t)(o.taskID,a)),pk=t=>(e,r,o)=>{let a=(n,u)=>N2(t)(e,{methods:{waitTask:es}}).waitTask(n.taskID,u);return Gt.createWaitablePromise(t.transporter.write({method:Ir.MethodEnum.Post,path:Gt.encode("1/indexes/%s/operation",e),data:{operation:"copy",destination:r}},o),a)},rmt=t=>(e,r,o)=>pk(t)(e,r,{...o,scope:[gk.Rules]}),nmt=t=>(e,r,o)=>pk(t)(e,r,{...o,scope:[gk.Settings]}),imt=t=>(e,r,o)=>pk(t)(e,r,{...o,scope:[gk.Synonyms]}),smt=t=>(e,r)=>e.method===Ir.MethodEnum.Get?t.transporter.read(e,r):t.transporter.write(e,r),omt=t=>(e,r)=>{let o=(a,n)=>Gt.createRetryablePromise(u=>T2(t)(e,n).then(u).catch(A=>{if(A.status!==404)throw A}));return Gt.createWaitablePromise(t.transporter.write({method:Ir.MethodEnum.Delete,path:Gt.encode("1/keys/%s",e)},r),o)},amt=t=>(e,r,o)=>{let a=r.map(n=>({action:"deleteEntry",body:{objectID:n}}));return Gt.createWaitablePromise(t.transporter.write({method:Ir.MethodEnum.Post,path:Gt.encode("/1/dictionaries/%s/batch",e),data:{clearExistingDictionaryEntries:!1,requests:a}},o),(n,u)=>KE(t)(n.taskID,u))},lmt=()=>(t,e)=>{let r=Ma.serializeQueryParameters(e),o=Jdt.createHmac("sha256",t).update(r).digest("hex");return Buffer.from(o+r).toString("base64")},T2=t=>(e,r)=>t.transporter.read({method:Ir.MethodEnum.Get,path:Gt.encode("1/keys/%s",e)},r),cye=t=>(e,r)=>t.transporter.read({method:Ir.MethodEnum.Get,path:Gt.encode("1/task/%s",e.toString())},r),cmt=t=>e=>t.transporter.read({method:Ir.MethodEnum.Get,path:"/1/dictionaries/*/settings"},e),umt=t=>e=>t.transporter.read({method:Ir.MethodEnum.Get,path:"1/logs"},e),Amt=()=>t=>{let e=Buffer.from(t,"base64").toString("ascii"),r=/validUntil=(\d+)/,o=e.match(r);if(o===null)throw lye();return parseInt(o[1],10)-Math.round(new Date().getTime()/1e3)},fmt=t=>e=>t.transporter.read({method:Ir.MethodEnum.Get,path:"1/clusters/mapping/top"},e),pmt=t=>(e,r)=>t.transporter.read({method:Ir.MethodEnum.Get,path:Gt.encode("1/clusters/mapping/%s",e)},r),hmt=t=>e=>{let{retrieveMappings:r,...o}=e||{};return 
r===!0&&(o.getClusters=!0),t.transporter.read({method:Ir.MethodEnum.Get,path:"1/clusters/mapping/pending"},o)},N2=t=>(e,r={})=>{let o={transporter:t.transporter,appId:t.appId,indexName:e};return Gt.addMethods(o,r.methods)},gmt=t=>e=>t.transporter.read({method:Ir.MethodEnum.Get,path:"1/keys"},e),dmt=t=>e=>t.transporter.read({method:Ir.MethodEnum.Get,path:"1/clusters"},e),mmt=t=>e=>t.transporter.read({method:Ir.MethodEnum.Get,path:"1/indexes"},e),ymt=t=>e=>t.transporter.read({method:Ir.MethodEnum.Get,path:"1/clusters/mapping"},e),Emt=t=>(e,r,o)=>{let a=(n,u)=>N2(t)(e,{methods:{waitTask:es}}).waitTask(n.taskID,u);return Gt.createWaitablePromise(t.transporter.write({method:Ir.MethodEnum.Post,path:Gt.encode("1/indexes/%s/operation",e),data:{operation:"move",destination:r}},o),a)},Cmt=t=>(e,r)=>{let o=(a,n)=>Promise.all(Object.keys(a.taskID).map(u=>N2(t)(u,{methods:{waitTask:es}}).waitTask(a.taskID[u],n)));return Gt.createWaitablePromise(t.transporter.write({method:Ir.MethodEnum.Post,path:"1/indexes/*/batch",data:{requests:e}},r),o)},wmt=t=>(e,r)=>t.transporter.read({method:Ir.MethodEnum.Post,path:"1/indexes/*/objects",data:{requests:e}},r),Imt=t=>(e,r)=>{let o=e.map(a=>({...a,params:Ma.serializeQueryParameters(a.params||{})}));return t.transporter.read({method:Ir.MethodEnum.Post,path:"1/indexes/*/queries",data:{requests:o},cacheable:!0},r)},Bmt=t=>(e,r)=>Promise.all(e.map(o=>{let{facetName:a,facetQuery:n,...u}=o.params;return N2(t)(o.indexName,{methods:{searchForFacetValues:dye}}).searchForFacetValues(a,n,{...r,...u})})),vmt=t=>(e,r)=>{let o=Ma.createMappedRequestOptions(r);return o.queryParameters["X-Algolia-User-ID"]=e,t.transporter.write({method:Ir.MethodEnum.Delete,path:"1/clusters/mapping"},o)},Dmt=t=>(e,r,o)=>{let a=r.map(n=>({action:"addEntry",body:n}));return Gt.createWaitablePromise(t.transporter.write({method:Ir.MethodEnum.Post,path:Gt.encode("/1/dictionaries/%s/batch",e),data:{clearExistingDictionaryEntries:!0,requests:a}},o),(n,u)=>KE(t)(n.taskID,u))},Pmt=t=>(e,r)=>{let o=(a,n)=>Gt.createRetryablePromise(u=>T2(t)(e,n).catch(A=>{if(A.status!==404)throw A;return u()}));return Gt.createWaitablePromise(t.transporter.write({method:Ir.MethodEnum.Post,path:Gt.encode("1/keys/%s/restore",e)},r),o)},Smt=t=>(e,r,o)=>{let a=r.map(n=>({action:"addEntry",body:n}));return Gt.createWaitablePromise(t.transporter.write({method:Ir.MethodEnum.Post,path:Gt.encode("/1/dictionaries/%s/batch",e),data:{clearExistingDictionaryEntries:!1,requests:a}},o),(n,u)=>KE(t)(n.taskID,u))},bmt=t=>(e,r,o)=>t.transporter.read({method:Ir.MethodEnum.Post,path:Gt.encode("/1/dictionaries/%s/search",e),data:{query:r},cacheable:!0},o),xmt=t=>(e,r)=>t.transporter.read({method:Ir.MethodEnum.Post,path:"1/clusters/mapping/search",data:{query:e}},r),kmt=t=>(e,r)=>Gt.createWaitablePromise(t.transporter.write({method:Ir.MethodEnum.Put,path:"/1/dictionaries/*/settings",data:e},r),(o,a)=>KE(t)(o.taskID,a)),Qmt=t=>(e,r)=>{let o=Object.assign({},r),{queryParameters:a,...n}=r||{},u=a?{queryParameters:a}:{},A=["acl","indexes","referers","restrictSources","queryParameters","description","maxQueriesPerIPPerHour","maxHitsPerQuery"],p=E=>Object.keys(o).filter(I=>A.indexOf(I)!==-1).every(I=>{if(Array.isArray(E[I])&&Array.isArray(o[I])){let v=E[I];return v.length===o[I].length&&v.every((x,C)=>x===o[I][C])}else return E[I]===o[I]}),h=(E,I)=>Gt.createRetryablePromise(v=>T2(t)(e,I).then(x=>p(x)?Promise.resolve():v()));return 
Gt.createWaitablePromise(t.transporter.write({method:Ir.MethodEnum.Put,path:Gt.encode("1/keys/%s",e),data:u},n),h)},KE=t=>(e,r)=>Gt.createRetryablePromise(o=>cye(t)(e,r).then(a=>a.status!=="published"?o():void 0)),uye=t=>(e,r)=>{let o=(a,n)=>es(t)(a.taskID,n);return Gt.createWaitablePromise(t.transporter.write({method:Ir.MethodEnum.Post,path:Gt.encode("1/indexes/%s/batch",t.indexName),data:{requests:e}},r),o)},Fmt=t=>e=>fk({shouldStop:r=>r.cursor===void 0,...e,request:r=>t.transporter.read({method:Ir.MethodEnum.Post,path:Gt.encode("1/indexes/%s/browse",t.indexName),data:r},e)}),Rmt=t=>e=>{let r={hitsPerPage:1e3,...e};return fk({shouldStop:o=>o.hits.length({...a,hits:a.hits.map(n=>(delete n._highlightResult,n))}))}})},Tmt=t=>e=>{let r={hitsPerPage:1e3,...e};return fk({shouldStop:o=>o.hits.length({...a,hits:a.hits.map(n=>(delete n._highlightResult,n))}))}})},hk=t=>(e,r,o)=>{let{batchSize:a,...n}=o||{},u={taskIDs:[],objectIDs:[]},A=(p=0)=>{let h=[],E;for(E=p;E({action:r,body:I})),n).then(I=>(u.objectIDs=u.objectIDs.concat(I.objectIDs),u.taskIDs.push(I.taskID),E++,A(E)))};return Gt.createWaitablePromise(A(),(p,h)=>Promise.all(p.taskIDs.map(E=>es(t)(E,h))))},Nmt=t=>e=>Gt.createWaitablePromise(t.transporter.write({method:Ir.MethodEnum.Post,path:Gt.encode("1/indexes/%s/clear",t.indexName)},e),(r,o)=>es(t)(r.taskID,o)),Lmt=t=>e=>{let{forwardToReplicas:r,...o}=e||{},a=Ma.createMappedRequestOptions(o);return r&&(a.queryParameters.forwardToReplicas=1),Gt.createWaitablePromise(t.transporter.write({method:Ir.MethodEnum.Post,path:Gt.encode("1/indexes/%s/rules/clear",t.indexName)},a),(n,u)=>es(t)(n.taskID,u))},Mmt=t=>e=>{let{forwardToReplicas:r,...o}=e||{},a=Ma.createMappedRequestOptions(o);return r&&(a.queryParameters.forwardToReplicas=1),Gt.createWaitablePromise(t.transporter.write({method:Ir.MethodEnum.Post,path:Gt.encode("1/indexes/%s/synonyms/clear",t.indexName)},a),(n,u)=>es(t)(n.taskID,u))},Omt=t=>(e,r)=>Gt.createWaitablePromise(t.transporter.write({method:Ir.MethodEnum.Post,path:Gt.encode("1/indexes/%s/deleteByQuery",t.indexName),data:e},r),(o,a)=>es(t)(o.taskID,a)),Umt=t=>e=>Gt.createWaitablePromise(t.transporter.write({method:Ir.MethodEnum.Delete,path:Gt.encode("1/indexes/%s",t.indexName)},e),(r,o)=>es(t)(r.taskID,o)),_mt=t=>(e,r)=>Gt.createWaitablePromise(Aye(t)([e],r).then(o=>({taskID:o.taskIDs[0]})),(o,a)=>es(t)(o.taskID,a)),Aye=t=>(e,r)=>{let o=e.map(a=>({objectID:a}));return hk(t)(o,Wg.DeleteObject,r)},Hmt=t=>(e,r)=>{let{forwardToReplicas:o,...a}=r||{},n=Ma.createMappedRequestOptions(a);return o&&(n.queryParameters.forwardToReplicas=1),Gt.createWaitablePromise(t.transporter.write({method:Ir.MethodEnum.Delete,path:Gt.encode("1/indexes/%s/rules/%s",t.indexName,e)},n),(u,A)=>es(t)(u.taskID,A))},qmt=t=>(e,r)=>{let{forwardToReplicas:o,...a}=r||{},n=Ma.createMappedRequestOptions(a);return o&&(n.queryParameters.forwardToReplicas=1),Gt.createWaitablePromise(t.transporter.write({method:Ir.MethodEnum.Delete,path:Gt.encode("1/indexes/%s/synonyms/%s",t.indexName,e)},n),(u,A)=>es(t)(u.taskID,A))},jmt=t=>e=>fye(t)(e).then(()=>!0).catch(r=>{if(r.status!==404)throw r;return!1}),Gmt=t=>(e,r,o)=>t.transporter.read({method:Ir.MethodEnum.Post,path:Gt.encode("1/answers/%s/prediction",t.indexName),data:{query:e,queryLanguages:r},cacheable:!0},o),Ymt=t=>(e,r)=>{let{query:o,paginate:a,...n}=r||{},u=0,A=()=>gye(t)(o||"",{...n,page:u}).then(p=>{for(let[h,E]of Object.entries(p.hits))if(e(E))return{object:E,position:parseInt(h,10),page:u};if(u++,a===!1||u>=p.nbPages)throw aye();return A()});return 
A()},Wmt=t=>(e,r)=>t.transporter.read({method:Ir.MethodEnum.Get,path:Gt.encode("1/indexes/%s/%s",t.indexName,e)},r),Kmt=()=>(t,e)=>{for(let[r,o]of Object.entries(t.hits))if(o.objectID===e)return parseInt(r,10);return-1},Vmt=t=>(e,r)=>{let{attributesToRetrieve:o,...a}=r||{},n=e.map(u=>({indexName:t.indexName,objectID:u,...o?{attributesToRetrieve:o}:{}}));return t.transporter.read({method:Ir.MethodEnum.Post,path:"1/indexes/*/objects",data:{requests:n}},a)},zmt=t=>(e,r)=>t.transporter.read({method:Ir.MethodEnum.Get,path:Gt.encode("1/indexes/%s/rules/%s",t.indexName,e)},r),fye=t=>e=>t.transporter.read({method:Ir.MethodEnum.Get,path:Gt.encode("1/indexes/%s/settings",t.indexName),data:{getVersion:2}},e),Jmt=t=>(e,r)=>t.transporter.read({method:Ir.MethodEnum.Get,path:Gt.encode("1/indexes/%s/synonyms/%s",t.indexName,e)},r),pye=t=>(e,r)=>t.transporter.read({method:Ir.MethodEnum.Get,path:Gt.encode("1/indexes/%s/task/%s",t.indexName,e.toString())},r),Xmt=t=>(e,r)=>Gt.createWaitablePromise(hye(t)([e],r).then(o=>({objectID:o.objectIDs[0],taskID:o.taskIDs[0]})),(o,a)=>es(t)(o.taskID,a)),hye=t=>(e,r)=>{let{createIfNotExists:o,...a}=r||{},n=o?Wg.PartialUpdateObject:Wg.PartialUpdateObjectNoCreate;return hk(t)(e,n,a)},Zmt=t=>(e,r)=>{let{safe:o,autoGenerateObjectIDIfNotExist:a,batchSize:n,...u}=r||{},A=(C,R,L,U)=>Gt.createWaitablePromise(t.transporter.write({method:Ir.MethodEnum.Post,path:Gt.encode("1/indexes/%s/operation",C),data:{operation:L,destination:R}},U),(z,te)=>es(t)(z.taskID,te)),p=Math.random().toString(36).substring(7),h=`${t.indexName}_tmp_${p}`,E=OH({appId:t.appId,transporter:t.transporter,indexName:h}),I=[],v=A(t.indexName,h,"copy",{...u,scope:["settings","synonyms","rules"]});I.push(v);let x=(o?v.wait(u):v).then(()=>{let C=E(e,{...u,autoGenerateObjectIDIfNotExist:a,batchSize:n});return I.push(C),o?C.wait(u):C}).then(()=>{let C=A(h,t.indexName,"move",u);return I.push(C),o?C.wait(u):C}).then(()=>Promise.all(I)).then(([C,R,L])=>({objectIDs:R.objectIDs,taskIDs:[C.taskID,...R.taskIDs,L.taskID]}));return Gt.createWaitablePromise(x,(C,R)=>Promise.all(I.map(L=>L.wait(R))))},$mt=t=>(e,r)=>UH(t)(e,{...r,clearExistingRules:!0}),eyt=t=>(e,r)=>_H(t)(e,{...r,clearExistingSynonyms:!0}),tyt=t=>(e,r)=>Gt.createWaitablePromise(OH(t)([e],r).then(o=>({objectID:o.objectIDs[0],taskID:o.taskIDs[0]})),(o,a)=>es(t)(o.taskID,a)),OH=t=>(e,r)=>{let{autoGenerateObjectIDIfNotExist:o,...a}=r||{},n=o?Wg.AddObject:Wg.UpdateObject;if(n===Wg.UpdateObject){for(let u of e)if(u.objectID===void 0)return Gt.createWaitablePromise(Promise.reject(oye()))}return hk(t)(e,n,a)},ryt=t=>(e,r)=>UH(t)([e],r),UH=t=>(e,r)=>{let{forwardToReplicas:o,clearExistingRules:a,...n}=r||{},u=Ma.createMappedRequestOptions(n);return o&&(u.queryParameters.forwardToReplicas=1),a&&(u.queryParameters.clearExistingRules=1),Gt.createWaitablePromise(t.transporter.write({method:Ir.MethodEnum.Post,path:Gt.encode("1/indexes/%s/rules/batch",t.indexName),data:e},u),(A,p)=>es(t)(A.taskID,p))},nyt=t=>(e,r)=>_H(t)([e],r),_H=t=>(e,r)=>{let{forwardToReplicas:o,clearExistingSynonyms:a,replaceExistingSynonyms:n,...u}=r||{},A=Ma.createMappedRequestOptions(u);return 
o&&(A.queryParameters.forwardToReplicas=1),(n||a)&&(A.queryParameters.replaceExistingSynonyms=1),Gt.createWaitablePromise(t.transporter.write({method:Ir.MethodEnum.Post,path:Gt.encode("1/indexes/%s/synonyms/batch",t.indexName),data:e},A),(p,h)=>es(t)(p.taskID,h))},gye=t=>(e,r)=>t.transporter.read({method:Ir.MethodEnum.Post,path:Gt.encode("1/indexes/%s/query",t.indexName),data:{query:e},cacheable:!0},r),dye=t=>(e,r,o)=>t.transporter.read({method:Ir.MethodEnum.Post,path:Gt.encode("1/indexes/%s/facets/%s/query",t.indexName,e),data:{facetQuery:r},cacheable:!0},o),mye=t=>(e,r)=>t.transporter.read({method:Ir.MethodEnum.Post,path:Gt.encode("1/indexes/%s/rules/search",t.indexName),data:{query:e}},r),yye=t=>(e,r)=>t.transporter.read({method:Ir.MethodEnum.Post,path:Gt.encode("1/indexes/%s/synonyms/search",t.indexName),data:{query:e}},r),iyt=t=>(e,r)=>{let{forwardToReplicas:o,...a}=r||{},n=Ma.createMappedRequestOptions(a);return o&&(n.queryParameters.forwardToReplicas=1),Gt.createWaitablePromise(t.transporter.write({method:Ir.MethodEnum.Put,path:Gt.encode("1/indexes/%s/settings",t.indexName),data:e},n),(u,A)=>es(t)(u.taskID,A))},es=t=>(e,r)=>Gt.createRetryablePromise(o=>pye(t)(e,r).then(a=>a.status!=="published"?o():void 0)),syt={AddObject:"addObject",Analytics:"analytics",Browser:"browse",DeleteIndex:"deleteIndex",DeleteObject:"deleteObject",EditSettings:"editSettings",Inference:"inference",ListIndexes:"listIndexes",Logs:"logs",Personalization:"personalization",Recommendation:"recommendation",Search:"search",SeeUnretrievableAttributes:"seeUnretrievableAttributes",Settings:"settings",Usage:"usage"},Wg={AddObject:"addObject",UpdateObject:"updateObject",PartialUpdateObject:"partialUpdateObject",PartialUpdateObjectNoCreate:"partialUpdateObjectNoCreate",DeleteObject:"deleteObject",DeleteIndex:"delete",ClearIndex:"clear"},gk={Settings:"settings",Synonyms:"synonyms",Rules:"rules"},oyt={None:"none",StopIfEnoughMatches:"stopIfEnoughMatches"},ayt={Synonym:"synonym",OneWaySynonym:"oneWaySynonym",AltCorrection1:"altCorrection1",AltCorrection2:"altCorrection2",Placeholder:"placeholder"};Ft.ApiKeyACLEnum=syt;Ft.BatchActionEnum=Wg;Ft.ScopeEnum=gk;Ft.StrategyEnum=oyt;Ft.SynonymEnum=ayt;Ft.addApiKey=Zdt;Ft.assignUserID=$dt;Ft.assignUserIDs=emt;Ft.batch=uye;Ft.browseObjects=Fmt;Ft.browseRules=Rmt;Ft.browseSynonyms=Tmt;Ft.chunkedBatch=hk;Ft.clearDictionaryEntries=tmt;Ft.clearObjects=Nmt;Ft.clearRules=Lmt;Ft.clearSynonyms=Mmt;Ft.copyIndex=pk;Ft.copyRules=rmt;Ft.copySettings=nmt;Ft.copySynonyms=imt;Ft.createBrowsablePromise=fk;Ft.createMissingObjectIDError=oye;Ft.createObjectNotFoundError=aye;Ft.createSearchClient=Xdt;Ft.createValidUntilNotFoundError=lye;Ft.customRequest=smt;Ft.deleteApiKey=omt;Ft.deleteBy=Omt;Ft.deleteDictionaryEntries=amt;Ft.deleteIndex=Umt;Ft.deleteObject=_mt;Ft.deleteObjects=Aye;Ft.deleteRule=Hmt;Ft.deleteSynonym=qmt;Ft.exists=jmt;Ft.findAnswers=Gmt;Ft.findObject=Ymt;Ft.generateSecuredApiKey=lmt;Ft.getApiKey=T2;Ft.getAppTask=cye;Ft.getDictionarySettings=cmt;Ft.getLogs=umt;Ft.getObject=Wmt;Ft.getObjectPosition=Kmt;Ft.getObjects=Vmt;Ft.getRule=zmt;Ft.getSecuredApiKeyRemainingValidity=Amt;Ft.getSettings=fye;Ft.getSynonym=Jmt;Ft.getTask=pye;Ft.getTopUserIDs=fmt;Ft.getUserID=pmt;Ft.hasPendingMappings=hmt;Ft.initIndex=N2;Ft.listApiKeys=gmt;Ft.listClusters=dmt;Ft.listIndices=mmt;Ft.listUserIDs=ymt;Ft.moveIndex=Emt;Ft.multipleBatch=Cmt;Ft.multipleGetObjects=wmt;Ft.multipleQueries=Imt;Ft.multipleSearchForFacetValues=Bmt;Ft.partialUpdateObject=Xmt;Ft.partialUpdateObjects=hye;Ft.removeUserID=vmt;Ft.replaceAllOb
jects=Zmt;Ft.replaceAllRules=$mt;Ft.replaceAllSynonyms=eyt;Ft.replaceDictionaryEntries=Dmt;Ft.restoreApiKey=Pmt;Ft.saveDictionaryEntries=Smt;Ft.saveObject=tyt;Ft.saveObjects=OH;Ft.saveRule=ryt;Ft.saveRules=UH;Ft.saveSynonym=nyt;Ft.saveSynonyms=_H;Ft.search=gye;Ft.searchDictionaryEntries=bmt;Ft.searchForFacetValues=dye;Ft.searchRules=mye;Ft.searchSynonyms=yye;Ft.searchUserIDs=xmt;Ft.setDictionarySettings=kmt;Ft.setSettings=iyt;Ft.updateApiKey=Qmt;Ft.waitAppTask=KE;Ft.waitTask=es});var wye=_((XWt,Cye)=>{Cye.exports=Eye()});var Iye=_(dk=>{"use strict";Object.defineProperty(dk,"__esModule",{value:!0});function lyt(){return{debug(t,e){return Promise.resolve()},info(t,e){return Promise.resolve()},error(t,e){return Promise.resolve()}}}var cyt={Debug:1,Info:2,Error:3};dk.LogLevelEnum=cyt;dk.createNullLogger=lyt});var vye=_(($Wt,Bye)=>{Bye.exports=Iye()});var bye=_(HH=>{"use strict";Object.defineProperty(HH,"__esModule",{value:!0});var Dye=ve("http"),Pye=ve("https"),uyt=ve("url"),Sye={keepAlive:!0},Ayt=new Dye.Agent(Sye),fyt=new Pye.Agent(Sye);function pyt({agent:t,httpAgent:e,httpsAgent:r,requesterOptions:o={}}={}){let a=e||t||Ayt,n=r||t||fyt;return{send(u){return new Promise(A=>{let p=uyt.parse(u.url),h=p.query===null?p.pathname:`${p.pathname}?${p.query}`,E={...o,agent:p.protocol==="https:"?n:a,hostname:p.hostname,path:h,method:u.method,headers:{...o&&o.headers?o.headers:{},...u.headers},...p.port!==void 0?{port:p.port||""}:{}},I=(p.protocol==="https:"?Pye:Dye).request(E,R=>{let L=[];R.on("data",U=>{L=L.concat(U)}),R.on("end",()=>{clearTimeout(x),clearTimeout(C),A({status:R.statusCode||0,content:Buffer.concat(L).toString(),isTimedOut:!1})})}),v=(R,L)=>setTimeout(()=>{I.abort(),A({status:0,content:L,isTimedOut:!0})},R*1e3),x=v(u.connectTimeout,"Connection timeout"),C;I.on("error",R=>{clearTimeout(x),clearTimeout(C),A({status:0,content:R.message,isTimedOut:!1})}),I.once("response",()=>{clearTimeout(x),C=v(u.responseTimeout,"Socket timeout")}),u.data!==void 0&&I.write(u.data),I.end()})},destroy(){return a.destroy(),n.destroy(),Promise.resolve()}}}HH.createNodeHttpRequester=pyt});var kye=_((tKt,xye)=>{xye.exports=bye()});var Tye=_((rKt,Rye)=>{"use strict";var Qye=bme(),hyt=Qme(),VE=tye(),jH=b2(),qH=sye(),_t=wye(),gyt=vye(),dyt=kye(),myt=Q2();function Fye(t,e,r){let o={appId:t,apiKey:e,timeouts:{connect:2,read:5,write:30},requester:dyt.createNodeHttpRequester(),logger:gyt.createNullLogger(),responsesCache:Qye.createNullCache(),requestsCache:Qye.createNullCache(),hostsCache:hyt.createInMemoryCache(),userAgent:myt.createUserAgent(jH.version).add({segment:"Node.js",version:process.versions.node})},a={...o,...r},n=()=>u=>qH.createPersonalizationClient({...o,...u,methods:{getPersonalizationStrategy:qH.getPersonalizationStrategy,setPersonalizationStrategy:qH.setPersonalizationStrategy}});return 
_t.createSearchClient({...a,methods:{search:_t.multipleQueries,searchForFacetValues:_t.multipleSearchForFacetValues,multipleBatch:_t.multipleBatch,multipleGetObjects:_t.multipleGetObjects,multipleQueries:_t.multipleQueries,copyIndex:_t.copyIndex,copySettings:_t.copySettings,copyRules:_t.copyRules,copySynonyms:_t.copySynonyms,moveIndex:_t.moveIndex,listIndices:_t.listIndices,getLogs:_t.getLogs,listClusters:_t.listClusters,multipleSearchForFacetValues:_t.multipleSearchForFacetValues,getApiKey:_t.getApiKey,addApiKey:_t.addApiKey,listApiKeys:_t.listApiKeys,updateApiKey:_t.updateApiKey,deleteApiKey:_t.deleteApiKey,restoreApiKey:_t.restoreApiKey,assignUserID:_t.assignUserID,assignUserIDs:_t.assignUserIDs,getUserID:_t.getUserID,searchUserIDs:_t.searchUserIDs,listUserIDs:_t.listUserIDs,getTopUserIDs:_t.getTopUserIDs,removeUserID:_t.removeUserID,hasPendingMappings:_t.hasPendingMappings,generateSecuredApiKey:_t.generateSecuredApiKey,getSecuredApiKeyRemainingValidity:_t.getSecuredApiKeyRemainingValidity,destroy:jH.destroy,clearDictionaryEntries:_t.clearDictionaryEntries,deleteDictionaryEntries:_t.deleteDictionaryEntries,getDictionarySettings:_t.getDictionarySettings,getAppTask:_t.getAppTask,replaceDictionaryEntries:_t.replaceDictionaryEntries,saveDictionaryEntries:_t.saveDictionaryEntries,searchDictionaryEntries:_t.searchDictionaryEntries,setDictionarySettings:_t.setDictionarySettings,waitAppTask:_t.waitAppTask,customRequest:_t.customRequest,initIndex:u=>A=>_t.initIndex(u)(A,{methods:{batch:_t.batch,delete:_t.deleteIndex,findAnswers:_t.findAnswers,getObject:_t.getObject,getObjects:_t.getObjects,saveObject:_t.saveObject,saveObjects:_t.saveObjects,search:_t.search,searchForFacetValues:_t.searchForFacetValues,waitTask:_t.waitTask,setSettings:_t.setSettings,getSettings:_t.getSettings,partialUpdateObject:_t.partialUpdateObject,partialUpdateObjects:_t.partialUpdateObjects,deleteObject:_t.deleteObject,deleteObjects:_t.deleteObjects,deleteBy:_t.deleteBy,clearObjects:_t.clearObjects,browseObjects:_t.browseObjects,getObjectPosition:_t.getObjectPosition,findObject:_t.findObject,exists:_t.exists,saveSynonym:_t.saveSynonym,saveSynonyms:_t.saveSynonyms,getSynonym:_t.getSynonym,searchSynonyms:_t.searchSynonyms,browseSynonyms:_t.browseSynonyms,deleteSynonym:_t.deleteSynonym,clearSynonyms:_t.clearSynonyms,replaceAllObjects:_t.replaceAllObjects,replaceAllSynonyms:_t.replaceAllSynonyms,searchRules:_t.searchRules,getRule:_t.getRule,deleteRule:_t.deleteRule,saveRule:_t.saveRule,saveRules:_t.saveRules,replaceAllRules:_t.replaceAllRules,browseRules:_t.browseRules,clearRules:_t.clearRules}}),initAnalytics:()=>u=>VE.createAnalyticsClient({...o,...u,methods:{addABTest:VE.addABTest,getABTest:VE.getABTest,getABTests:VE.getABTests,stopABTest:VE.stopABTest,deleteABTest:VE.deleteABTest}}),initPersonalization:n,initRecommendation:()=>u=>(a.logger.info("The `initRecommendation` method is deprecated. 
Use `initPersonalization` instead."),n()(u))}})}Fye.version=jH.version;Rye.exports=Fye});var YH=_((nKt,GH)=>{var Nye=Tye();GH.exports=Nye;GH.exports.default=Nye});var VH=_((sKt,Oye)=>{"use strict";var Mye=Object.getOwnPropertySymbols,Eyt=Object.prototype.hasOwnProperty,Cyt=Object.prototype.propertyIsEnumerable;function wyt(t){if(t==null)throw new TypeError("Object.assign cannot be called with null or undefined");return Object(t)}function Iyt(){try{if(!Object.assign)return!1;var t=new String("abc");if(t[5]="de",Object.getOwnPropertyNames(t)[0]==="5")return!1;for(var e={},r=0;r<10;r++)e["_"+String.fromCharCode(r)]=r;var o=Object.getOwnPropertyNames(e).map(function(n){return e[n]});if(o.join("")!=="0123456789")return!1;var a={};return"abcdefghijklmnopqrst".split("").forEach(function(n){a[n]=n}),Object.keys(Object.assign({},a)).join("")==="abcdefghijklmnopqrst"}catch{return!1}}Oye.exports=Iyt()?Object.assign:function(t,e){for(var r,o=wyt(t),a,n=1;n{"use strict";var $H=VH(),tu=typeof Symbol=="function"&&Symbol.for,L2=tu?Symbol.for("react.element"):60103,Byt=tu?Symbol.for("react.portal"):60106,vyt=tu?Symbol.for("react.fragment"):60107,Dyt=tu?Symbol.for("react.strict_mode"):60108,Pyt=tu?Symbol.for("react.profiler"):60114,Syt=tu?Symbol.for("react.provider"):60109,byt=tu?Symbol.for("react.context"):60110,xyt=tu?Symbol.for("react.forward_ref"):60112,kyt=tu?Symbol.for("react.suspense"):60113,Qyt=tu?Symbol.for("react.memo"):60115,Fyt=tu?Symbol.for("react.lazy"):60116,Uye=typeof Symbol=="function"&&Symbol.iterator;function M2(t){for(var e="https://reactjs.org/docs/error-decoder.html?invariant="+t,r=1;rmk.length&&mk.push(t)}function JH(t,e,r,o){var a=typeof t;(a==="undefined"||a==="boolean")&&(t=null);var n=!1;if(t===null)n=!0;else switch(a){case"string":case"number":n=!0;break;case"object":switch(t.$$typeof){case L2:case Byt:n=!0}}if(n)return r(o,t,e===""?"."+zH(t,0):e),1;if(n=0,e=e===""?".":e+":",Array.isArray(t))for(var u=0;u{"use strict";Xye.exports=Jye()});var s6=_((lKt,i6)=>{"use strict";var fn=i6.exports;i6.exports.default=fn;var Ln="\x1B[",O2="\x1B]",JE="\x07",yk=";",Zye=process.env.TERM_PROGRAM==="Apple_Terminal";fn.cursorTo=(t,e)=>{if(typeof t!="number")throw new TypeError("The `x` argument is required");return typeof e!="number"?Ln+(t+1)+"G":Ln+(e+1)+";"+(t+1)+"H"};fn.cursorMove=(t,e)=>{if(typeof t!="number")throw new TypeError("The `x` argument is required");let r="";return t<0?r+=Ln+-t+"D":t>0&&(r+=Ln+t+"C"),e<0?r+=Ln+-e+"A":e>0&&(r+=Ln+e+"B"),r};fn.cursorUp=(t=1)=>Ln+t+"A";fn.cursorDown=(t=1)=>Ln+t+"B";fn.cursorForward=(t=1)=>Ln+t+"C";fn.cursorBackward=(t=1)=>Ln+t+"D";fn.cursorLeft=Ln+"G";fn.cursorSavePosition=Zye?"\x1B7":Ln+"s";fn.cursorRestorePosition=Zye?"\x1B8":Ln+"u";fn.cursorGetPosition=Ln+"6n";fn.cursorNextLine=Ln+"E";fn.cursorPrevLine=Ln+"F";fn.cursorHide=Ln+"?25l";fn.cursorShow=Ln+"?25h";fn.eraseLines=t=>{let e="";for(let r=0;r[O2,"8",yk,yk,e,JE,t,O2,"8",yk,yk,JE].join("");fn.image=(t,e={})=>{let r=`${O2}1337;File=inline=1`;return e.width&&(r+=`;width=${e.width}`),e.height&&(r+=`;height=${e.height}`),e.preserveAspectRatio===!1&&(r+=";preserveAspectRatio=0"),r+":"+t.toString("base64")+JE};fn.iTerm={setCwd:(t=process.cwd())=>`${O2}50;CurrentDir=${t}${JE}`,annotation:(t,e={})=>{let r=`${O2}1337;`,o=typeof e.x<"u",a=typeof e.y<"u";if((o||a)&&!(o&&a&&typeof e.length<"u"))throw new Error("`x`, `y` and `length` must be defined when `x` or `y` is defined");return 
t=t.replace(/\|/g,""),r+=e.isHidden?"AddHiddenAnnotation=":"AddAnnotation=",e.length>0?r+=(o?[t,e.length,e.x,e.y]:[e.length,t]).join("|"):r+=t,r+JE}}});var eEe=_((cKt,o6)=>{"use strict";var $ye=(t,e)=>{for(let r of Reflect.ownKeys(e))Object.defineProperty(t,r,Object.getOwnPropertyDescriptor(e,r));return t};o6.exports=$ye;o6.exports.default=$ye});var rEe=_((uKt,Ck)=>{"use strict";var Oyt=eEe(),Ek=new WeakMap,tEe=(t,e={})=>{if(typeof t!="function")throw new TypeError("Expected a function");let r,o=0,a=t.displayName||t.name||"",n=function(...u){if(Ek.set(n,++o),o===1)r=t.apply(this,u),t=null;else if(e.throw===!0)throw new Error(`Function \`${a}\` can only be called once`);return r};return Oyt(n,t),Ek.set(n,o),n};Ck.exports=tEe;Ck.exports.default=tEe;Ck.exports.callCount=t=>{if(!Ek.has(t))throw new Error(`The given function \`${t.name}\` is not wrapped by the \`onetime\` package`);return Ek.get(t)}});var nEe=_((AKt,wk)=>{wk.exports=["SIGABRT","SIGALRM","SIGHUP","SIGINT","SIGTERM"];process.platform!=="win32"&&wk.exports.push("SIGVTALRM","SIGXCPU","SIGXFSZ","SIGUSR2","SIGTRAP","SIGSYS","SIGQUIT","SIGIOT");process.platform==="linux"&&wk.exports.push("SIGIO","SIGPOLL","SIGPWR","SIGSTKFLT","SIGUNUSED")});var c6=_((fKt,$E)=>{var yi=global.process,Kg=function(t){return t&&typeof t=="object"&&typeof t.removeListener=="function"&&typeof t.emit=="function"&&typeof t.reallyExit=="function"&&typeof t.listeners=="function"&&typeof t.kill=="function"&&typeof t.pid=="number"&&typeof t.on=="function"};Kg(yi)?(iEe=ve("assert"),XE=nEe(),sEe=/^win/i.test(yi.platform),U2=ve("events"),typeof U2!="function"&&(U2=U2.EventEmitter),yi.__signal_exit_emitter__?Ls=yi.__signal_exit_emitter__:(Ls=yi.__signal_exit_emitter__=new U2,Ls.count=0,Ls.emitted={}),Ls.infinite||(Ls.setMaxListeners(1/0),Ls.infinite=!0),$E.exports=function(t,e){if(!Kg(global.process))return function(){};iEe.equal(typeof t,"function","a callback must be provided for exit handler"),ZE===!1&&a6();var r="exit";e&&e.alwaysLast&&(r="afterexit");var o=function(){Ls.removeListener(r,t),Ls.listeners("exit").length===0&&Ls.listeners("afterexit").length===0&&Ik()};return Ls.on(r,t),o},Ik=function(){!ZE||!Kg(global.process)||(ZE=!1,XE.forEach(function(e){try{yi.removeListener(e,Bk[e])}catch{}}),yi.emit=vk,yi.reallyExit=l6,Ls.count-=1)},$E.exports.unload=Ik,Vg=function(e,r,o){Ls.emitted[e]||(Ls.emitted[e]=!0,Ls.emit(e,r,o))},Bk={},XE.forEach(function(t){Bk[t]=function(){if(Kg(global.process)){var r=yi.listeners(t);r.length===Ls.count&&(Ik(),Vg("exit",null,t),Vg("afterexit",null,t),sEe&&t==="SIGHUP"&&(t="SIGINT"),yi.kill(yi.pid,t))}}}),$E.exports.signals=function(){return XE},ZE=!1,a6=function(){ZE||!Kg(global.process)||(ZE=!0,Ls.count+=1,XE=XE.filter(function(e){try{return yi.on(e,Bk[e]),!0}catch{return!1}}),yi.emit=aEe,yi.reallyExit=oEe)},$E.exports.load=a6,l6=yi.reallyExit,oEe=function(e){Kg(global.process)&&(yi.exitCode=e||0,Vg("exit",yi.exitCode,null),Vg("afterexit",yi.exitCode,null),l6.call(yi,yi.exitCode))},vk=yi.emit,aEe=function(e,r){if(e==="exit"&&Kg(global.process)){r!==void 0&&(yi.exitCode=r);var o=vk.apply(this,arguments);return Vg("exit",yi.exitCode,null),Vg("afterexit",yi.exitCode,null),o}else return vk.apply(this,arguments)}):$E.exports=function(){return function(){}};var iEe,XE,sEe,U2,Ls,Ik,Vg,Bk,ZE,a6,l6,oEe,vk,aEe});var cEe=_((pKt,lEe)=>{"use strict";var Uyt=rEe(),_yt=c6();lEe.exports=Uyt(()=>{_yt(()=>{process.stderr.write("\x1B[?25h")},{alwaysLast:!0})})});var u6=_(eC=>{"use strict";var 
Hyt=cEe(),Dk=!1;eC.show=(t=process.stderr)=>{t.isTTY&&(Dk=!1,t.write("\x1B[?25h"))};eC.hide=(t=process.stderr)=>{t.isTTY&&(Hyt(),Dk=!0,t.write("\x1B[?25l"))};eC.toggle=(t,e)=>{t!==void 0&&(Dk=t),Dk?eC.show(e):eC.hide(e)}});var pEe=_(_2=>{"use strict";var fEe=_2&&_2.__importDefault||function(t){return t&&t.__esModule?t:{default:t}};Object.defineProperty(_2,"__esModule",{value:!0});var uEe=fEe(s6()),AEe=fEe(u6()),qyt=(t,{showCursor:e=!1}={})=>{let r=0,o="",a=!1,n=u=>{!e&&!a&&(AEe.default.hide(),a=!0);let A=u+` +`;A!==o&&(o=A,t.write(uEe.default.eraseLines(r)+A),r=A.split(` +`).length)};return n.clear=()=>{t.write(uEe.default.eraseLines(r)),o="",r=0},n.done=()=>{o="",r=0,e||(AEe.default.show(),a=!1)},n};_2.default={create:qyt}});var hEe=_((dKt,jyt)=>{jyt.exports=[{name:"AppVeyor",constant:"APPVEYOR",env:"APPVEYOR",pr:"APPVEYOR_PULL_REQUEST_NUMBER"},{name:"Azure Pipelines",constant:"AZURE_PIPELINES",env:"SYSTEM_TEAMFOUNDATIONCOLLECTIONURI",pr:"SYSTEM_PULLREQUEST_PULLREQUESTID"},{name:"Bamboo",constant:"BAMBOO",env:"bamboo_planKey"},{name:"Bitbucket Pipelines",constant:"BITBUCKET",env:"BITBUCKET_COMMIT",pr:"BITBUCKET_PR_ID"},{name:"Bitrise",constant:"BITRISE",env:"BITRISE_IO",pr:"BITRISE_PULL_REQUEST"},{name:"Buddy",constant:"BUDDY",env:"BUDDY_WORKSPACE_ID",pr:"BUDDY_EXECUTION_PULL_REQUEST_ID"},{name:"Buildkite",constant:"BUILDKITE",env:"BUILDKITE",pr:{env:"BUILDKITE_PULL_REQUEST",ne:"false"}},{name:"CircleCI",constant:"CIRCLE",env:"CIRCLECI",pr:"CIRCLE_PULL_REQUEST"},{name:"Cirrus CI",constant:"CIRRUS",env:"CIRRUS_CI",pr:"CIRRUS_PR"},{name:"AWS CodeBuild",constant:"CODEBUILD",env:"CODEBUILD_BUILD_ARN"},{name:"Codeship",constant:"CODESHIP",env:{CI_NAME:"codeship"}},{name:"Drone",constant:"DRONE",env:"DRONE",pr:{DRONE_BUILD_EVENT:"pull_request"}},{name:"dsari",constant:"DSARI",env:"DSARI"},{name:"GitLab CI",constant:"GITLAB",env:"GITLAB_CI"},{name:"GoCD",constant:"GOCD",env:"GO_PIPELINE_LABEL"},{name:"Hudson",constant:"HUDSON",env:"HUDSON_URL"},{name:"Jenkins",constant:"JENKINS",env:["JENKINS_URL","BUILD_ID"],pr:{any:["ghprbPullId","CHANGE_ID"]}},{name:"Magnum CI",constant:"MAGNUM",env:"MAGNUM"},{name:"Netlify CI",constant:"NETLIFY",env:"NETLIFY_BUILD_BASE",pr:{env:"PULL_REQUEST",ne:"false"}},{name:"Sail CI",constant:"SAIL",env:"SAILCI",pr:"SAIL_PULL_REQUEST_NUMBER"},{name:"Semaphore",constant:"SEMAPHORE",env:"SEMAPHORE",pr:"PULL_REQUEST_NUMBER"},{name:"Shippable",constant:"SHIPPABLE",env:"SHIPPABLE",pr:{IS_PULL_REQUEST:"true"}},{name:"Solano CI",constant:"SOLANO",env:"TDDIUM",pr:"TDDIUM_PR_ID"},{name:"Strider CD",constant:"STRIDER",env:"STRIDER"},{name:"TaskCluster",constant:"TASKCLUSTER",env:["TASK_ID","RUN_ID"]},{name:"TeamCity",constant:"TEAMCITY",env:"TEAMCITY_VERSION"},{name:"Travis CI",constant:"TRAVIS",env:"TRAVIS",pr:{env:"TRAVIS_PULL_REQUEST",ne:"false"}}]});var mEe=_(dl=>{"use strict";var dEe=hEe(),pA=process.env;Object.defineProperty(dl,"_vendors",{value:dEe.map(function(t){return t.constant})});dl.name=null;dl.isPR=null;dEe.forEach(function(t){var e=Array.isArray(t.env)?t.env:[t.env],r=e.every(function(o){return gEe(o)});if(dl[t.constant]=r,r)switch(dl.name=t.name,typeof t.pr){case"string":dl.isPR=!!pA[t.pr];break;case"object":"env"in t.pr?dl.isPR=t.pr.env in pA&&pA[t.pr.env]!==t.pr.ne:"any"in t.pr?dl.isPR=t.pr.any.some(function(o){return!!pA[o]}):dl.isPR=gEe(t.pr);break;default:dl.isPR=null}});dl.isCI=!!(pA.CI||pA.CONTINUOUS_INTEGRATION||pA.BUILD_NUMBER||pA.RUN_ID||dl.name);function gEe(t){return typeof t=="string"?!!pA[t]:Object.keys(t).every(function(e){return 
pA[e]===t[e]})}});var EEe=_((yKt,yEe)=>{"use strict";yEe.exports=mEe().isCI});var wEe=_((EKt,CEe)=>{"use strict";var Gyt=t=>{let e=new Set;do for(let r of Reflect.ownKeys(t))e.add([t,r]);while((t=Reflect.getPrototypeOf(t))&&t!==Object.prototype);return e};CEe.exports=(t,{include:e,exclude:r}={})=>{let o=a=>{let n=u=>typeof u=="string"?a===u:u.test(a);return e?e.some(n):r?!r.some(n):!0};for(let[a,n]of Gyt(t.constructor.prototype)){if(n==="constructor"||!o(n))continue;let u=Reflect.getOwnPropertyDescriptor(a,n);u&&typeof u.value=="function"&&(t[n]=t[n].bind(t))}return t}});var bEe=_(kn=>{"use strict";Object.defineProperty(kn,"__esModule",{value:!0});var rC,j2,kk,Qk,m6;typeof window>"u"||typeof MessageChannel!="function"?(tC=null,A6=null,f6=function(){if(tC!==null)try{var t=kn.unstable_now();tC(!0,t),tC=null}catch(e){throw setTimeout(f6,0),e}},IEe=Date.now(),kn.unstable_now=function(){return Date.now()-IEe},rC=function(t){tC!==null?setTimeout(rC,0,t):(tC=t,setTimeout(f6,0))},j2=function(t,e){A6=setTimeout(t,e)},kk=function(){clearTimeout(A6)},Qk=function(){return!1},m6=kn.unstable_forceFrameRate=function(){}):(Pk=window.performance,p6=window.Date,BEe=window.setTimeout,vEe=window.clearTimeout,typeof console<"u"&&(DEe=window.cancelAnimationFrame,typeof window.requestAnimationFrame!="function"&&console.error("This browser doesn't support requestAnimationFrame. Make sure that you load a polyfill in older browsers. https://fb.me/react-polyfills"),typeof DEe!="function"&&console.error("This browser doesn't support cancelAnimationFrame. Make sure that you load a polyfill in older browsers. https://fb.me/react-polyfills")),typeof Pk=="object"&&typeof Pk.now=="function"?kn.unstable_now=function(){return Pk.now()}:(PEe=p6.now(),kn.unstable_now=function(){return p6.now()-PEe}),H2=!1,q2=null,Sk=-1,h6=5,g6=0,Qk=function(){return kn.unstable_now()>=g6},m6=function(){},kn.unstable_forceFrameRate=function(t){0>t||125xk(u,r))p!==void 0&&0>xk(p,u)?(t[o]=p,t[A]=r,o=A):(t[o]=u,t[n]=r,o=n);else if(p!==void 0&&0>xk(p,r))t[o]=p,t[A]=r,o=A;else break e}}return e}return null}function xk(t,e){var r=t.sortIndex-e.sortIndex;return r!==0?r:t.id-e.id}var ru=[],Th=[],Yyt=1,sa=null,Lo=3,Rk=!1,zg=!1,G2=!1;function Tk(t){for(var e=nc(Th);e!==null;){if(e.callback===null)Fk(Th);else if(e.startTime<=t)Fk(Th),e.sortIndex=e.expirationTime,y6(ru,e);else break;e=nc(Th)}}function E6(t){if(G2=!1,Tk(t),!zg)if(nc(ru)!==null)zg=!0,rC(C6);else{var e=nc(Th);e!==null&&j2(E6,e.startTime-t)}}function C6(t,e){zg=!1,G2&&(G2=!1,kk()),Rk=!0;var r=Lo;try{for(Tk(e),sa=nc(ru);sa!==null&&(!(sa.expirationTime>e)||t&&!Qk());){var o=sa.callback;if(o!==null){sa.callback=null,Lo=sa.priorityLevel;var a=o(sa.expirationTime<=e);e=kn.unstable_now(),typeof a=="function"?sa.callback=a:sa===nc(ru)&&Fk(ru),Tk(e)}else Fk(ru);sa=nc(ru)}if(sa!==null)var n=!0;else{var u=nc(Th);u!==null&&j2(E6,u.startTime-e),n=!1}return n}finally{sa=null,Lo=r,Rk=!1}}function SEe(t){switch(t){case 1:return-1;case 2:return 250;case 5:return 1073741823;case 4:return 1e4;default:return 5e3}}var Wyt=m6;kn.unstable_ImmediatePriority=1;kn.unstable_UserBlockingPriority=2;kn.unstable_NormalPriority=3;kn.unstable_IdlePriority=5;kn.unstable_LowPriority=4;kn.unstable_runWithPriority=function(t,e){switch(t){case 1:case 2:case 3:case 4:case 5:break;default:t=3}var r=Lo;Lo=t;try{return e()}finally{Lo=r}};kn.unstable_next=function(t){switch(Lo){case 1:case 2:case 3:var e=3;break;default:e=Lo}var r=Lo;Lo=e;try{return t()}finally{Lo=r}};kn.unstable_scheduleCallback=function(t,e,r){var 
o=kn.unstable_now();if(typeof r=="object"&&r!==null){var a=r.delay;a=typeof a=="number"&&0o?(t.sortIndex=a,y6(Th,t),nc(ru)===null&&t===nc(Th)&&(G2?kk():G2=!0,j2(E6,a-o))):(t.sortIndex=r,y6(ru,t),zg||Rk||(zg=!0,rC(C6))),t};kn.unstable_cancelCallback=function(t){t.callback=null};kn.unstable_wrapCallback=function(t){var e=Lo;return function(){var r=Lo;Lo=e;try{return t.apply(this,arguments)}finally{Lo=r}}};kn.unstable_getCurrentPriorityLevel=function(){return Lo};kn.unstable_shouldYield=function(){var t=kn.unstable_now();Tk(t);var e=nc(ru);return e!==sa&&sa!==null&&e!==null&&e.callback!==null&&e.startTime<=t&&e.expirationTime{"use strict";xEe.exports=bEe()});var kEe=_((IKt,Y2)=>{Y2.exports=function t(e){"use strict";var r=VH(),o=an(),a=w6();function n(P){for(var D="https://reactjs.org/docs/error-decoder.html?invariant="+P,T=1;Tao||(P.current=yl[ao],yl[ao]=null,ao--)}function Mn(P,D){ao++,yl[ao]=P.current,P.current=D}var Ti={},On={current:Ti},_i={current:!1},ir=Ti;function Me(P,D){var T=P.type.contextTypes;if(!T)return Ti;var q=P.stateNode;if(q&&q.__reactInternalMemoizedUnmaskedChildContext===D)return q.__reactInternalMemoizedMaskedChildContext;var W={},fe;for(fe in T)W[fe]=D[fe];return q&&(P=P.stateNode,P.__reactInternalMemoizedUnmaskedChildContext=D,P.__reactInternalMemoizedMaskedChildContext=W),W}function ii(P){return P=P.childContextTypes,P!=null}function Ha(P){Vn(_i,P),Vn(On,P)}function hr(P){Vn(_i,P),Vn(On,P)}function Ac(P,D,T){if(On.current!==Ti)throw Error(n(168));Mn(On,D,P),Mn(_i,T,P)}function fu(P,D,T){var q=P.stateNode;if(P=D.childContextTypes,typeof q.getChildContext!="function")return T;q=q.getChildContext();for(var W in q)if(!(W in P))throw Error(n(108,de(D)||"Unknown",W));return r({},T,{},q)}function fc(P){var D=P.stateNode;return D=D&&D.__reactInternalMemoizedMergedChildContext||Ti,ir=On.current,Mn(On,D,P),Mn(_i,_i.current,P),!0}function El(P,D,T){var q=P.stateNode;if(!q)throw Error(n(169));T?(D=fu(P,D,ir),q.__reactInternalMemoizedMergedChildContext=D,Vn(_i,P),Vn(On,P),Mn(On,D,P)):Vn(_i,P),Mn(_i,T,P)}var vA=a.unstable_runWithPriority,pu=a.unstable_scheduleCallback,Ie=a.unstable_cancelCallback,Tt=a.unstable_shouldYield,pc=a.unstable_requestPaint,Hi=a.unstable_now,hu=a.unstable_getCurrentPriorityLevel,Yt=a.unstable_ImmediatePriority,Cl=a.unstable_UserBlockingPriority,DA=a.unstable_NormalPriority,ap=a.unstable_LowPriority,hc=a.unstable_IdlePriority,PA={},Qn=pc!==void 0?pc:function(){},hi=null,gc=null,SA=!1,aa=Hi(),Ni=1e4>aa?Hi:function(){return Hi()-aa};function _o(){switch(hu()){case Yt:return 99;case Cl:return 98;case DA:return 97;case ap:return 96;case hc:return 95;default:throw Error(n(332))}}function Xe(P){switch(P){case 99:return Yt;case 98:return Cl;case 97:return DA;case 96:return ap;case 95:return hc;default:throw Error(n(332))}}function lo(P,D){return P=Xe(P),vA(P,D)}function dc(P,D,T){return P=Xe(P),pu(P,D,T)}function gu(P){return hi===null?(hi=[P],gc=pu(Yt,du)):hi.push(P),PA}function qi(){if(gc!==null){var P=gc;gc=null,Ie(P)}du()}function du(){if(!SA&&hi!==null){SA=!0;var P=0;try{var D=hi;lo(99,function(){for(;P=D&&(jo=!0),P.firstContext=null)}function Es(P,D){if(ca!==P&&D!==!1&&D!==0)if((typeof D!="number"||D===1073741823)&&(ca=P,D=1073741823),D={context:P,observedBits:D,next:null},Hs===null){if(co===null)throw Error(n(308));Hs=D,co.dependencies={expirationTime:0,firstContext:D,responders:null}}else Hs=Hs.next=D;return b?P._currentValue:P._currentValue2}var qs=!1;function 
Un(P){return{baseState:P,firstUpdate:null,lastUpdate:null,firstCapturedUpdate:null,lastCapturedUpdate:null,firstEffect:null,lastEffect:null,firstCapturedEffect:null,lastCapturedEffect:null}}function Pn(P){return{baseState:P.baseState,firstUpdate:P.firstUpdate,lastUpdate:P.lastUpdate,firstCapturedUpdate:null,lastCapturedUpdate:null,firstEffect:null,lastEffect:null,firstCapturedEffect:null,lastCapturedEffect:null}}function Cs(P,D){return{expirationTime:P,suspenseConfig:D,tag:0,payload:null,callback:null,next:null,nextEffect:null}}function We(P,D){P.lastUpdate===null?P.firstUpdate=P.lastUpdate=D:(P.lastUpdate.next=D,P.lastUpdate=D)}function tt(P,D){var T=P.alternate;if(T===null){var q=P.updateQueue,W=null;q===null&&(q=P.updateQueue=Un(P.memoizedState))}else q=P.updateQueue,W=T.updateQueue,q===null?W===null?(q=P.updateQueue=Un(P.memoizedState),W=T.updateQueue=Un(T.memoizedState)):q=P.updateQueue=Pn(W):W===null&&(W=T.updateQueue=Pn(q));W===null||q===W?We(q,D):q.lastUpdate===null||W.lastUpdate===null?(We(q,D),We(W,D)):(We(q,D),W.lastUpdate=D)}function Bt(P,D){var T=P.updateQueue;T=T===null?P.updateQueue=Un(P.memoizedState):or(P,T),T.lastCapturedUpdate===null?T.firstCapturedUpdate=T.lastCapturedUpdate=D:(T.lastCapturedUpdate.next=D,T.lastCapturedUpdate=D)}function or(P,D){var T=P.alternate;return T!==null&&D===T.updateQueue&&(D=P.updateQueue=Pn(D)),D}function ee(P,D,T,q,W,fe){switch(T.tag){case 1:return P=T.payload,typeof P=="function"?P.call(fe,q,W):P;case 3:P.effectTag=P.effectTag&-4097|64;case 0:if(P=T.payload,W=typeof P=="function"?P.call(fe,q,W):P,W==null)break;return r({},q,W);case 2:qs=!0}return q}function ye(P,D,T,q,W){qs=!1,D=or(P,D);for(var fe=D.baseState,De=null,vt=0,wt=D.firstUpdate,bt=fe;wt!==null;){var _r=wt.expirationTime;_rbn?(ai=Qr,Qr=null):ai=Qr.sibling;var tn=di(nt,Qr,At[bn],Wt);if(tn===null){Qr===null&&(Qr=ai);break}P&&Qr&&tn.alternate===null&&D(nt,Qr),Ve=fe(tn,Ve,bn),Sn===null?vr=tn:Sn.sibling=tn,Sn=tn,Qr=ai}if(bn===At.length)return T(nt,Qr),vr;if(Qr===null){for(;bnbn?(ai=Qr,Qr=null):ai=Qr.sibling;var ho=di(nt,Qr,tn.value,Wt);if(ho===null){Qr===null&&(Qr=ai);break}P&&Qr&&ho.alternate===null&&D(nt,Qr),Ve=fe(ho,Ve,bn),Sn===null?vr=ho:Sn.sibling=ho,Sn=ho,Qr=ai}if(tn.done)return T(nt,Qr),vr;if(Qr===null){for(;!tn.done;bn++,tn=At.next())tn=os(nt,tn.value,Wt),tn!==null&&(Ve=fe(tn,Ve,bn),Sn===null?vr=tn:Sn.sibling=tn,Sn=tn);return vr}for(Qr=q(nt,Qr);!tn.done;bn++,tn=At.next())tn=po(Qr,nt,bn,tn.value,Wt),tn!==null&&(P&&tn.alternate!==null&&Qr.delete(tn.key===null?bn:tn.key),Ve=fe(tn,Ve,bn),Sn===null?vr=tn:Sn.sibling=tn,Sn=tn);return P&&Qr.forEach(function(pF){return D(nt,pF)}),vr}return function(nt,Ve,At,Wt){var vr=typeof At=="object"&&At!==null&&At.type===E&&At.key===null;vr&&(At=At.props.children);var Sn=typeof At=="object"&&At!==null;if(Sn)switch(At.$$typeof){case p:e:{for(Sn=At.key,vr=Ve;vr!==null;){if(vr.key===Sn)if(vr.tag===7?At.type===E:vr.elementType===At.type){T(nt,vr.sibling),Ve=W(vr,At.type===E?At.props.children:At.props,Wt),Ve.ref=kA(nt,vr,At),Ve.return=nt,nt=Ve;break e}else{T(nt,vr);break}else D(nt,vr);vr=vr.sibling}At.type===E?(Ve=ku(At.props.children,nt.mode,Wt,At.key),Ve.return=nt,nt=Ve):(Wt=xd(At.type,At.key,At.props,null,nt.mode,Wt),Wt.ref=kA(nt,Ve,At),Wt.return=nt,nt=Wt)}return De(nt);case h:e:{for(vr=At.key;Ve!==null;){if(Ve.key===vr)if(Ve.tag===4&&Ve.stateNode.containerInfo===At.containerInfo&&Ve.stateNode.implementation===At.implementation){T(nt,Ve.sibling),Ve=W(Ve,At.children||[],Wt),Ve.return=nt,nt=Ve;break e}else{T(nt,Ve);break}else 
D(nt,Ve);Ve=Ve.sibling}Ve=dw(At,nt.mode,Wt),Ve.return=nt,nt=Ve}return De(nt)}if(typeof At=="string"||typeof At=="number")return At=""+At,Ve!==null&&Ve.tag===6?(T(nt,Ve.sibling),Ve=W(Ve,At,Wt),Ve.return=nt,nt=Ve):(T(nt,Ve),Ve=gw(At,nt.mode,Wt),Ve.return=nt,nt=Ve),De(nt);if(xA(At))return KA(nt,Ve,At,Wt);if(ce(At))return Yo(nt,Ve,At,Wt);if(Sn&&lp(nt,At),typeof At>"u"&&!vr)switch(nt.tag){case 1:case 0:throw nt=nt.type,Error(n(152,nt.displayName||nt.name||"Component"))}return T(nt,Ve)}}var mu=e0(!0),t0=e0(!1),yu={},uo={current:yu},QA={current:yu},yc={current:yu};function Aa(P){if(P===yu)throw Error(n(174));return P}function r0(P,D){Mn(yc,D,P),Mn(QA,P,P),Mn(uo,yu,P),D=ne(D),Vn(uo,P),Mn(uo,D,P)}function Ec(P){Vn(uo,P),Vn(QA,P),Vn(yc,P)}function hd(P){var D=Aa(yc.current),T=Aa(uo.current);D=Z(T,P.type,D),T!==D&&(Mn(QA,P,P),Mn(uo,D,P))}function n0(P){QA.current===P&&(Vn(uo,P),Vn(QA,P))}var $n={current:0};function cp(P){for(var D=P;D!==null;){if(D.tag===13){var T=D.memoizedState;if(T!==null&&(T=T.dehydrated,T===null||Os(T)||so(T)))return D}else if(D.tag===19&&D.memoizedProps.revealOrder!==void 0){if(D.effectTag&64)return D}else if(D.child!==null){D.child.return=D,D=D.child;continue}if(D===P)break;for(;D.sibling===null;){if(D.return===null||D.return===P)return null;D=D.return}D.sibling.return=D.return,D=D.sibling}return null}function i0(P,D){return{responder:P,props:D}}var FA=u.ReactCurrentDispatcher,js=u.ReactCurrentBatchConfig,Eu=0,ja=null,Gi=null,fa=null,Cu=null,ws=null,Cc=null,wc=0,Y=null,Dt=0,wl=!1,bi=null,Ic=0;function ct(){throw Error(n(321))}function wu(P,D){if(D===null)return!1;for(var T=0;Twc&&(wc=_r,bd(wc))):(uw(_r,wt.suspenseConfig),fe=wt.eagerReducer===P?wt.eagerState:P(fe,wt.action)),De=wt,wt=wt.next}while(wt!==null&&wt!==q);bt||(vt=De,W=fe),ds(fe,D.memoizedState)||(jo=!0),D.memoizedState=fe,D.baseUpdate=vt,D.baseState=W,T.lastRenderedState=fe}return[D.memoizedState,T.dispatch]}function o0(P){var D=RA();return typeof P=="function"&&(P=P()),D.memoizedState=D.baseState=P,P=D.queue={last:null,dispatch:null,lastRenderedReducer:Br,lastRenderedState:P},P=P.dispatch=A0.bind(null,ja,P),[D.memoizedState,P]}function a0(P){return Is(Br,P)}function l0(P,D,T,q){return P={tag:P,create:D,destroy:T,deps:q,next:null},Y===null?(Y={lastEffect:null},Y.lastEffect=P.next=P):(D=Y.lastEffect,D===null?Y.lastEffect=P.next=P:(T=D.next,D.next=P,P.next=T,Y.lastEffect=P)),P}function Ap(P,D,T,q){var W=RA();Dt|=P,W.memoizedState=l0(D,T,void 0,q===void 0?null:q)}function Bc(P,D,T,q){var W=up();q=q===void 0?null:q;var fe=void 0;if(Gi!==null){var De=Gi.memoizedState;if(fe=De.destroy,q!==null&&wu(q,De.deps)){l0(0,T,fe,q);return}}Dt|=P,W.memoizedState=l0(D,T,fe,q)}function Ct(P,D){return Ap(516,192,P,D)}function gd(P,D){return Bc(516,192,P,D)}function c0(P,D){if(typeof D=="function")return P=P(),D(P),function(){D(null)};if(D!=null)return P=P(),D.current=P,function(){D.current=null}}function u0(){}function Iu(P,D){return RA().memoizedState=[P,D===void 0?null:D],P}function dd(P,D){var T=up();D=D===void 0?null:D;var q=T.memoizedState;return q!==null&&D!==null&&wu(D,q[1])?q[0]:(T.memoizedState=[P,D],P)}function A0(P,D,T){if(!(25>Ic))throw Error(n(301));var q=P.alternate;if(P===ja||q!==null&&q===ja)if(wl=!0,P={expirationTime:Eu,suspenseConfig:null,action:T,eagerReducer:null,eagerState:null,next:null},bi===null&&(bi=new Map),T=bi.get(D),T===void 0)bi.set(D,P);else{for(D=T;D.next!==null;)D=D.next;D.next=P}else{var 
W=ma(),fe=pt.suspense;W=HA(W,P,fe),fe={expirationTime:W,suspenseConfig:fe,action:T,eagerReducer:null,eagerState:null,next:null};var De=D.last;if(De===null)fe.next=fe;else{var vt=De.next;vt!==null&&(fe.next=vt),De.next=fe}if(D.last=fe,P.expirationTime===0&&(q===null||q.expirationTime===0)&&(q=D.lastRenderedReducer,q!==null))try{var wt=D.lastRenderedState,bt=q(wt,T);if(fe.eagerReducer=q,fe.eagerState=bt,ds(bt,wt))return}catch{}finally{}bc(P,W)}}var Bu={readContext:Es,useCallback:ct,useContext:ct,useEffect:ct,useImperativeHandle:ct,useLayoutEffect:ct,useMemo:ct,useReducer:ct,useRef:ct,useState:ct,useDebugValue:ct,useResponder:ct,useDeferredValue:ct,useTransition:ct},rw={readContext:Es,useCallback:Iu,useContext:Es,useEffect:Ct,useImperativeHandle:function(P,D,T){return T=T!=null?T.concat([P]):null,Ap(4,36,c0.bind(null,D,P),T)},useLayoutEffect:function(P,D){return Ap(4,36,P,D)},useMemo:function(P,D){var T=RA();return D=D===void 0?null:D,P=P(),T.memoizedState=[P,D],P},useReducer:function(P,D,T){var q=RA();return D=T!==void 0?T(D):D,q.memoizedState=q.baseState=D,P=q.queue={last:null,dispatch:null,lastRenderedReducer:P,lastRenderedState:D},P=P.dispatch=A0.bind(null,ja,P),[q.memoizedState,P]},useRef:function(P){var D=RA();return P={current:P},D.memoizedState=P},useState:o0,useDebugValue:u0,useResponder:i0,useDeferredValue:function(P,D){var T=o0(P),q=T[0],W=T[1];return Ct(function(){a.unstable_next(function(){var fe=js.suspense;js.suspense=D===void 0?null:D;try{W(P)}finally{js.suspense=fe}})},[P,D]),q},useTransition:function(P){var D=o0(!1),T=D[0],q=D[1];return[Iu(function(W){q(!0),a.unstable_next(function(){var fe=js.suspense;js.suspense=P===void 0?null:P;try{q(!1),W()}finally{js.suspense=fe}})},[P,T]),T]}},md={readContext:Es,useCallback:dd,useContext:Es,useEffect:gd,useImperativeHandle:function(P,D,T){return T=T!=null?T.concat([P]):null,Bc(4,36,c0.bind(null,D,P),T)},useLayoutEffect:function(P,D){return Bc(4,36,P,D)},useMemo:function(P,D){var T=up();D=D===void 0?null:D;var q=T.memoizedState;return q!==null&&D!==null&&wu(D,q[1])?q[0]:(P=P(),T.memoizedState=[P,D],P)},useReducer:Is,useRef:function(){return up().memoizedState},useState:a0,useDebugValue:u0,useResponder:i0,useDeferredValue:function(P,D){var T=a0(P),q=T[0],W=T[1];return gd(function(){a.unstable_next(function(){var fe=js.suspense;js.suspense=D===void 0?null:D;try{W(P)}finally{js.suspense=fe}})},[P,D]),q},useTransition:function(P){var D=a0(!1),T=D[0],q=D[1];return[dd(function(W){q(!0),a.unstable_next(function(){var fe=js.suspense;js.suspense=P===void 0?null:P;try{q(!1),W()}finally{js.suspense=fe}})},[P,T]),T]}},pa=null,vc=null,Il=!1;function vu(P,D){var T=Dl(5,null,null,0);T.elementType="DELETED",T.type="DELETED",T.stateNode=D,T.return=P,T.effectTag=8,P.lastEffect!==null?(P.lastEffect.nextEffect=T,P.lastEffect=T):P.firstEffect=P.lastEffect=T}function f0(P,D){switch(P.tag){case 5:return D=io(D,P.type,P.pendingProps),D!==null?(P.stateNode=D,!0):!1;case 6:return D=Pi(D,P.pendingProps),D!==null?(P.stateNode=D,!0):!1;case 13:return!1;default:return!1}}function TA(P){if(Il){var D=vc;if(D){var T=D;if(!f0(P,D)){if(D=uc(T),!D||!f0(P,D)){P.effectTag=P.effectTag&-1025|2,Il=!1,pa=P;return}vu(pa,T)}pa=P,vc=Au(D)}else P.effectTag=P.effectTag&-1025|2,Il=!1,pa=P}}function fp(P){for(P=P.return;P!==null&&P.tag!==5&&P.tag!==3&&P.tag!==13;)P=P.return;pa=P}function Ga(P){if(!y||P!==pa)return!1;if(!Il)return fp(P),Il=!0,!1;var D=P.type;if(P.tag!==5||D!=="head"&&D!=="body"&&!Fe(D,P.memoizedProps))for(D=vc;D;)vu(P,D),D=uc(D);if(fp(P),P.tag===13){if(!y)throw 
Error(n(316));if(P=P.memoizedState,P=P!==null?P.dehydrated:null,!P)throw Error(n(317));vc=Us(P)}else vc=pa?uc(P.stateNode):null;return!0}function p0(){y&&(vc=pa=null,Il=!1)}var pp=u.ReactCurrentOwner,jo=!1;function Bs(P,D,T,q){D.child=P===null?t0(D,null,T,q):mu(D,P.child,T,q)}function wi(P,D,T,q,W){T=T.render;var fe=D.ref;return ys(D,W),q=s0(P,D,T,q,fe,W),P!==null&&!jo?(D.updateQueue=P.updateQueue,D.effectTag&=-517,P.expirationTime<=W&&(P.expirationTime=0),si(P,D,W)):(D.effectTag|=1,Bs(P,D,q,W),D.child)}function yd(P,D,T,q,W,fe){if(P===null){var De=T.type;return typeof De=="function"&&!hw(De)&&De.defaultProps===void 0&&T.compare===null&&T.defaultProps===void 0?(D.tag=15,D.type=De,Ed(P,D,De,q,W,fe)):(P=xd(T.type,null,q,null,D.mode,fe),P.ref=D.ref,P.return=D,D.child=P)}return De=P.child,WD)&&_A.set(P,D)))}}function w0(P,D){P.expirationTimeP?D:P)}function fo(P){if(P.lastExpiredTime!==0)P.callbackExpirationTime=1073741823,P.callbackPriority=99,P.callbackNode=gu(cw.bind(null,P));else{var D=Sd(P),T=P.callbackNode;if(D===0)T!==null&&(P.callbackNode=null,P.callbackExpirationTime=0,P.callbackPriority=90);else{var q=ma();if(D===1073741823?q=99:D===1||D===2?q=95:(q=10*(1073741821-D)-10*(1073741821-q),q=0>=q?99:250>=q?98:5250>=q?97:95),T!==null){var W=P.callbackPriority;if(P.callbackExpirationTime===D&&W>=q)return;T!==PA&&Ie(T)}P.callbackExpirationTime=D,P.callbackPriority=q,D=D===1073741823?gu(cw.bind(null,P)):dc(q,xv.bind(null,P),{timeout:10*(1073741821-D)-Ni()}),P.callbackNode=D}}}function xv(P,D){if(Pd=0,D)return D=ma(),kd(P,D),fo(P),null;var T=Sd(P);if(T!==0){if(D=P.callbackNode,(yr&(is|Gs))!==En)throw Error(n(327));if(Cp(),P===gi&&T===ss||bu(P,T),Mr!==null){var q=yr;yr|=is;var W=jA(P);do try{rF();break}catch(vt){qA(P,vt)}while(!0);if(ua(),yr=q,mp.current=W,Yi===Id)throw D=Bd,bu(P,T),WA(P,T),fo(P),D;if(Mr===null)switch(W=P.finishedWork=P.current.alternate,P.finishedExpirationTime=T,q=Yi,gi=null,q){case Du:case Id:throw Error(n(345));case Ii:kd(P,2=T){P.lastPingedTime=T,bu(P,T);break}}if(fe=Sd(P),fe!==0&&fe!==T)break;if(q!==0&&q!==T){P.lastPingedTime=q;break}P.timeoutHandle=Se(xu.bind(null,P),W);break}xu(P);break;case vl:if(WA(P,T),q=P.lastSuspendedTime,T===q&&(P.nextKnownPendingLevel=Aw(W)),OA&&(W=P.lastPingedTime,W===0||W>=T)){P.lastPingedTime=T,bu(P,T);break}if(W=Sd(P),W!==0&&W!==T)break;if(q!==0&&q!==T){P.lastPingedTime=q;break}if(MA!==1073741823?q=10*(1073741821-MA)-Ni():Ka===1073741823?q=0:(q=10*(1073741821-Ka)-5e3,W=Ni(),T=10*(1073741821-T)-W,q=W-q,0>q&&(q=0),q=(120>q?120:480>q?480:1080>q?1080:1920>q?1920:3e3>q?3e3:4320>q?4320:1960*iw(q/1960))-q,T=q?q=0:(W=De.busyDelayMs|0,fe=Ni()-(10*(1073741821-fe)-(De.timeoutMs|0||5e3)),q=fe<=W?0:W+q-fe),10 component higher in the tree to provide a loading indicator or placeholder to display.`+ml(W))}Yi!==Sc&&(Yi=Ii),fe=g0(fe,W),wt=q;do{switch(wt.tag){case 3:De=fe,wt.effectTag|=4096,wt.expirationTime=D;var Ve=Sv(wt,De,D);Bt(wt,Ve);break e;case 1:De=fe;var At=wt.type,Wt=wt.stateNode;if(!(wt.effectTag&64)&&(typeof At.getDerivedStateFromError=="function"||Wt!==null&&typeof Wt.componentDidCatch=="function"&&(Su===null||!Su.has(Wt)))){wt.effectTag|=4096,wt.expirationTime=D;var vr=bv(wt,De,D);Bt(wt,vr);break e}}wt=wt.return}while(wt!==null)}Mr=Rv(Mr)}catch(Sn){D=Sn;continue}break}while(!0)}function jA(){var P=mp.current;return mp.current=Bu,P===null?Bu:P}function uw(P,D){Pyp&&(yp=P)}function tF(){for(;Mr!==null;)Mr=Fv(Mr)}function rF(){for(;Mr!==null&&!Tt();)Mr=Fv(Mr)}function Fv(P){var D=Nv(P.alternate,P,ss);return 
P.memoizedProps=P.pendingProps,D===null&&(D=Rv(P)),sw.current=null,D}function Rv(P){Mr=P;do{var D=Mr.alternate;if(P=Mr.return,Mr.effectTag&2048){if(D=nw(Mr,ss),D!==null)return D.effectTag&=2047,D;P!==null&&(P.firstEffect=P.lastEffect=null,P.effectTag|=2048)}else{e:{var T=D;D=Mr;var q=ss,W=D.pendingProps;switch(D.tag){case 2:break;case 16:break;case 15:case 0:break;case 1:ii(D.type)&&Ha(D);break;case 3:Ec(D),hr(D),W=D.stateNode,W.pendingContext&&(W.context=W.pendingContext,W.pendingContext=null),(T===null||T.child===null)&&Ga(D)&&ga(D),Bl(D);break;case 5:n0(D);var fe=Aa(yc.current);if(q=D.type,T!==null&&D.stateNode!=null)ns(T,D,q,W,fe),T.ref!==D.ref&&(D.effectTag|=128);else if(W){if(T=Aa(uo.current),Ga(D)){if(W=D,!y)throw Error(n(175));T=sp(W.stateNode,W.type,W.memoizedProps,fe,T,W),W.updateQueue=T,T=T!==null,T&&ga(D)}else{var De=ht(q,W,fe,T,D);Dc(De,D,!1,!1),D.stateNode=De,rt(De,q,W,fe,T)&&ga(D)}D.ref!==null&&(D.effectTag|=128)}else if(D.stateNode===null)throw Error(n(166));break;case 6:if(T&&D.stateNode!=null)Yr(T,D,T.memoizedProps,W);else{if(typeof W!="string"&&D.stateNode===null)throw Error(n(166));if(T=Aa(yc.current),fe=Aa(uo.current),Ga(D)){if(T=D,!y)throw Error(n(176));(T=op(T.stateNode,T.memoizedProps,T))&&ga(D)}else D.stateNode=Ye(W,T,fe,D)}break;case 11:break;case 13:if(Vn($n,D),W=D.memoizedState,D.effectTag&64){D.expirationTime=q;break e}W=W!==null,fe=!1,T===null?D.memoizedProps.fallback!==void 0&&Ga(D):(q=T.memoizedState,fe=q!==null,W||q===null||(q=T.child.sibling,q!==null&&(De=D.firstEffect,De!==null?(D.firstEffect=q,q.nextEffect=De):(D.firstEffect=D.lastEffect=q,q.nextEffect=null),q.effectTag=8))),W&&!fe&&D.mode&2&&(T===null&&D.memoizedProps.unstable_avoidThisFallback!==!0||$n.current&1?Yi===Du&&(Yi=da):((Yi===Du||Yi===da)&&(Yi=vl),yp!==0&&gi!==null&&(WA(gi,ss),Mv(gi,yp)))),S&&W&&(D.effectTag|=4),w&&(W||fe)&&(D.effectTag|=4);break;case 7:break;case 8:break;case 12:break;case 4:Ec(D),Bl(D);break;case 10:Ci(D);break;case 9:break;case 14:break;case 17:ii(D.type)&&Ha(D);break;case 19:if(Vn($n,D),W=D.memoizedState,W===null)break;if(fe=(D.effectTag&64)!==0,De=W.rendering,De===null){if(fe)Pc(W,!1);else if(Yi!==Du||T!==null&&T.effectTag&64)for(T=D.child;T!==null;){if(De=cp(T),De!==null){for(D.effectTag|=64,Pc(W,!1),T=De.updateQueue,T!==null&&(D.updateQueue=T,D.effectTag|=4),W.lastEffect===null&&(D.firstEffect=null),D.lastEffect=W.lastEffect,T=q,W=D.child;W!==null;)fe=W,q=T,fe.effectTag&=2,fe.nextEffect=null,fe.firstEffect=null,fe.lastEffect=null,De=fe.alternate,De===null?(fe.childExpirationTime=0,fe.expirationTime=q,fe.child=null,fe.memoizedProps=null,fe.memoizedState=null,fe.updateQueue=null,fe.dependencies=null):(fe.childExpirationTime=De.childExpirationTime,fe.expirationTime=De.expirationTime,fe.child=De.child,fe.memoizedProps=De.memoizedProps,fe.memoizedState=De.memoizedState,fe.updateQueue=De.updateQueue,q=De.dependencies,fe.dependencies=q===null?null:{expirationTime:q.expirationTime,firstContext:q.firstContext,responders:q.responders}),W=W.sibling;Mn($n,$n.current&1|2,D),D=D.child;break e}T=T.sibling}}else{if(!fe)if(T=cp(De),T!==null){if(D.effectTag|=64,fe=!0,T=T.updateQueue,T!==null&&(D.updateQueue=T,D.effectTag|=4),Pc(W,!0),W.tail===null&&W.tailMode==="hidden"&&!De.alternate){D=D.lastEffect=W.lastEffect,D!==null&&(D.nextEffect=null);break}}else Ni()>W.tailExpiration&&1W&&(W=q),De>W&&(W=De),fe=fe.sibling;T.childExpirationTime=W}if(D!==null)return 
D;P!==null&&!(P.effectTag&2048)&&(P.firstEffect===null&&(P.firstEffect=Mr.firstEffect),Mr.lastEffect!==null&&(P.lastEffect!==null&&(P.lastEffect.nextEffect=Mr.firstEffect),P.lastEffect=Mr.lastEffect),1P?D:P}function xu(P){var D=_o();return lo(99,nF.bind(null,P,D)),null}function nF(P,D){do Cp();while(E0!==null);if((yr&(is|Gs))!==En)throw Error(n(327));var T=P.finishedWork,q=P.finishedExpirationTime;if(T===null)return null;if(P.finishedWork=null,P.finishedExpirationTime=0,T===P.current)throw Error(n(177));P.callbackNode=null,P.callbackExpirationTime=0,P.callbackPriority=90,P.nextKnownPendingLevel=0;var W=Aw(T);if(P.firstPendingTime=W,q<=P.lastSuspendedTime?P.firstSuspendedTime=P.lastSuspendedTime=P.nextKnownPendingLevel=0:q<=P.firstSuspendedTime&&(P.firstSuspendedTime=q-1),q<=P.lastPingedTime&&(P.lastPingedTime=0),q<=P.lastExpiredTime&&(P.lastExpiredTime=0),P===gi&&(Mr=gi=null,ss=0),1=T?cn(P,D,T):(Mn($n,$n.current&1,D),D=si(P,D,T),D!==null?D.sibling:null);Mn($n,$n.current&1,D);break;case 19:if(q=D.childExpirationTime>=T,P.effectTag&64){if(q)return Ya(P,D,T);D.effectTag|=64}if(W=D.memoizedState,W!==null&&(W.rendering=null,W.tail=null),Mn($n,$n.current,D),!q)return null}return si(P,D,T)}jo=!1}}else jo=!1;switch(D.expirationTime=0,D.tag){case 2:if(q=D.type,P!==null&&(P.alternate=null,D.alternate=null,D.effectTag|=2),P=D.pendingProps,W=Me(D,On.current),ys(D,T),W=s0(null,D,q,P,W,T),D.effectTag|=1,typeof W=="object"&&W!==null&&typeof W.render=="function"&&W.$$typeof===void 0){if(D.tag=1,tw(),ii(q)){var fe=!0;fc(D)}else fe=!1;D.memoizedState=W.state!==null&&W.state!==void 0?W.state:null;var De=q.getDerivedStateFromProps;typeof De=="function"&&rr(D,q,De,P),W.updater=$r,D.stateNode=W,W._reactInternalFiber=D,qo(D,q,P,T),D=gp(null,D,q,!0,fe,T)}else D.tag=0,Bs(null,D,W,T),D=D.child;return D;case 16:if(W=D.elementType,P!==null&&(P.alternate=null,D.alternate=null,D.effectTag|=2),P=D.pendingProps,Ce(W),W._status!==1)throw W._result;switch(W=W._result,D.type=W,fe=D.tag=uF(W),P=Ei(W,P),fe){case 0:D=NA(null,D,W,P,T);break;case 1:D=hp(null,D,W,P,T);break;case 11:D=wi(null,D,W,P,T);break;case 14:D=yd(null,D,W,Ei(W.type,P),q,T);break;default:throw Error(n(306,W,""))}return D;case 0:return q=D.type,W=D.pendingProps,W=D.elementType===q?W:Ei(q,W),NA(P,D,q,W,T);case 1:return q=D.type,W=D.pendingProps,W=D.elementType===q?W:Ei(q,W),hp(P,D,q,W,T);case 3:if(h0(D),q=D.updateQueue,q===null)throw Error(n(282));if(W=D.memoizedState,W=W!==null?W.element:null,ye(D,q,D.pendingProps,null,T),q=D.memoizedState.element,q===W)p0(),D=si(P,D,T);else{if((W=D.stateNode.hydrate)&&(y?(vc=Au(D.stateNode.containerInfo),pa=D,W=Il=!0):W=!1),W)for(T=t0(D,null,q,T),D.child=T;T;)T.effectTag=T.effectTag&-3|1024,T=T.sibling;else Bs(P,D,q,T),p0();D=D.child}return D;case 5:return hd(D),P===null&&TA(D),q=D.type,W=D.pendingProps,fe=P!==null?P.memoizedProps:null,De=W.children,Fe(q,W)?De=null:fe!==null&&Fe(q,fe)&&(D.effectTag|=16),Go(P,D),D.mode&4&&T!==1&&ke(q,W)?(D.expirationTime=D.childExpirationTime=1,D=null):(Bs(P,D,De,T),D=D.child),D;case 6:return P===null&&TA(D),null;case 13:return cn(P,D,T);case 4:return r0(D,D.stateNode.containerInfo),q=D.pendingProps,P===null?D.child=mu(D,null,q,T):Bs(P,D,q,T),D.child;case 11:return q=D.type,W=D.pendingProps,W=D.elementType===q?W:Ei(q,W),wi(P,D,q,W,T);case 7:return Bs(P,D,D.pendingProps,T),D.child;case 8:return Bs(P,D,D.pendingProps.children,T),D.child;case 12:return Bs(P,D,D.pendingProps.children,T),D.child;case 10:e:{if(q=D.type._context,W=D.pendingProps,De=D.memoizedProps,fe=W.value,Ho(D,fe),De!==null){var 
[Elided: vendored, minified JavaScript bundle (machine-generated). The recoverable fragments identify React's fiber reconciler internals, the yoga-layout constants table, and nbind/Emscripten runtime glue (heap views, UTF-8 codecs, Browser shims, syscall stubs). The text is truncated mid-token throughout and is not human-reviewable; no reviewable source changes survive in this span.]
getBuffer(o).set(n)}}_nbind.commitBuffer=commitBuffer;var dirtyList=[],gcTimer=0;function sweep(){for(var t=0,e=dirtyList;t>2]=DYNAMIC_BASE,staticSealed=!0;function invoke_viiiii(t,e,r,o,a,n){try{Module.dynCall_viiiii(t,e,r,o,a,n)}catch(u){if(typeof u!="number"&&u!=="longjmp")throw u;Module.setThrew(1,0)}}function invoke_vif(t,e,r){try{Module.dynCall_vif(t,e,r)}catch(o){if(typeof o!="number"&&o!=="longjmp")throw o;Module.setThrew(1,0)}}function invoke_vid(t,e,r){try{Module.dynCall_vid(t,e,r)}catch(o){if(typeof o!="number"&&o!=="longjmp")throw o;Module.setThrew(1,0)}}function invoke_fiff(t,e,r,o){try{return Module.dynCall_fiff(t,e,r,o)}catch(a){if(typeof a!="number"&&a!=="longjmp")throw a;Module.setThrew(1,0)}}function invoke_vi(t,e){try{Module.dynCall_vi(t,e)}catch(r){if(typeof r!="number"&&r!=="longjmp")throw r;Module.setThrew(1,0)}}function invoke_vii(t,e,r){try{Module.dynCall_vii(t,e,r)}catch(o){if(typeof o!="number"&&o!=="longjmp")throw o;Module.setThrew(1,0)}}function invoke_ii(t,e){try{return Module.dynCall_ii(t,e)}catch(r){if(typeof r!="number"&&r!=="longjmp")throw r;Module.setThrew(1,0)}}function invoke_viddi(t,e,r,o,a){try{Module.dynCall_viddi(t,e,r,o,a)}catch(n){if(typeof n!="number"&&n!=="longjmp")throw n;Module.setThrew(1,0)}}function invoke_vidd(t,e,r,o){try{Module.dynCall_vidd(t,e,r,o)}catch(a){if(typeof a!="number"&&a!=="longjmp")throw a;Module.setThrew(1,0)}}function invoke_iiii(t,e,r,o){try{return Module.dynCall_iiii(t,e,r,o)}catch(a){if(typeof a!="number"&&a!=="longjmp")throw a;Module.setThrew(1,0)}}function invoke_diii(t,e,r,o){try{return Module.dynCall_diii(t,e,r,o)}catch(a){if(typeof a!="number"&&a!=="longjmp")throw a;Module.setThrew(1,0)}}function invoke_di(t,e){try{return Module.dynCall_di(t,e)}catch(r){if(typeof r!="number"&&r!=="longjmp")throw r;Module.setThrew(1,0)}}function invoke_iid(t,e,r){try{return Module.dynCall_iid(t,e,r)}catch(o){if(typeof o!="number"&&o!=="longjmp")throw o;Module.setThrew(1,0)}}function invoke_iii(t,e,r){try{return Module.dynCall_iii(t,e,r)}catch(o){if(typeof o!="number"&&o!=="longjmp")throw o;Module.setThrew(1,0)}}function invoke_viiddi(t,e,r,o,a,n){try{Module.dynCall_viiddi(t,e,r,o,a,n)}catch(u){if(typeof u!="number"&&u!=="longjmp")throw u;Module.setThrew(1,0)}}function invoke_viiiiii(t,e,r,o,a,n,u){try{Module.dynCall_viiiiii(t,e,r,o,a,n,u)}catch(A){if(typeof A!="number"&&A!=="longjmp")throw A;Module.setThrew(1,0)}}function invoke_dii(t,e,r){try{return Module.dynCall_dii(t,e,r)}catch(o){if(typeof o!="number"&&o!=="longjmp")throw o;Module.setThrew(1,0)}}function invoke_i(t){try{return Module.dynCall_i(t)}catch(e){if(typeof e!="number"&&e!=="longjmp")throw e;Module.setThrew(1,0)}}function invoke_iiiiii(t,e,r,o,a,n){try{return Module.dynCall_iiiiii(t,e,r,o,a,n)}catch(u){if(typeof u!="number"&&u!=="longjmp")throw u;Module.setThrew(1,0)}}function invoke_viiid(t,e,r,o,a){try{Module.dynCall_viiid(t,e,r,o,a)}catch(n){if(typeof n!="number"&&n!=="longjmp")throw n;Module.setThrew(1,0)}}function invoke_viififi(t,e,r,o,a,n,u){try{Module.dynCall_viififi(t,e,r,o,a,n,u)}catch(A){if(typeof A!="number"&&A!=="longjmp")throw A;Module.setThrew(1,0)}}function invoke_viii(t,e,r,o){try{Module.dynCall_viii(t,e,r,o)}catch(a){if(typeof a!="number"&&a!=="longjmp")throw a;Module.setThrew(1,0)}}function invoke_v(t){try{Module.dynCall_v(t)}catch(e){if(typeof e!="number"&&e!=="longjmp")throw e;Module.setThrew(1,0)}}function invoke_viid(t,e,r,o){try{Module.dynCall_viid(t,e,r,o)}catch(a){if(typeof a!="number"&&a!=="longjmp")throw a;Module.setThrew(1,0)}}function 
invoke_idd(t,e,r){try{return Module.dynCall_idd(t,e,r)}catch(o){if(typeof o!="number"&&o!=="longjmp")throw o;Module.setThrew(1,0)}}function invoke_viiii(t,e,r,o,a){try{Module.dynCall_viiii(t,e,r,o,a)}catch(n){if(typeof n!="number"&&n!=="longjmp")throw n;Module.setThrew(1,0)}}Module.asmGlobalArg={Math,Int8Array,Int16Array,Int32Array,Uint8Array,Uint16Array,Uint32Array,Float32Array,Float64Array,NaN:NaN,Infinity:1/0},Module.asmLibraryArg={abort,assert,enlargeMemory,getTotalMemory,abortOnCannotGrowMemory,invoke_viiiii,invoke_vif,invoke_vid,invoke_fiff,invoke_vi,invoke_vii,invoke_ii,invoke_viddi,invoke_vidd,invoke_iiii,invoke_diii,invoke_di,invoke_iid,invoke_iii,invoke_viiddi,invoke_viiiiii,invoke_dii,invoke_i,invoke_iiiiii,invoke_viiid,invoke_viififi,invoke_viii,invoke_v,invoke_viid,invoke_idd,invoke_viiii,_emscripten_asm_const_iiiii,_emscripten_asm_const_iiidddddd,_emscripten_asm_const_iiiid,__nbind_reference_external,_emscripten_asm_const_iiiiiiii,_removeAccessorPrefix,_typeModule,__nbind_register_pool,__decorate,_llvm_stackrestore,___cxa_atexit,__extends,__nbind_get_value_object,__ZN8facebook4yoga14YGNodeToStringEPNSt3__212basic_stringIcNS1_11char_traitsIcEENS1_9allocatorIcEEEEP6YGNode14YGPrintOptionsj,_emscripten_set_main_loop_timing,__nbind_register_primitive,__nbind_register_type,_emscripten_memcpy_big,__nbind_register_function,___setErrNo,__nbind_register_class,__nbind_finish,_abort,_nbind_value,_llvm_stacksave,___syscall54,_defineHidden,_emscripten_set_main_loop,_emscripten_get_now,__nbind_register_callback_signature,_emscripten_asm_const_iiiiii,__nbind_free_external,_emscripten_asm_const_iiii,_emscripten_asm_const_iiididi,___syscall6,_atexit,___syscall140,___syscall146,DYNAMICTOP_PTR,tempDoublePtr,ABORT,STACKTOP,STACK_MAX,cttz_i8,___dso_handle};var asm=function(t,e,r){var o=new t.Int8Array(r),a=new t.Int16Array(r),n=new t.Int32Array(r),u=new t.Uint8Array(r),A=new t.Uint16Array(r),p=new t.Uint32Array(r),h=new t.Float32Array(r),E=new 
t.Float64Array(r),I=e.DYNAMICTOP_PTR|0,v=e.tempDoublePtr|0,x=e.ABORT|0,C=e.STACKTOP|0,R=e.STACK_MAX|0,L=e.cttz_i8|0,U=e.___dso_handle|0,z=0,te=0,ae=0,le=0,ce=t.NaN,Ce=t.Infinity,de=0,Be=0,Ee=0,g=0,me=0,we=0,Ae=t.Math.floor,ne=t.Math.abs,Z=t.Math.sqrt,xe=t.Math.pow,Ne=t.Math.cos,ht=t.Math.sin,H=t.Math.tan,rt=t.Math.acos,Te=t.Math.asin,Fe=t.Math.atan,ke=t.Math.atan2,Ye=t.Math.exp,Se=t.Math.log,et=t.Math.ceil,Ue=t.Math.imul,b=t.Math.min,w=t.Math.max,S=t.Math.clz32,y=t.Math.fround,F=e.abort,J=e.assert,X=e.enlargeMemory,$=e.getTotalMemory,ie=e.abortOnCannotGrowMemory,be=e.invoke_viiiii,Re=e.invoke_vif,at=e.invoke_vid,dt=e.invoke_fiff,jt=e.invoke_vi,tr=e.invoke_vii,St=e.invoke_ii,ln=e.invoke_viddi,kr=e.invoke_vidd,mr=e.invoke_iiii,br=e.invoke_diii,Kr=e.invoke_di,Kn=e.invoke_iid,Ms=e.invoke_iii,Ri=e.invoke_viiddi,gs=e.invoke_viiiiii,io=e.invoke_dii,Pi=e.invoke_i,Os=e.invoke_iiiiii,so=e.invoke_viiid,uc=e.invoke_viififi,Au=e.invoke_viii,sp=e.invoke_v,op=e.invoke_viid,Us=e.invoke_idd,Dn=e.invoke_viiii,oo=e._emscripten_asm_const_iiiii,_s=e._emscripten_asm_const_iiidddddd,ml=e._emscripten_asm_const_iiiid,yl=e.__nbind_reference_external,ao=e._emscripten_asm_const_iiiiiiii,Vn=e._removeAccessorPrefix,Mn=e._typeModule,Ti=e.__nbind_register_pool,On=e.__decorate,_i=e._llvm_stackrestore,ir=e.___cxa_atexit,Me=e.__extends,ii=e.__nbind_get_value_object,Ha=e.__ZN8facebook4yoga14YGNodeToStringEPNSt3__212basic_stringIcNS1_11char_traitsIcEENS1_9allocatorIcEEEEP6YGNode14YGPrintOptionsj,hr=e._emscripten_set_main_loop_timing,Ac=e.__nbind_register_primitive,fu=e.__nbind_register_type,fc=e._emscripten_memcpy_big,El=e.__nbind_register_function,vA=e.___setErrNo,pu=e.__nbind_register_class,Ie=e.__nbind_finish,Tt=e._abort,pc=e._nbind_value,Hi=e._llvm_stacksave,hu=e.___syscall54,Yt=e._defineHidden,Cl=e._emscripten_set_main_loop,DA=e._emscripten_get_now,ap=e.__nbind_register_callback_signature,hc=e._emscripten_asm_const_iiiiii,PA=e.__nbind_free_external,Qn=e._emscripten_asm_const_iiii,hi=e._emscripten_asm_const_iiididi,gc=e.___syscall6,SA=e._atexit,aa=e.___syscall140,Ni=e.___syscall146,_o=y(0);let Xe=y(0);function lo(s){s=s|0;var l=0;return l=C,C=C+s|0,C=C+15&-16,l|0}function dc(){return C|0}function gu(s){s=s|0,C=s}function qi(s,l){s=s|0,l=l|0,C=s,R=l}function du(s,l){s=s|0,l=l|0,z||(z=s,te=l)}function bA(s){s=s|0,we=s}function qa(){return we|0}function mc(){var s=0,l=0;Dr(8104,8,400)|0,Dr(8504,408,540)|0,s=9044,l=s+44|0;do n[s>>2]=0,s=s+4|0;while((s|0)<(l|0));o[9088]=0,o[9089]=1,n[2273]=0,n[2274]=948,n[2275]=948,ir(17,8104,U|0)|0}function ds(s){s=s|0,ft(s+948|0)}function Ht(s){return s=y(s),((Su(s)|0)&2147483647)>>>0>2139095040|0}function Fn(s,l,c){s=s|0,l=l|0,c=c|0;e:do if(n[s+(l<<3)+4>>2]|0)s=s+(l<<3)|0;else{if((l|2|0)==3&&n[s+60>>2]|0){s=s+56|0;break}switch(l|0){case 0:case 2:case 4:case 5:{if(n[s+52>>2]|0){s=s+48|0;break e}break}default:}if(n[s+68>>2]|0){s=s+64|0;break}else{s=(l|1|0)==5?948:c;break}}while(!1);return s|0}function Ei(s){s=s|0;var l=0;return l=Jv(1e3)|0,la(s,(l|0)!=0,2456),n[2276]=(n[2276]|0)+1,Dr(l|0,8104,1e3)|0,o[s+2>>0]|0&&(n[l+4>>2]=2,n[l+12>>2]=4),n[l+976>>2]=s,l|0}function la(s,l,c){s=s|0,l=l|0,c=c|0;var f=0,d=0;d=C,C=C+16|0,f=d,l||(n[f>>2]=c,g0(s,5,3197,f)),C=d}function co(){return Ei(956)|0}function Hs(s){s=s|0;var l=0;return l=Kt(1e3)|0,ca(l,s),la(n[s+976>>2]|0,1,2456),n[2276]=(n[2276]|0)+1,n[l+944>>2]=0,l|0}function ca(s,l){s=s|0,l=l|0;var c=0;Dr(s|0,l|0,948)|0,Cd(s+948|0,l+948|0),c=s+960|0,s=l+960|0,l=c+40|0;do n[c>>2]=n[s>>2],c=c+4|0,s=s+4|0;while((c|0)<(l|0))}function ua(s){s=s|0;var 
l=0,c=0,f=0,d=0;if(l=s+944|0,c=n[l>>2]|0,c|0&&(Ho(c+948|0,s)|0,n[l>>2]=0),c=Ci(s)|0,c|0){l=0;do n[(ms(s,l)|0)+944>>2]=0,l=l+1|0;while((l|0)!=(c|0))}c=s+948|0,f=n[c>>2]|0,d=s+952|0,l=n[d>>2]|0,(l|0)!=(f|0)&&(n[d>>2]=l+(~((l+-4-f|0)>>>2)<<2)),ys(c),Xv(s),n[2276]=(n[2276]|0)+-1}function Ho(s,l){s=s|0,l=l|0;var c=0,f=0,d=0,m=0,B=0,k=0;f=n[s>>2]|0,k=s+4|0,c=n[k>>2]|0,m=c;e:do if((f|0)==(c|0))d=f,B=4;else for(s=f;;){if((n[s>>2]|0)==(l|0)){d=s,B=4;break e}if(s=s+4|0,(s|0)==(c|0)){s=0;break}}while(!1);return(B|0)==4&&((d|0)!=(c|0)?(f=d+4|0,s=m-f|0,l=s>>2,l&&(ww(d|0,f|0,s|0)|0,c=n[k>>2]|0),s=d+(l<<2)|0,(c|0)==(s|0)||(n[k>>2]=c+(~((c+-4-s|0)>>>2)<<2)),s=1):s=0),s|0}function Ci(s){return s=s|0,(n[s+952>>2]|0)-(n[s+948>>2]|0)>>2|0}function ms(s,l){s=s|0,l=l|0;var c=0;return c=n[s+948>>2]|0,(n[s+952>>2]|0)-c>>2>>>0>l>>>0?s=n[c+(l<<2)>>2]|0:s=0,s|0}function ys(s){s=s|0;var l=0,c=0,f=0,d=0;f=C,C=C+32|0,l=f,d=n[s>>2]|0,c=(n[s+4>>2]|0)-d|0,((n[s+8>>2]|0)-d|0)>>>0>c>>>0&&(d=c>>2,Ep(l,d,d,s+8|0),E0(s,l),UA(l)),C=f}function Es(s,l){s=s|0,l=l|0;var c=0,f=0,d=0,m=0,B=0,k=0,Q=0,O=0;O=Ci(s)|0;do if(O|0){if((n[(ms(s,0)|0)+944>>2]|0)==(s|0)){if(!(Ho(s+948|0,l)|0))break;Dr(l+400|0,8504,540)|0,n[l+944>>2]=0,Le(s);break}B=n[(n[s+976>>2]|0)+12>>2]|0,k=s+948|0,Q=(B|0)==0,c=0,m=0;do f=n[(n[k>>2]|0)+(m<<2)>>2]|0,(f|0)==(l|0)?Le(s):(d=Hs(f)|0,n[(n[k>>2]|0)+(c<<2)>>2]=d,n[d+944>>2]=s,Q||BR[B&15](f,d,s,c),c=c+1|0),m=m+1|0;while((m|0)!=(O|0));if(c>>>0>>0){Q=s+948|0,k=s+952|0,B=c,c=n[k>>2]|0;do m=(n[Q>>2]|0)+(B<<2)|0,f=m+4|0,d=c-f|0,l=d>>2,l&&(ww(m|0,f|0,d|0)|0,c=n[k>>2]|0),d=c,f=m+(l<<2)|0,(d|0)!=(f|0)&&(c=d+(~((d+-4-f|0)>>>2)<<2)|0,n[k>>2]=c),B=B+1|0;while((B|0)!=(O|0))}}while(!1)}function qs(s){s=s|0;var l=0,c=0,f=0,d=0;Un(s,(Ci(s)|0)==0,2491),Un(s,(n[s+944>>2]|0)==0,2545),l=s+948|0,c=n[l>>2]|0,f=s+952|0,d=n[f>>2]|0,(d|0)!=(c|0)&&(n[f>>2]=d+(~((d+-4-c|0)>>>2)<<2)),ys(l),l=s+976|0,c=n[l>>2]|0,Dr(s|0,8104,1e3)|0,o[c+2>>0]|0&&(n[s+4>>2]=2,n[s+12>>2]=4),n[l>>2]=c}function Un(s,l,c){s=s|0,l=l|0,c=c|0;var f=0,d=0;d=C,C=C+16|0,f=d,l||(n[f>>2]=c,Ao(s,5,3197,f)),C=d}function Pn(){return n[2276]|0}function Cs(){var s=0;return s=Jv(20)|0,We((s|0)!=0,2592),n[2277]=(n[2277]|0)+1,n[s>>2]=n[239],n[s+4>>2]=n[240],n[s+8>>2]=n[241],n[s+12>>2]=n[242],n[s+16>>2]=n[243],s|0}function We(s,l){s=s|0,l=l|0;var c=0,f=0;f=C,C=C+16|0,c=f,s||(n[c>>2]=l,Ao(0,5,3197,c)),C=f}function tt(s){s=s|0,Xv(s),n[2277]=(n[2277]|0)+-1}function Bt(s,l){s=s|0,l=l|0;var c=0;l?(Un(s,(Ci(s)|0)==0,2629),c=1):(c=0,l=0),n[s+964>>2]=l,n[s+988>>2]=c}function or(s,l,c){s=s|0,l=l|0,c=c|0;var f=0,d=0,m=0,B=0;f=C,C=C+16|0,m=f+8|0,d=f+4|0,B=f,n[d>>2]=l,Un(s,(n[l+944>>2]|0)==0,2709),Un(s,(n[s+964>>2]|0)==0,2763),ee(s),l=s+948|0,n[B>>2]=(n[l>>2]|0)+(c<<2),n[m>>2]=n[B>>2],ye(l,m,d)|0,n[(n[d>>2]|0)+944>>2]=s,Le(s),C=f}function ee(s){s=s|0;var l=0,c=0,f=0,d=0,m=0,B=0,k=0;if(c=Ci(s)|0,c|0&&(n[(ms(s,0)|0)+944>>2]|0)!=(s|0)){f=n[(n[s+976>>2]|0)+12>>2]|0,d=s+948|0,m=(f|0)==0,l=0;do B=n[(n[d>>2]|0)+(l<<2)>>2]|0,k=Hs(B)|0,n[(n[d>>2]|0)+(l<<2)>>2]=k,n[k+944>>2]=s,m||BR[f&15](B,k,s,l),l=l+1|0;while((l|0)!=(c|0))}}function ye(s,l,c){s=s|0,l=l|0,c=c|0;var f=0,d=0,m=0,B=0,k=0,Q=0,O=0,M=0,j=0,se=0,je=0,Oe=0,Qe=0,$e=0,Je=0;$e=C,C=C+64|0,j=$e+52|0,k=$e+48|0,se=$e+28|0,je=$e+24|0,Oe=$e+20|0,Qe=$e,f=n[s>>2]|0,m=f,l=f+((n[l>>2]|0)-m>>2<<2)|0,f=s+4|0,d=n[f>>2]|0,B=s+8|0;do 
if(d>>>0<(n[B>>2]|0)>>>0){if((l|0)==(d|0)){n[l>>2]=n[c>>2],n[f>>2]=(n[f>>2]|0)+4;break}_A(s,l,d,l+4|0),l>>>0<=c>>>0&&(c=(n[f>>2]|0)>>>0>c>>>0?c+4|0:c),n[l>>2]=n[c>>2]}else{f=(d-m>>2)+1|0,d=N(s)|0,d>>>0>>0&&Jr(s),M=n[s>>2]|0,O=(n[B>>2]|0)-M|0,m=O>>1,Ep(Qe,O>>2>>>0>>1>>>0?m>>>0>>0?f:m:d,l-M>>2,s+8|0),M=Qe+8|0,f=n[M>>2]|0,m=Qe+12|0,O=n[m>>2]|0,B=O,Q=f;do if((f|0)==(O|0)){if(O=Qe+4|0,f=n[O>>2]|0,Je=n[Qe>>2]|0,d=Je,f>>>0<=Je>>>0){f=B-d>>1,f=f|0?f:1,Ep(se,f,f>>>2,n[Qe+16>>2]|0),n[je>>2]=n[O>>2],n[Oe>>2]=n[M>>2],n[k>>2]=n[je>>2],n[j>>2]=n[Oe>>2],lw(se,k,j),f=n[Qe>>2]|0,n[Qe>>2]=n[se>>2],n[se>>2]=f,f=se+4|0,Je=n[O>>2]|0,n[O>>2]=n[f>>2],n[f>>2]=Je,f=se+8|0,Je=n[M>>2]|0,n[M>>2]=n[f>>2],n[f>>2]=Je,f=se+12|0,Je=n[m>>2]|0,n[m>>2]=n[f>>2],n[f>>2]=Je,UA(se),f=n[M>>2]|0;break}m=f,B=((m-d>>2)+1|0)/-2|0,k=f+(B<<2)|0,d=Q-m|0,m=d>>2,m&&(ww(k|0,f|0,d|0)|0,f=n[O>>2]|0),Je=k+(m<<2)|0,n[M>>2]=Je,n[O>>2]=f+(B<<2),f=Je}while(!1);n[f>>2]=n[c>>2],n[M>>2]=(n[M>>2]|0)+4,l=C0(s,Qe,l)|0,UA(Qe)}while(!1);return C=$e,l|0}function Le(s){s=s|0;var l=0;do{if(l=s+984|0,o[l>>0]|0)break;o[l>>0]=1,h[s+504>>2]=y(ce),s=n[s+944>>2]|0}while(s|0)}function ft(s){s=s|0;var l=0,c=0,f=0;c=n[s>>2]|0,f=c,c|0&&(s=s+4|0,l=n[s>>2]|0,(l|0)!=(c|0)&&(n[s>>2]=l+(~((l+-4-f|0)>>>2)<<2)),gt(c))}function pt(s){return s=s|0,n[s+944>>2]|0}function Nt(s){s=s|0,Un(s,(n[s+964>>2]|0)!=0,2832),Le(s)}function rr(s){return s=s|0,(o[s+984>>0]|0)!=0|0}function $r(s,l){s=s|0,l=l|0,TUe(s,l,400)|0&&(Dr(s|0,l|0,400)|0,Le(s))}function ji(s){s=s|0;var l=Xe;return l=y(h[s+44>>2]),s=Ht(l)|0,y(s?y(0):l)}function rs(s){s=s|0;var l=Xe;return l=y(h[s+48>>2]),Ht(l)|0&&(l=o[(n[s+976>>2]|0)+2>>0]|0?y(1):y(0)),y(l)}function Si(s,l){s=s|0,l=l|0,n[s+980>>2]=l}function qo(s){return s=s|0,n[s+980>>2]|0}function xA(s,l){s=s|0,l=l|0;var c=0;c=s+4|0,(n[c>>2]|0)!=(l|0)&&(n[c>>2]=l,Le(s))}function kA(s){return s=s|0,n[s+4>>2]|0}function lp(s,l){s=s|0,l=l|0;var c=0;c=s+8|0,(n[c>>2]|0)!=(l|0)&&(n[c>>2]=l,Le(s))}function e0(s){return s=s|0,n[s+8>>2]|0}function mu(s,l){s=s|0,l=l|0;var c=0;c=s+12|0,(n[c>>2]|0)!=(l|0)&&(n[c>>2]=l,Le(s))}function t0(s){return s=s|0,n[s+12>>2]|0}function yu(s,l){s=s|0,l=l|0;var c=0;c=s+16|0,(n[c>>2]|0)!=(l|0)&&(n[c>>2]=l,Le(s))}function uo(s){return s=s|0,n[s+16>>2]|0}function QA(s,l){s=s|0,l=l|0;var c=0;c=s+20|0,(n[c>>2]|0)!=(l|0)&&(n[c>>2]=l,Le(s))}function yc(s){return s=s|0,n[s+20>>2]|0}function Aa(s,l){s=s|0,l=l|0;var c=0;c=s+24|0,(n[c>>2]|0)!=(l|0)&&(n[c>>2]=l,Le(s))}function r0(s){return s=s|0,n[s+24>>2]|0}function Ec(s,l){s=s|0,l=l|0;var c=0;c=s+28|0,(n[c>>2]|0)!=(l|0)&&(n[c>>2]=l,Le(s))}function hd(s){return s=s|0,n[s+28>>2]|0}function n0(s,l){s=s|0,l=l|0;var c=0;c=s+32|0,(n[c>>2]|0)!=(l|0)&&(n[c>>2]=l,Le(s))}function $n(s){return s=s|0,n[s+32>>2]|0}function cp(s,l){s=s|0,l=l|0;var c=0;c=s+36|0,(n[c>>2]|0)!=(l|0)&&(n[c>>2]=l,Le(s))}function i0(s){return s=s|0,n[s+36>>2]|0}function FA(s,l){s=s|0,l=y(l);var c=0;c=s+40|0,y(h[c>>2])!=l&&(h[c>>2]=l,Le(s))}function js(s,l){s=s|0,l=y(l);var c=0;c=s+44|0,y(h[c>>2])!=l&&(h[c>>2]=l,Le(s))}function Eu(s,l){s=s|0,l=y(l);var c=0;c=s+48|0,y(h[c>>2])!=l&&(h[c>>2]=l,Le(s))}function ja(s,l){s=s|0,l=y(l);var c=0,f=0,d=0,m=0;m=Ht(l)|0,c=(m^1)&1,f=s+52|0,d=s+56|0,m|y(h[f>>2])==l&&(n[d>>2]|0)==(c|0)||(h[f>>2]=l,n[d>>2]=c,Le(s))}function Gi(s,l){s=s|0,l=y(l);var c=0,f=0;f=s+52|0,c=s+56|0,y(h[f>>2])==l&&(n[c>>2]|0)==2||(h[f>>2]=l,f=Ht(l)|0,n[c>>2]=f?3:2,Le(s))}function fa(s,l){s=s|0,l=l|0;var c=0,f=0;f=l+52|0,c=n[f+4>>2]|0,l=s,n[l>>2]=n[f>>2],n[l+4>>2]=c}function Cu(s,l,c){s=s|0,l=l|0,c=y(c);var 
f=0,d=0,m=0;m=Ht(c)|0,f=(m^1)&1,d=s+132+(l<<3)|0,l=s+132+(l<<3)+4|0,m|y(h[d>>2])==c&&(n[l>>2]|0)==(f|0)||(h[d>>2]=c,n[l>>2]=f,Le(s))}function ws(s,l,c){s=s|0,l=l|0,c=y(c);var f=0,d=0,m=0;m=Ht(c)|0,f=m?0:2,d=s+132+(l<<3)|0,l=s+132+(l<<3)+4|0,m|y(h[d>>2])==c&&(n[l>>2]|0)==(f|0)||(h[d>>2]=c,n[l>>2]=f,Le(s))}function Cc(s,l,c){s=s|0,l=l|0,c=c|0;var f=0;f=l+132+(c<<3)|0,l=n[f+4>>2]|0,c=s,n[c>>2]=n[f>>2],n[c+4>>2]=l}function wc(s,l,c){s=s|0,l=l|0,c=y(c);var f=0,d=0,m=0;m=Ht(c)|0,f=(m^1)&1,d=s+60+(l<<3)|0,l=s+60+(l<<3)+4|0,m|y(h[d>>2])==c&&(n[l>>2]|0)==(f|0)||(h[d>>2]=c,n[l>>2]=f,Le(s))}function Y(s,l,c){s=s|0,l=l|0,c=y(c);var f=0,d=0,m=0;m=Ht(c)|0,f=m?0:2,d=s+60+(l<<3)|0,l=s+60+(l<<3)+4|0,m|y(h[d>>2])==c&&(n[l>>2]|0)==(f|0)||(h[d>>2]=c,n[l>>2]=f,Le(s))}function Dt(s,l,c){s=s|0,l=l|0,c=c|0;var f=0;f=l+60+(c<<3)|0,l=n[f+4>>2]|0,c=s,n[c>>2]=n[f>>2],n[c+4>>2]=l}function wl(s,l){s=s|0,l=l|0;var c=0;c=s+60+(l<<3)+4|0,(n[c>>2]|0)!=3&&(h[s+60+(l<<3)>>2]=y(ce),n[c>>2]=3,Le(s))}function bi(s,l,c){s=s|0,l=l|0,c=y(c);var f=0,d=0,m=0;m=Ht(c)|0,f=(m^1)&1,d=s+204+(l<<3)|0,l=s+204+(l<<3)+4|0,m|y(h[d>>2])==c&&(n[l>>2]|0)==(f|0)||(h[d>>2]=c,n[l>>2]=f,Le(s))}function Ic(s,l,c){s=s|0,l=l|0,c=y(c);var f=0,d=0,m=0;m=Ht(c)|0,f=m?0:2,d=s+204+(l<<3)|0,l=s+204+(l<<3)+4|0,m|y(h[d>>2])==c&&(n[l>>2]|0)==(f|0)||(h[d>>2]=c,n[l>>2]=f,Le(s))}function ct(s,l,c){s=s|0,l=l|0,c=c|0;var f=0;f=l+204+(c<<3)|0,l=n[f+4>>2]|0,c=s,n[c>>2]=n[f>>2],n[c+4>>2]=l}function wu(s,l,c){s=s|0,l=l|0,c=y(c);var f=0,d=0,m=0;m=Ht(c)|0,f=(m^1)&1,d=s+276+(l<<3)|0,l=s+276+(l<<3)+4|0,m|y(h[d>>2])==c&&(n[l>>2]|0)==(f|0)||(h[d>>2]=c,n[l>>2]=f,Le(s))}function s0(s,l){return s=s|0,l=l|0,y(h[s+276+(l<<3)>>2])}function tw(s,l){s=s|0,l=y(l);var c=0,f=0,d=0,m=0;m=Ht(l)|0,c=(m^1)&1,f=s+348|0,d=s+352|0,m|y(h[f>>2])==l&&(n[d>>2]|0)==(c|0)||(h[f>>2]=l,n[d>>2]=c,Le(s))}function RA(s,l){s=s|0,l=y(l);var c=0,f=0;f=s+348|0,c=s+352|0,y(h[f>>2])==l&&(n[c>>2]|0)==2||(h[f>>2]=l,f=Ht(l)|0,n[c>>2]=f?3:2,Le(s))}function up(s){s=s|0;var l=0;l=s+352|0,(n[l>>2]|0)!=3&&(h[s+348>>2]=y(ce),n[l>>2]=3,Le(s))}function Br(s,l){s=s|0,l=l|0;var c=0,f=0;f=l+348|0,c=n[f+4>>2]|0,l=s,n[l>>2]=n[f>>2],n[l+4>>2]=c}function Is(s,l){s=s|0,l=y(l);var c=0,f=0,d=0,m=0;m=Ht(l)|0,c=(m^1)&1,f=s+356|0,d=s+360|0,m|y(h[f>>2])==l&&(n[d>>2]|0)==(c|0)||(h[f>>2]=l,n[d>>2]=c,Le(s))}function o0(s,l){s=s|0,l=y(l);var c=0,f=0;f=s+356|0,c=s+360|0,y(h[f>>2])==l&&(n[c>>2]|0)==2||(h[f>>2]=l,f=Ht(l)|0,n[c>>2]=f?3:2,Le(s))}function a0(s){s=s|0;var l=0;l=s+360|0,(n[l>>2]|0)!=3&&(h[s+356>>2]=y(ce),n[l>>2]=3,Le(s))}function l0(s,l){s=s|0,l=l|0;var c=0,f=0;f=l+356|0,c=n[f+4>>2]|0,l=s,n[l>>2]=n[f>>2],n[l+4>>2]=c}function Ap(s,l){s=s|0,l=y(l);var c=0,f=0,d=0,m=0;m=Ht(l)|0,c=(m^1)&1,f=s+364|0,d=s+368|0,m|y(h[f>>2])==l&&(n[d>>2]|0)==(c|0)||(h[f>>2]=l,n[d>>2]=c,Le(s))}function Bc(s,l){s=s|0,l=y(l);var c=0,f=0,d=0,m=0;m=Ht(l)|0,c=m?0:2,f=s+364|0,d=s+368|0,m|y(h[f>>2])==l&&(n[d>>2]|0)==(c|0)||(h[f>>2]=l,n[d>>2]=c,Le(s))}function Ct(s,l){s=s|0,l=l|0;var c=0,f=0;f=l+364|0,c=n[f+4>>2]|0,l=s,n[l>>2]=n[f>>2],n[l+4>>2]=c}function gd(s,l){s=s|0,l=y(l);var c=0,f=0,d=0,m=0;m=Ht(l)|0,c=(m^1)&1,f=s+372|0,d=s+376|0,m|y(h[f>>2])==l&&(n[d>>2]|0)==(c|0)||(h[f>>2]=l,n[d>>2]=c,Le(s))}function c0(s,l){s=s|0,l=y(l);var c=0,f=0,d=0,m=0;m=Ht(l)|0,c=m?0:2,f=s+372|0,d=s+376|0,m|y(h[f>>2])==l&&(n[d>>2]|0)==(c|0)||(h[f>>2]=l,n[d>>2]=c,Le(s))}function u0(s,l){s=s|0,l=l|0;var c=0,f=0;f=l+372|0,c=n[f+4>>2]|0,l=s,n[l>>2]=n[f>>2],n[l+4>>2]=c}function Iu(s,l){s=s|0,l=y(l);var 
c=0,f=0,d=0,m=0;m=Ht(l)|0,c=(m^1)&1,f=s+380|0,d=s+384|0,m|y(h[f>>2])==l&&(n[d>>2]|0)==(c|0)||(h[f>>2]=l,n[d>>2]=c,Le(s))}function dd(s,l){s=s|0,l=y(l);var c=0,f=0,d=0,m=0;m=Ht(l)|0,c=m?0:2,f=s+380|0,d=s+384|0,m|y(h[f>>2])==l&&(n[d>>2]|0)==(c|0)||(h[f>>2]=l,n[d>>2]=c,Le(s))}function A0(s,l){s=s|0,l=l|0;var c=0,f=0;f=l+380|0,c=n[f+4>>2]|0,l=s,n[l>>2]=n[f>>2],n[l+4>>2]=c}function Bu(s,l){s=s|0,l=y(l);var c=0,f=0,d=0,m=0;m=Ht(l)|0,c=(m^1)&1,f=s+388|0,d=s+392|0,m|y(h[f>>2])==l&&(n[d>>2]|0)==(c|0)||(h[f>>2]=l,n[d>>2]=c,Le(s))}function rw(s,l){s=s|0,l=y(l);var c=0,f=0,d=0,m=0;m=Ht(l)|0,c=m?0:2,f=s+388|0,d=s+392|0,m|y(h[f>>2])==l&&(n[d>>2]|0)==(c|0)||(h[f>>2]=l,n[d>>2]=c,Le(s))}function md(s,l){s=s|0,l=l|0;var c=0,f=0;f=l+388|0,c=n[f+4>>2]|0,l=s,n[l>>2]=n[f>>2],n[l+4>>2]=c}function pa(s,l){s=s|0,l=y(l);var c=0;c=s+396|0,y(h[c>>2])!=l&&(h[c>>2]=l,Le(s))}function vc(s){return s=s|0,y(h[s+396>>2])}function Il(s){return s=s|0,y(h[s+400>>2])}function vu(s){return s=s|0,y(h[s+404>>2])}function f0(s){return s=s|0,y(h[s+408>>2])}function TA(s){return s=s|0,y(h[s+412>>2])}function fp(s){return s=s|0,y(h[s+416>>2])}function Ga(s){return s=s|0,y(h[s+420>>2])}function p0(s,l){switch(s=s|0,l=l|0,Un(s,(l|0)<6,2918),l|0){case 0:{l=(n[s+496>>2]|0)==2?5:4;break}case 2:{l=(n[s+496>>2]|0)==2?4:5;break}default:}return y(h[s+424+(l<<2)>>2])}function pp(s,l){switch(s=s|0,l=l|0,Un(s,(l|0)<6,2918),l|0){case 0:{l=(n[s+496>>2]|0)==2?5:4;break}case 2:{l=(n[s+496>>2]|0)==2?4:5;break}default:}return y(h[s+448+(l<<2)>>2])}function jo(s,l){switch(s=s|0,l=l|0,Un(s,(l|0)<6,2918),l|0){case 0:{l=(n[s+496>>2]|0)==2?5:4;break}case 2:{l=(n[s+496>>2]|0)==2?4:5;break}default:}return y(h[s+472+(l<<2)>>2])}function Bs(s,l){s=s|0,l=l|0;var c=0,f=Xe;return c=n[s+4>>2]|0,(c|0)==(n[l+4>>2]|0)?c?(f=y(h[s>>2]),s=y(ne(y(f-y(h[l>>2]))))>2]=0,n[f+4>>2]=0,n[f+8>>2]=0,Ha(f|0,s|0,l|0,0),Ao(s,3,(o[f+11>>0]|0)<0?n[f>>2]|0:f,c),n3e(f),C=c}function Go(s,l,c,f){s=y(s),l=y(l),c=c|0,f=f|0;var d=Xe;s=y(s*l),d=y(mR(s,y(1)));do if(wi(d,y(0))|0)s=y(s-d);else{if(s=y(s-d),wi(d,y(1))|0){s=y(s+y(1));break}if(c){s=y(s+y(1));break}f||(d>y(.5)?d=y(1):(f=wi(d,y(.5))|0,d=y(f?1:0)),s=y(s+d))}while(!1);return y(s/l)}function NA(s,l,c,f,d,m,B,k,Q,O,M,j,se){s=s|0,l=y(l),c=c|0,f=y(f),d=d|0,m=y(m),B=B|0,k=y(k),Q=y(Q),O=y(O),M=y(M),j=y(j),se=se|0;var je=0,Oe=Xe,Qe=Xe,$e=Xe,Je=Xe,lt=Xe,_e=Xe;return Q>2]),Oe!=y(0))?($e=y(Go(l,Oe,0,0)),Je=y(Go(f,Oe,0,0)),Qe=y(Go(m,Oe,0,0)),Oe=y(Go(k,Oe,0,0))):(Qe=m,$e=l,Oe=k,Je=f),(d|0)==(s|0)?je=wi(Qe,$e)|0:je=0,(B|0)==(c|0)?se=wi(Oe,Je)|0:se=0,!je&&(lt=y(l-M),!(hp(s,lt,Q)|0))&&!(gp(s,lt,d,Q)|0)?je=h0(s,lt,d,m,Q)|0:je=1,!se&&(_e=y(f-j),!(hp(c,_e,O)|0))&&!(gp(c,_e,B,O)|0)?se=h0(c,_e,B,k,O)|0:se=1,se=je&se),se|0}function hp(s,l,c){return s=s|0,l=y(l),c=y(c),(s|0)==1?s=wi(l,c)|0:s=0,s|0}function gp(s,l,c,f){return s=s|0,l=y(l),c=c|0,f=y(f),(s|0)==2&(c|0)==0?l>=f?s=1:s=wi(l,f)|0:s=0,s|0}function h0(s,l,c,f,d){return s=s|0,l=y(l),c=c|0,f=y(f),d=y(d),(s|0)==2&(c|0)==2&f>l?d<=l?s=1:s=wi(l,d)|0:s=0,s|0}function ha(s,l,c,f,d,m,B,k,Q,O,M){s=s|0,l=y(l),c=y(c),f=f|0,d=d|0,m=m|0,B=y(B),k=y(k),Q=Q|0,O=O|0,M=M|0;var j=0,se=0,je=0,Oe=0,Qe=Xe,$e=Xe,Je=0,lt=0,_e=0,qe=0,Lt=0,Or=0,cr=0,Xt=0,Pr=0,Tr=0,ar=0,xn=Xe,go=Xe,mo=Xe,yo=0,Ca=0;ar=C,C=C+160|0,Xt=ar+152|0,cr=ar+120|0,Or=ar+104|0,_e=ar+72|0,Oe=ar+56|0,Lt=ar+8|0,lt=ar,qe=(n[2279]|0)+1|0,n[2279]=qe,Pr=s+984|0,o[Pr>>0]|0&&(n[s+512>>2]|0)!=(n[2278]|0)?Je=4:(n[s+516>>2]|0)==(f|0)?Tr=0:Je=4,(Je|0)==4&&(n[s+520>>2]=0,n[s+924>>2]=-1,n[s+928>>2]=-1,h[s+932>>2]=y(-1),h[s+936>>2]=y(-1),Tr=1);e:do 
if(n[s+964>>2]|0)if(Qe=y(cn(s,2,B)),$e=y(cn(s,0,B)),j=s+916|0,mo=y(h[j>>2]),go=y(h[s+920>>2]),xn=y(h[s+932>>2]),NA(d,l,m,c,n[s+924>>2]|0,mo,n[s+928>>2]|0,go,xn,y(h[s+936>>2]),Qe,$e,M)|0)Je=22;else if(je=n[s+520>>2]|0,!je)Je=21;else for(se=0;;){if(j=s+524+(se*24|0)|0,xn=y(h[j>>2]),go=y(h[s+524+(se*24|0)+4>>2]),mo=y(h[s+524+(se*24|0)+16>>2]),NA(d,l,m,c,n[s+524+(se*24|0)+8>>2]|0,xn,n[s+524+(se*24|0)+12>>2]|0,go,mo,y(h[s+524+(se*24|0)+20>>2]),Qe,$e,M)|0){Je=22;break e}if(se=se+1|0,se>>>0>=je>>>0){Je=21;break}}else{if(Q){if(j=s+916|0,!(wi(y(h[j>>2]),l)|0)){Je=21;break}if(!(wi(y(h[s+920>>2]),c)|0)){Je=21;break}if((n[s+924>>2]|0)!=(d|0)){Je=21;break}j=(n[s+928>>2]|0)==(m|0)?j:0,Je=22;break}if(je=n[s+520>>2]|0,!je)Je=21;else for(se=0;;){if(j=s+524+(se*24|0)|0,wi(y(h[j>>2]),l)|0&&wi(y(h[s+524+(se*24|0)+4>>2]),c)|0&&(n[s+524+(se*24|0)+8>>2]|0)==(d|0)&&(n[s+524+(se*24|0)+12>>2]|0)==(m|0)){Je=22;break e}if(se=se+1|0,se>>>0>=je>>>0){Je=21;break}}}while(!1);do if((Je|0)==21)o[11697]|0?(j=0,Je=28):(j=0,Je=31);else if((Je|0)==22){if(se=(o[11697]|0)!=0,!((j|0)!=0&(Tr^1)))if(se){Je=28;break}else{Je=31;break}Oe=j+16|0,n[s+908>>2]=n[Oe>>2],je=j+20|0,n[s+912>>2]=n[je>>2],(o[11698]|0)==0|se^1||(n[lt>>2]=LA(qe)|0,n[lt+4>>2]=qe,Ao(s,4,2972,lt),se=n[s+972>>2]|0,se|0&&ef[se&127](s),d=Ya(d,Q)|0,m=Ya(m,Q)|0,Ca=+y(h[Oe>>2]),yo=+y(h[je>>2]),n[Lt>>2]=d,n[Lt+4>>2]=m,E[Lt+8>>3]=+l,E[Lt+16>>3]=+c,E[Lt+24>>3]=Ca,E[Lt+32>>3]=yo,n[Lt+40>>2]=O,Ao(s,4,2989,Lt))}while(!1);return(Je|0)==28&&(se=LA(qe)|0,n[Oe>>2]=se,n[Oe+4>>2]=qe,n[Oe+8>>2]=Tr?3047:11699,Ao(s,4,3038,Oe),se=n[s+972>>2]|0,se|0&&ef[se&127](s),Lt=Ya(d,Q)|0,Je=Ya(m,Q)|0,n[_e>>2]=Lt,n[_e+4>>2]=Je,E[_e+8>>3]=+l,E[_e+16>>3]=+c,n[_e+24>>2]=O,Ao(s,4,3049,_e),Je=31),(Je|0)==31&&(si(s,l,c,f,d,m,B,k,Q,M),o[11697]|0&&(se=n[2279]|0,Lt=LA(se)|0,n[Or>>2]=Lt,n[Or+4>>2]=se,n[Or+8>>2]=Tr?3047:11699,Ao(s,4,3083,Or),se=n[s+972>>2]|0,se|0&&ef[se&127](s),Lt=Ya(d,Q)|0,Or=Ya(m,Q)|0,yo=+y(h[s+908>>2]),Ca=+y(h[s+912>>2]),n[cr>>2]=Lt,n[cr+4>>2]=Or,E[cr+8>>3]=yo,E[cr+16>>3]=Ca,n[cr+24>>2]=O,Ao(s,4,3092,cr)),n[s+516>>2]=f,j||(se=s+520|0,j=n[se>>2]|0,(j|0)==16&&(o[11697]|0&&Ao(s,4,3124,Xt),n[se>>2]=0,j=0),Q?j=s+916|0:(n[se>>2]=j+1,j=s+524+(j*24|0)|0),h[j>>2]=l,h[j+4>>2]=c,n[j+8>>2]=d,n[j+12>>2]=m,n[j+16>>2]=n[s+908>>2],n[j+20>>2]=n[s+912>>2],j=0)),Q&&(n[s+416>>2]=n[s+908>>2],n[s+420>>2]=n[s+912>>2],o[s+985>>0]=1,o[Pr>>0]=0),n[2279]=(n[2279]|0)+-1,n[s+512>>2]=n[2278],C=ar,Tr|(j|0)==0|0}function cn(s,l,c){s=s|0,l=l|0,c=y(c);var f=Xe;return f=y(V(s,l,c)),y(f+y(re(s,l,c)))}function Ao(s,l,c,f){s=s|0,l=l|0,c=c|0,f=f|0;var d=0,m=0;m=C,C=C+16|0,d=m,n[d>>2]=f,s?f=n[s+976>>2]|0:f=0,d0(f,s,l,c,d),C=m}function LA(s){return s=s|0,(s>>>0>60?3201:3201+(60-s)|0)|0}function Ya(s,l){s=s|0,l=l|0;var c=0,f=0,d=0;return d=C,C=C+32|0,c=d+12|0,f=d,n[c>>2]=n[254],n[c+4>>2]=n[255],n[c+8>>2]=n[256],n[f>>2]=n[257],n[f+4>>2]=n[258],n[f+8>>2]=n[259],(s|0)>2?s=11699:s=n[(l?f:c)+(s<<2)>>2]|0,C=d,s|0}function si(s,l,c,f,d,m,B,k,Q,O){s=s|0,l=y(l),c=y(c),f=f|0,d=d|0,m=m|0,B=y(B),k=y(k),Q=Q|0,O=O|0;var 
M=0,j=0,se=0,je=0,Oe=Xe,Qe=Xe,$e=Xe,Je=Xe,lt=Xe,_e=Xe,qe=Xe,Lt=0,Or=0,cr=0,Xt=Xe,Pr=Xe,Tr=0,ar=Xe,xn=0,go=0,mo=0,yo=0,Ca=0,xp=0,kp=0,bl=0,Qp=0,Tu=0,Nu=0,Fp=0,Rp=0,Tp=0,Xr=0,xl=0,Np=0,kc=0,Lp=Xe,Mp=Xe,Lu=Xe,Mu=Xe,Qc=Xe,Ys=0,Za=0,Wo=0,kl=0,rf=0,nf=Xe,Ou=Xe,sf=Xe,of=Xe,Ws=Xe,Ps=Xe,Ql=0,Rn=Xe,af=Xe,Eo=Xe,Fc=Xe,Co=Xe,Rc=Xe,lf=0,cf=0,Tc=Xe,Ks=Xe,Fl=0,uf=0,Af=0,ff=0,xr=Xe,zn=0,Ss=0,wo=0,Vs=0,Fr=0,ur=0,Rl=0,zt=Xe,pf=0,li=0;Rl=C,C=C+16|0,Ys=Rl+12|0,Za=Rl+8|0,Wo=Rl+4|0,kl=Rl,Un(s,(d|0)==0|(Ht(l)|0)^1,3326),Un(s,(m|0)==0|(Ht(c)|0)^1,3406),Ss=mt(s,f)|0,n[s+496>>2]=Ss,Fr=fr(2,Ss)|0,ur=fr(0,Ss)|0,h[s+440>>2]=y(V(s,Fr,B)),h[s+444>>2]=y(re(s,Fr,B)),h[s+428>>2]=y(V(s,ur,B)),h[s+436>>2]=y(re(s,ur,B)),h[s+464>>2]=y(Cr(s,Fr)),h[s+468>>2]=y(yn(s,Fr)),h[s+452>>2]=y(Cr(s,ur)),h[s+460>>2]=y(yn(s,ur)),h[s+488>>2]=y(oi(s,Fr,B)),h[s+492>>2]=y(Li(s,Fr,B)),h[s+476>>2]=y(oi(s,ur,B)),h[s+484>>2]=y(Li(s,ur,B));do if(n[s+964>>2]|0)y0(s,l,c,d,m,B,k);else{if(wo=s+948|0,Vs=(n[s+952>>2]|0)-(n[wo>>2]|0)>>2,!Vs){Sv(s,l,c,d,m,B,k);break}if(!Q&&bv(s,l,c,d,m,B,k)|0)break;ee(s),xl=s+508|0,o[xl>>0]=0,Fr=fr(n[s+4>>2]|0,Ss)|0,ur=iw(Fr,Ss)|0,zn=he(Fr)|0,Np=n[s+8>>2]|0,uf=s+28|0,kc=(n[uf>>2]|0)!=0,Co=zn?B:k,Tc=zn?k:B,Lp=y(mp(s,Fr,B)),Mp=y(sw(s,Fr,B)),Oe=y(mp(s,ur,B)),Rc=y(En(s,Fr,B)),Ks=y(En(s,ur,B)),cr=zn?d:m,Fl=zn?m:d,xr=zn?Rc:Ks,lt=zn?Ks:Rc,Fc=y(cn(s,2,B)),Je=y(cn(s,0,B)),Qe=y(y(Yr(s+364|0,B))-xr),$e=y(y(Yr(s+380|0,B))-xr),_e=y(y(Yr(s+372|0,k))-lt),qe=y(y(Yr(s+388|0,k))-lt),Lu=zn?Qe:_e,Mu=zn?$e:qe,Fc=y(l-Fc),l=y(Fc-xr),Ht(l)|0?xr=l:xr=y(_n(y(k0(l,$e)),Qe)),af=y(c-Je),l=y(af-lt),Ht(l)|0?Eo=l:Eo=y(_n(y(k0(l,qe)),_e)),Qe=zn?xr:Eo,Rn=zn?Eo:xr;e:do if((cr|0)==1)for(f=0,j=0;;){if(M=ms(s,j)|0,!f)y(is(M))>y(0)&&y(Gs(M))>y(0)?f=M:f=0;else if(wd(M)|0){je=0;break e}if(j=j+1|0,j>>>0>=Vs>>>0){je=f;break}}else je=0;while(!1);Lt=je+500|0,Or=je+504|0,f=0,M=0,l=y(0),se=0;do{if(j=n[(n[wo>>2]|0)+(se<<2)>>2]|0,(n[j+36>>2]|0)==1)Du(j),o[j+985>>0]=1,o[j+984>>0]=0;else{Bl(j),Q&&dp(j,mt(j,Ss)|0,Qe,Rn,xr);do if((n[j+24>>2]|0)!=1)if((j|0)==(je|0)){n[Lt>>2]=n[2278],h[Or>>2]=y(0);break}else{Id(s,j,xr,d,Eo,xr,Eo,m,Ss,O);break}else M|0&&(n[M+960>>2]=j),n[j+960>>2]=0,M=j,f=f|0?f:j;while(!1);Ps=y(h[j+504>>2]),l=y(l+y(Ps+y(cn(j,Fr,xr))))}se=se+1|0}while((se|0)!=(Vs|0));for(mo=l>Qe,Ql=kc&((cr|0)==2&mo)?1:cr,xn=(Fl|0)==1,Ca=xn&(Q^1),xp=(Ql|0)==1,kp=(Ql|0)==2,bl=976+(Fr<<2)|0,Qp=(Fl|2|0)==2,Tp=xn&(kc^1),Tu=1040+(ur<<2)|0,Nu=1040+(Fr<<2)|0,Fp=976+(ur<<2)|0,Rp=(Fl|0)!=1,mo=kc&((cr|0)!=0&mo),go=s+976|0,xn=xn^1,l=Qe,Tr=0,yo=0,Ps=y(0),Qc=y(0);;){e:do if(Tr>>>0>>0)for(Or=n[wo>>2]|0,se=0,qe=y(0),_e=y(0),$e=y(0),Qe=y(0),j=0,M=0,je=Tr;;){if(Lt=n[Or+(je<<2)>>2]|0,(n[Lt+36>>2]|0)!=1&&(n[Lt+940>>2]=yo,(n[Lt+24>>2]|0)!=1)){if(Je=y(cn(Lt,Fr,xr)),Xr=n[bl>>2]|0,c=y(Yr(Lt+380+(Xr<<3)|0,Co)),lt=y(h[Lt+504>>2]),c=y(k0(c,lt)),c=y(_n(y(Yr(Lt+364+(Xr<<3)|0,Co)),c)),kc&(se|0)!=0&y(Je+y(_e+c))>l){m=se,Je=qe,cr=je;break e}Je=y(Je+c),c=y(_e+Je),Je=y(qe+Je),wd(Lt)|0&&($e=y($e+y(is(Lt))),Qe=y(Qe-y(lt*y(Gs(Lt))))),M|0&&(n[M+960>>2]=Lt),n[Lt+960>>2]=0,se=se+1|0,M=Lt,j=j|0?j:Lt}else Je=qe,c=_e;if(je=je+1|0,je>>>0>>0)qe=Je,_e=c;else{m=se,cr=je;break}}else m=0,Je=y(0),$e=y(0),Qe=y(0),j=0,cr=Tr;while(!1);Xr=$e>y(0)&$ey(0)&QeMu&((Ht(Mu)|0)^1))l=Mu,Xr=51;else if(o[(n[go>>2]|0)+3>>0]|0)Xr=51;else{if(Xt!=y(0)&&y(is(s))!=y(0)){Xr=53;break}l=Je,Xr=53}while(!1);if((Xr|0)==51&&(Xr=0,Ht(l)|0?Xr=53:(Pr=y(l-Je),ar=l)),(Xr|0)==53&&(Xr=0,Je>2]|0,je=Pry(0),_e=y(Pr/Xt),$e=y(0),Je=y(0),l=y(0),M=j;do 
c=y(Yr(M+380+(se<<3)|0,Co)),Qe=y(Yr(M+364+(se<<3)|0,Co)),Qe=y(k0(c,y(_n(Qe,y(h[M+504>>2]))))),je?(c=y(Qe*y(Gs(M))),c!=y(-0)&&(zt=y(Qe-y(lt*c)),nf=y(Ii(M,Fr,zt,ar,xr)),zt!=nf)&&($e=y($e-y(nf-Qe)),l=y(l+c))):Lt&&(Ou=y(is(M)),Ou!=y(0))&&(zt=y(Qe+y(_e*Ou)),sf=y(Ii(M,Fr,zt,ar,xr)),zt!=sf)&&($e=y($e-y(sf-Qe)),Je=y(Je-Ou)),M=n[M+960>>2]|0;while(M|0);if(l=y(qe+l),Qe=y(Pr+$e),rf)l=y(0);else{lt=y(Xt+Je),je=n[bl>>2]|0,Lt=Qey(0),lt=y(Qe/lt),l=y(0);do{zt=y(Yr(j+380+(je<<3)|0,Co)),$e=y(Yr(j+364+(je<<3)|0,Co)),$e=y(k0(zt,y(_n($e,y(h[j+504>>2]))))),Lt?(zt=y($e*y(Gs(j))),Qe=y(-zt),zt!=y(-0)?(zt=y(_e*Qe),Qe=y(Ii(j,Fr,y($e+(Or?Qe:zt)),ar,xr))):Qe=$e):se&&(of=y(is(j)),of!=y(0))?Qe=y(Ii(j,Fr,y($e+y(lt*of)),ar,xr)):Qe=$e,l=y(l-y(Qe-$e)),Je=y(cn(j,Fr,xr)),c=y(cn(j,ur,xr)),Qe=y(Qe+Je),h[Za>>2]=Qe,n[kl>>2]=1,$e=y(h[j+396>>2]);e:do if(Ht($e)|0){M=Ht(Rn)|0;do if(!M){if(mo|(ns(j,ur,Rn)|0|xn)||(da(s,j)|0)!=4||(n[(vl(j,ur)|0)+4>>2]|0)==3||(n[(Sc(j,ur)|0)+4>>2]|0)==3)break;h[Ys>>2]=Rn,n[Wo>>2]=1;break e}while(!1);if(ns(j,ur,Rn)|0){M=n[j+992+(n[Fp>>2]<<2)>>2]|0,zt=y(c+y(Yr(M,Rn))),h[Ys>>2]=zt,M=Rp&(n[M+4>>2]|0)==2,n[Wo>>2]=((Ht(zt)|0|M)^1)&1;break}else{h[Ys>>2]=Rn,n[Wo>>2]=M?0:2;break}}else zt=y(Qe-Je),Xt=y(zt/$e),zt=y($e*zt),n[Wo>>2]=1,h[Ys>>2]=y(c+(zn?Xt:zt));while(!1);yr(j,Fr,ar,xr,kl,Za),yr(j,ur,Rn,xr,Wo,Ys);do if(!(ns(j,ur,Rn)|0)&&(da(s,j)|0)==4){if((n[(vl(j,ur)|0)+4>>2]|0)==3){M=0;break}M=(n[(Sc(j,ur)|0)+4>>2]|0)!=3}else M=0;while(!1);zt=y(h[Za>>2]),Xt=y(h[Ys>>2]),pf=n[kl>>2]|0,li=n[Wo>>2]|0,ha(j,zn?zt:Xt,zn?Xt:zt,Ss,zn?pf:li,zn?li:pf,xr,Eo,Q&(M^1),3488,O)|0,o[xl>>0]=o[xl>>0]|o[j+508>>0],j=n[j+960>>2]|0}while(j|0)}}else l=y(0);if(l=y(Pr+l),li=l>0]=li|u[xl>>0],kp&l>y(0)?(M=n[bl>>2]|0,n[s+364+(M<<3)+4>>2]|0&&(Ws=y(Yr(s+364+(M<<3)|0,Co)),Ws>=y(0))?Qe=y(_n(y(0),y(Ws-y(ar-l)))):Qe=y(0)):Qe=l,Lt=Tr>>>0>>0,Lt){je=n[wo>>2]|0,se=Tr,M=0;do j=n[je+(se<<2)>>2]|0,n[j+24>>2]|0||(M=((n[(vl(j,Fr)|0)+4>>2]|0)==3&1)+M|0,M=M+((n[(Sc(j,Fr)|0)+4>>2]|0)==3&1)|0),se=se+1|0;while((se|0)!=(cr|0));M?(Je=y(0),c=y(0)):Xr=101}else Xr=101;e:do if((Xr|0)==101)switch(Xr=0,Np|0){case 1:{M=0,Je=y(Qe*y(.5)),c=y(0);break e}case 2:{M=0,Je=Qe,c=y(0);break e}case 3:{if(m>>>0<=1){M=0,Je=y(0),c=y(0);break e}c=y((m+-1|0)>>>0),M=0,Je=y(0),c=y(y(_n(Qe,y(0)))/c);break e}case 5:{c=y(Qe/y((m+1|0)>>>0)),M=0,Je=c;break e}case 4:{c=y(Qe/y(m>>>0)),M=0,Je=y(c*y(.5));break e}default:{M=0,Je=y(0),c=y(0);break e}}while(!1);if(l=y(Lp+Je),Lt){$e=y(Qe/y(M|0)),se=n[wo>>2]|0,j=Tr,Qe=y(0);do{M=n[se+(j<<2)>>2]|0;e:do if((n[M+36>>2]|0)!=1){switch(n[M+24>>2]|0){case 1:{if(gi(M,Fr)|0){if(!Q)break e;zt=y(Mr(M,Fr,ar)),zt=y(zt+y(Cr(s,Fr))),zt=y(zt+y(V(M,Fr,xr))),h[M+400+(n[Nu>>2]<<2)>>2]=zt;break e}break}case 0:if(li=(n[(vl(M,Fr)|0)+4>>2]|0)==3,zt=y($e+l),l=li?zt:l,Q&&(li=M+400+(n[Nu>>2]<<2)|0,h[li>>2]=y(l+y(h[li>>2]))),li=(n[(Sc(M,Fr)|0)+4>>2]|0)==3,zt=y($e+l),l=li?zt:l,Ca){zt=y(c+y(cn(M,Fr,xr))),Qe=Rn,l=y(l+y(zt+y(h[M+504>>2])));break e}else{l=y(l+y(c+y(ss(M,Fr,xr)))),Qe=y(_n(Qe,y(ss(M,ur,xr))));break e}default:}Q&&(zt=y(Je+y(Cr(s,Fr))),li=M+400+(n[Nu>>2]<<2)|0,h[li>>2]=y(zt+y(h[li>>2])))}while(!1);j=j+1|0}while((j|0)!=(cr|0))}else Qe=y(0);if(c=y(Mp+l),Qp?Je=y(y(Ii(s,ur,y(Ks+Qe),Tc,B))-Ks):Je=Rn,$e=y(y(Ii(s,ur,y(Ks+(Tp?Rn:Qe)),Tc,B))-Ks),Lt&Q){j=Tr;do{se=n[(n[wo>>2]|0)+(j<<2)>>2]|0;do if((n[se+36>>2]|0)!=1){if((n[se+24>>2]|0)==1){if(gi(se,ur)|0){if(zt=y(Mr(se,ur,Rn)),zt=y(zt+y(Cr(s,ur))),zt=y(zt+y(V(se,ur,xr))),M=n[Tu>>2]|0,h[se+400+(M<<2)>>2]=zt,!(Ht(zt)|0))break}else M=n[Tu>>2]|0;zt=y(Cr(s,ur)),h[se+400+(M<<2)>>2]=y(zt+y(V(se,ur,xr)));break}M=da(s,se)|0;do 
if((M|0)==4){if((n[(vl(se,ur)|0)+4>>2]|0)==3){Xr=139;break}if((n[(Sc(se,ur)|0)+4>>2]|0)==3){Xr=139;break}if(ns(se,ur,Rn)|0){l=Oe;break}pf=n[se+908+(n[bl>>2]<<2)>>2]|0,n[Ys>>2]=pf,l=y(h[se+396>>2]),li=Ht(l)|0,Qe=(n[v>>2]=pf,y(h[v>>2])),li?l=$e:(Pr=y(cn(se,ur,xr)),zt=y(Qe/l),l=y(l*Qe),l=y(Pr+(zn?zt:l))),h[Za>>2]=l,h[Ys>>2]=y(y(cn(se,Fr,xr))+Qe),n[Wo>>2]=1,n[kl>>2]=1,yr(se,Fr,ar,xr,Wo,Ys),yr(se,ur,Rn,xr,kl,Za),l=y(h[Ys>>2]),Pr=y(h[Za>>2]),zt=zn?l:Pr,l=zn?Pr:l,li=((Ht(zt)|0)^1)&1,ha(se,zt,l,Ss,li,((Ht(l)|0)^1)&1,xr,Eo,1,3493,O)|0,l=Oe}else Xr=139;while(!1);e:do if((Xr|0)==139){Xr=0,l=y(Je-y(ss(se,ur,xr)));do if((n[(vl(se,ur)|0)+4>>2]|0)==3){if((n[(Sc(se,ur)|0)+4>>2]|0)!=3)break;l=y(Oe+y(_n(y(0),y(l*y(.5)))));break e}while(!1);if((n[(Sc(se,ur)|0)+4>>2]|0)==3){l=Oe;break}if((n[(vl(se,ur)|0)+4>>2]|0)==3){l=y(Oe+y(_n(y(0),l)));break}switch(M|0){case 1:{l=Oe;break e}case 2:{l=y(Oe+y(l*y(.5)));break e}default:{l=y(Oe+l);break e}}}while(!1);zt=y(Ps+l),li=se+400+(n[Tu>>2]<<2)|0,h[li>>2]=y(zt+y(h[li>>2]))}while(!1);j=j+1|0}while((j|0)!=(cr|0))}if(Ps=y(Ps+$e),Qc=y(_n(Qc,c)),m=yo+1|0,cr>>>0>=Vs>>>0)break;l=ar,Tr=cr,yo=m}do if(Q){if(M=m>>>0>1,!M&&!(Yi(s)|0))break;if(!(Ht(Rn)|0)){l=y(Rn-Ps);e:do switch(n[s+12>>2]|0){case 3:{Oe=y(Oe+l),_e=y(0);break}case 2:{Oe=y(Oe+y(l*y(.5))),_e=y(0);break}case 4:{Rn>Ps?_e=y(l/y(m>>>0)):_e=y(0);break}case 7:if(Rn>Ps){Oe=y(Oe+y(l/y(m<<1>>>0))),_e=y(l/y(m>>>0)),_e=M?_e:y(0);break e}else{Oe=y(Oe+y(l*y(.5))),_e=y(0);break e}case 6:{_e=y(l/y(yo>>>0)),_e=Rn>Ps&M?_e:y(0);break}default:_e=y(0)}while(!1);if(m|0)for(Lt=1040+(ur<<2)|0,Or=976+(ur<<2)|0,je=0,j=0;;){e:do if(j>>>0>>0)for(Qe=y(0),$e=y(0),l=y(0),se=j;;){M=n[(n[wo>>2]|0)+(se<<2)>>2]|0;do if((n[M+36>>2]|0)!=1&&!(n[M+24>>2]|0)){if((n[M+940>>2]|0)!=(je|0))break e;if(Bd(M,ur)|0&&(zt=y(h[M+908+(n[Or>>2]<<2)>>2]),l=y(_n(l,y(zt+y(cn(M,ur,xr)))))),(da(s,M)|0)!=5)break;Ws=y(Ka(M)),Ws=y(Ws+y(V(M,0,xr))),zt=y(h[M+912>>2]),zt=y(y(zt+y(cn(M,0,xr)))-Ws),Ws=y(_n($e,Ws)),zt=y(_n(Qe,zt)),Qe=zt,$e=Ws,l=y(_n(l,y(Ws+zt)))}while(!1);if(M=se+1|0,M>>>0>>0)se=M;else{se=M;break}}else $e=y(0),l=y(0),se=j;while(!1);if(lt=y(_e+l),c=Oe,Oe=y(Oe+lt),j>>>0>>0){Je=y(c+$e),M=j;do{j=n[(n[wo>>2]|0)+(M<<2)>>2]|0;e:do if((n[j+36>>2]|0)!=1&&!(n[j+24>>2]|0))switch(da(s,j)|0){case 1:{zt=y(c+y(V(j,ur,xr))),h[j+400+(n[Lt>>2]<<2)>>2]=zt;break e}case 3:{zt=y(y(Oe-y(re(j,ur,xr)))-y(h[j+908+(n[Or>>2]<<2)>>2])),h[j+400+(n[Lt>>2]<<2)>>2]=zt;break e}case 2:{zt=y(c+y(y(lt-y(h[j+908+(n[Or>>2]<<2)>>2]))*y(.5))),h[j+400+(n[Lt>>2]<<2)>>2]=zt;break e}case 4:{if(zt=y(c+y(V(j,ur,xr))),h[j+400+(n[Lt>>2]<<2)>>2]=zt,ns(j,ur,Rn)|0||(zn?(Qe=y(h[j+908>>2]),l=y(Qe+y(cn(j,Fr,xr))),$e=lt):($e=y(h[j+912>>2]),$e=y($e+y(cn(j,ur,xr))),l=lt,Qe=y(h[j+908>>2])),wi(l,Qe)|0&&wi($e,y(h[j+912>>2]))|0))break e;ha(j,l,$e,Ss,1,1,xr,Eo,1,3501,O)|0;break e}case 5:{h[j+404>>2]=y(y(Je-y(Ka(j)))+y(Mr(j,0,Rn)));break e}default:break e}while(!1);M=M+1|0}while((M|0)!=(se|0))}if(je=je+1|0,(je|0)==(m|0))break;j=se}}}while(!1);if(h[s+908>>2]=y(Ii(s,2,Fc,B,B)),h[s+912>>2]=y(Ii(s,0,af,k,B)),Ql|0&&(lf=n[s+32>>2]|0,cf=(Ql|0)==2,!(cf&(lf|0)!=2))?cf&(lf|0)==2&&(l=y(Rc+ar),l=y(_n(y(k0(l,y(MA(s,Fr,Qc,Co)))),Rc)),Xr=198):(l=y(Ii(s,Fr,Qc,Co,B)),Xr=198),(Xr|0)==198&&(h[s+908+(n[976+(Fr<<2)>>2]<<2)>>2]=l),Fl|0&&(Af=n[s+32>>2]|0,ff=(Fl|0)==2,!(ff&(Af|0)!=2))?ff&(Af|0)==2&&(l=y(Ks+Rn),l=y(_n(y(k0(l,y(MA(s,ur,y(Ks+Ps),Tc)))),Ks)),Xr=204):(l=y(Ii(s,ur,y(Ks+Ps),Tc,B)),Xr=204),(Xr|0)==204&&(h[s+908+(n[976+(ur<<2)>>2]<<2)>>2]=l),Q){if((n[uf>>2]|0)==2){j=976+(ur<<2)|0,se=1040+(ur<<2)|0,M=0;do 
je=ms(s,M)|0,n[je+24>>2]|0||(pf=n[j>>2]|0,zt=y(h[s+908+(pf<<2)>>2]),li=je+400+(n[se>>2]<<2)|0,zt=y(zt-y(h[li>>2])),h[li>>2]=y(zt-y(h[je+908+(pf<<2)>>2]))),M=M+1|0;while((M|0)!=(Vs|0))}if(f|0){M=zn?Ql:d;do vd(s,f,xr,M,Eo,Ss,O),f=n[f+960>>2]|0;while(f|0)}if(M=(Fr|2|0)==3,j=(ur|2|0)==3,M|j){f=0;do se=n[(n[wo>>2]|0)+(f<<2)>>2]|0,(n[se+36>>2]|0)!=1&&(M&&yp(s,se,Fr),j&&yp(s,se,ur)),f=f+1|0;while((f|0)!=(Vs|0))}}}while(!1);C=Rl}function ga(s,l){s=s|0,l=y(l);var c=0;la(s,l>=y(0),3147),c=l==y(0),h[s+4>>2]=c?y(0):l}function Dc(s,l,c,f){s=s|0,l=y(l),c=y(c),f=f|0;var d=Xe,m=Xe,B=0,k=0,Q=0;n[2278]=(n[2278]|0)+1,Bl(s),ns(s,2,l)|0?(d=y(Yr(n[s+992>>2]|0,l)),Q=1,d=y(d+y(cn(s,2,l)))):(d=y(Yr(s+380|0,l)),d>=y(0)?Q=2:(Q=((Ht(l)|0)^1)&1,d=l)),ns(s,0,c)|0?(m=y(Yr(n[s+996>>2]|0,c)),k=1,m=y(m+y(cn(s,0,l)))):(m=y(Yr(s+388|0,c)),m>=y(0)?k=2:(k=((Ht(c)|0)^1)&1,m=c)),B=s+976|0,ha(s,d,m,f,Q,k,l,c,1,3189,n[B>>2]|0)|0&&(dp(s,n[s+496>>2]|0,l,c,l),Pc(s,y(h[(n[B>>2]|0)+4>>2]),y(0),y(0)),o[11696]|0)&&yd(s,7)}function Bl(s){s=s|0;var l=0,c=0,f=0,d=0,m=0,B=0,k=0,Q=0,O=0,M=0;k=C,C=C+32|0,B=k+24|0,m=k+16|0,f=k+8|0,d=k,c=0;do l=s+380+(c<<3)|0,n[s+380+(c<<3)+4>>2]|0&&(Q=l,O=n[Q+4>>2]|0,M=f,n[M>>2]=n[Q>>2],n[M+4>>2]=O,M=s+364+(c<<3)|0,O=n[M+4>>2]|0,Q=d,n[Q>>2]=n[M>>2],n[Q+4>>2]=O,n[m>>2]=n[f>>2],n[m+4>>2]=n[f+4>>2],n[B>>2]=n[d>>2],n[B+4>>2]=n[d+4>>2],Bs(m,B)|0)||(l=s+348+(c<<3)|0),n[s+992+(c<<2)>>2]=l,c=c+1|0;while((c|0)!=2);C=k}function ns(s,l,c){s=s|0,l=l|0,c=y(c);var f=0;switch(s=n[s+992+(n[976+(l<<2)>>2]<<2)>>2]|0,n[s+4>>2]|0){case 0:case 3:{s=0;break}case 1:{y(h[s>>2])>2])>2]|0){case 2:{l=y(y(y(h[s>>2])*l)/y(100));break}case 1:{l=y(h[s>>2]);break}default:l=y(ce)}return y(l)}function dp(s,l,c,f,d){s=s|0,l=l|0,c=y(c),f=y(f),d=y(d);var m=0,B=Xe;l=n[s+944>>2]|0?l:1,m=fr(n[s+4>>2]|0,l)|0,l=iw(m,l)|0,c=y(Dd(s,m,c)),f=y(Dd(s,l,f)),B=y(c+y(V(s,m,d))),h[s+400+(n[1040+(m<<2)>>2]<<2)>>2]=B,c=y(c+y(re(s,m,d))),h[s+400+(n[1e3+(m<<2)>>2]<<2)>>2]=c,c=y(f+y(V(s,l,d))),h[s+400+(n[1040+(l<<2)>>2]<<2)>>2]=c,d=y(f+y(re(s,l,d))),h[s+400+(n[1e3+(l<<2)>>2]<<2)>>2]=d}function Pc(s,l,c,f){s=s|0,l=y(l),c=y(c),f=y(f);var d=0,m=0,B=Xe,k=Xe,Q=0,O=0,M=Xe,j=0,se=Xe,je=Xe,Oe=Xe,Qe=Xe;if(l!=y(0)&&(d=s+400|0,Qe=y(h[d>>2]),m=s+404|0,Oe=y(h[m>>2]),j=s+416|0,je=y(h[j>>2]),O=s+420|0,B=y(h[O>>2]),se=y(Qe+c),M=y(Oe+f),f=y(se+je),k=y(M+B),Q=(n[s+988>>2]|0)==1,h[d>>2]=y(Go(Qe,l,0,Q)),h[m>>2]=y(Go(Oe,l,0,Q)),c=y(mR(y(je*l),y(1))),wi(c,y(0))|0?m=0:m=(wi(c,y(1))|0)^1,c=y(mR(y(B*l),y(1))),wi(c,y(0))|0?d=0:d=(wi(c,y(1))|0)^1,Qe=y(Go(f,l,Q&m,Q&(m^1))),h[j>>2]=y(Qe-y(Go(se,l,0,Q))),Qe=y(Go(k,l,Q&d,Q&(d^1))),h[O>>2]=y(Qe-y(Go(M,l,0,Q))),m=(n[s+952>>2]|0)-(n[s+948>>2]|0)>>2,m|0)){d=0;do Pc(ms(s,d)|0,l,se,M),d=d+1|0;while((d|0)!=(m|0))}}function nw(s,l,c,f,d){switch(s=s|0,l=l|0,c=c|0,f=f|0,d=d|0,c|0){case 5:case 0:{s=e7(n[489]|0,f,d)|0;break}default:s=$Ue(f,d)|0}return s|0}function g0(s,l,c,f){s=s|0,l=l|0,c=c|0,f=f|0;var d=0,m=0;d=C,C=C+16|0,m=d,n[m>>2]=f,d0(s,0,l,c,m),C=d}function d0(s,l,c,f,d){if(s=s|0,l=l|0,c=c|0,f=f|0,d=d|0,s=s|0?s:956,w7[n[s+8>>2]&1](s,l,c,f,d)|0,(c|0)==5)Tt();else return}function Wa(s,l,c){s=s|0,l=l|0,c=c|0,o[s+l>>0]=c&1}function Cd(s,l){s=s|0,l=l|0;var c=0,f=0;n[s>>2]=0,n[s+4>>2]=0,n[s+8>>2]=0,c=l+4|0,f=(n[c>>2]|0)-(n[l>>2]|0)>>2,f|0&&(m0(s,f),Qt(s,n[l>>2]|0,n[c>>2]|0,f))}function m0(s,l){s=s|0,l=l|0;var c=0;if((N(s)|0)>>>0>>0&&Jr(s),l>>>0>1073741823)Tt();else{c=Kt(l<<2)|0,n[s+4>>2]=c,n[s>>2]=c,n[s+8>>2]=c+(l<<2);return}}function 
Qt(s,l,c,f){s=s|0,l=l|0,c=c|0,f=f|0,f=s+4|0,s=c-l|0,(s|0)>0&&(Dr(n[f>>2]|0,l|0,s|0)|0,n[f>>2]=(n[f>>2]|0)+(s>>>2<<2))}function N(s){return s=s|0,1073741823}function V(s,l,c){return s=s|0,l=l|0,c=y(c),he(l)|0&&n[s+96>>2]|0?s=s+92|0:s=Fn(s+60|0,n[1040+(l<<2)>>2]|0,992)|0,y(ze(s,c))}function re(s,l,c){return s=s|0,l=l|0,c=y(c),he(l)|0&&n[s+104>>2]|0?s=s+100|0:s=Fn(s+60|0,n[1e3+(l<<2)>>2]|0,992)|0,y(ze(s,c))}function he(s){return s=s|0,(s|1|0)==3|0}function ze(s,l){return s=s|0,l=y(l),(n[s+4>>2]|0)==3?l=y(0):l=y(Yr(s,l)),y(l)}function mt(s,l){return s=s|0,l=l|0,s=n[s>>2]|0,(s|0?s:(l|0)>1?l:1)|0}function fr(s,l){s=s|0,l=l|0;var c=0;e:do if((l|0)==2){switch(s|0){case 2:{s=3;break e}case 3:break;default:{c=4;break e}}s=2}else c=4;while(!1);return s|0}function Cr(s,l){s=s|0,l=l|0;var c=Xe;return he(l)|0&&n[s+312>>2]|0&&(c=y(h[s+308>>2]),c>=y(0))||(c=y(_n(y(h[(Fn(s+276|0,n[1040+(l<<2)>>2]|0,992)|0)>>2]),y(0)))),y(c)}function yn(s,l){s=s|0,l=l|0;var c=Xe;return he(l)|0&&n[s+320>>2]|0&&(c=y(h[s+316>>2]),c>=y(0))||(c=y(_n(y(h[(Fn(s+276|0,n[1e3+(l<<2)>>2]|0,992)|0)>>2]),y(0)))),y(c)}function oi(s,l,c){s=s|0,l=l|0,c=y(c);var f=Xe;return he(l)|0&&n[s+240>>2]|0&&(f=y(Yr(s+236|0,c)),f>=y(0))||(f=y(_n(y(Yr(Fn(s+204|0,n[1040+(l<<2)>>2]|0,992)|0,c)),y(0)))),y(f)}function Li(s,l,c){s=s|0,l=l|0,c=y(c);var f=Xe;return he(l)|0&&n[s+248>>2]|0&&(f=y(Yr(s+244|0,c)),f>=y(0))||(f=y(_n(y(Yr(Fn(s+204|0,n[1e3+(l<<2)>>2]|0,992)|0,c)),y(0)))),y(f)}function y0(s,l,c,f,d,m,B){s=s|0,l=y(l),c=y(c),f=f|0,d=d|0,m=y(m),B=y(B);var k=Xe,Q=Xe,O=Xe,M=Xe,j=Xe,se=Xe,je=0,Oe=0,Qe=0;Qe=C,C=C+16|0,je=Qe,Oe=s+964|0,Un(s,(n[Oe>>2]|0)!=0,3519),k=y(En(s,2,l)),Q=y(En(s,0,l)),O=y(cn(s,2,l)),M=y(cn(s,0,l)),Ht(l)|0?j=l:j=y(_n(y(0),y(y(l-O)-k))),Ht(c)|0?se=c:se=y(_n(y(0),y(y(c-M)-Q))),(f|0)==1&(d|0)==1?(h[s+908>>2]=y(Ii(s,2,y(l-O),m,m)),l=y(Ii(s,0,y(c-M),B,m))):(I7[n[Oe>>2]&1](je,s,j,f,se,d),j=y(k+y(h[je>>2])),se=y(l-O),h[s+908>>2]=y(Ii(s,2,(f|2|0)==2?j:se,m,m)),se=y(Q+y(h[je+4>>2])),l=y(c-M),l=y(Ii(s,0,(d|2|0)==2?se:l,B,m))),h[s+912>>2]=l,C=Qe}function Sv(s,l,c,f,d,m,B){s=s|0,l=y(l),c=y(c),f=f|0,d=d|0,m=y(m),B=y(B);var k=Xe,Q=Xe,O=Xe,M=Xe;O=y(En(s,2,m)),k=y(En(s,0,m)),M=y(cn(s,2,m)),Q=y(cn(s,0,m)),l=y(l-M),h[s+908>>2]=y(Ii(s,2,(f|2|0)==2?O:l,m,m)),c=y(c-Q),h[s+912>>2]=y(Ii(s,0,(d|2|0)==2?k:c,B,m))}function bv(s,l,c,f,d,m,B){s=s|0,l=y(l),c=y(c),f=f|0,d=d|0,m=y(m),B=y(B);var k=0,Q=Xe,O=Xe;return k=(f|0)==2,!(l<=y(0)&k)&&!(c<=y(0)&(d|0)==2)&&!((f|0)==1&(d|0)==1)?s=0:(Q=y(cn(s,0,m)),O=y(cn(s,2,m)),k=l>2]=y(Ii(s,2,k?y(0):l,m,m)),l=y(c-Q),k=c>2]=y(Ii(s,0,k?y(0):l,B,m)),s=1),s|0}function iw(s,l){return s=s|0,l=l|0,OA(s)|0?s=fr(2,l)|0:s=0,s|0}function mp(s,l,c){return s=s|0,l=l|0,c=y(c),c=y(oi(s,l,c)),y(c+y(Cr(s,l)))}function sw(s,l,c){return s=s|0,l=l|0,c=y(c),c=y(Li(s,l,c)),y(c+y(yn(s,l)))}function En(s,l,c){s=s|0,l=l|0,c=y(c);var f=Xe;return f=y(mp(s,l,c)),y(f+y(sw(s,l,c)))}function wd(s){return s=s|0,n[s+24>>2]|0?s=0:y(is(s))!=y(0)?s=1:s=y(Gs(s))!=y(0),s|0}function is(s){s=s|0;var l=Xe;if(n[s+944>>2]|0){if(l=y(h[s+44>>2]),Ht(l)|0)return l=y(h[s+40>>2]),s=l>y(0)&((Ht(l)|0)^1),y(s?l:y(0))}else l=y(0);return y(l)}function Gs(s){s=s|0;var l=Xe,c=0,f=Xe;do if(n[s+944>>2]|0){if(l=y(h[s+48>>2]),Ht(l)|0){if(c=o[(n[s+976>>2]|0)+2>>0]|0,!(c<<24>>24)&&(f=y(h[s+40>>2]),f>24?y(1):y(0)}}else l=y(0);while(!1);return y(l)}function Du(s){s=s|0;var l=0,c=0;if(Od(s+400|0,0,540)|0,o[s+985>>0]=1,ee(s),c=Ci(s)|0,c|0){l=s+948|0,s=0;do Du(n[(n[l>>2]|0)+(s<<2)>>2]|0),s=s+1|0;while((s|0)!=(c|0))}}function 
Id(s,l,c,f,d,m,B,k,Q,O){s=s|0,l=l|0,c=y(c),f=f|0,d=y(d),m=y(m),B=y(B),k=k|0,Q=Q|0,O=O|0;var M=0,j=Xe,se=0,je=0,Oe=Xe,Qe=Xe,$e=0,Je=Xe,lt=0,_e=Xe,qe=0,Lt=0,Or=0,cr=0,Xt=0,Pr=0,Tr=0,ar=0,xn=0,go=0;xn=C,C=C+16|0,Or=xn+12|0,cr=xn+8|0,Xt=xn+4|0,Pr=xn,ar=fr(n[s+4>>2]|0,Q)|0,qe=he(ar)|0,j=y(Yr(ow(l)|0,qe?m:B)),Lt=ns(l,2,m)|0,Tr=ns(l,0,B)|0;do if(!(Ht(j)|0)&&!(Ht(qe?c:d)|0)){if(M=l+504|0,!(Ht(y(h[M>>2]))|0)&&(!(aw(n[l+976>>2]|0,0)|0)||(n[l+500>>2]|0)==(n[2278]|0)))break;h[M>>2]=y(_n(j,y(En(l,ar,m))))}else se=7;while(!1);do if((se|0)==7){if(lt=qe^1,!(lt|Lt^1)){B=y(Yr(n[l+992>>2]|0,m)),h[l+504>>2]=y(_n(B,y(En(l,2,m))));break}if(!(qe|Tr^1)){B=y(Yr(n[l+996>>2]|0,B)),h[l+504>>2]=y(_n(B,y(En(l,0,m))));break}h[Or>>2]=y(ce),h[cr>>2]=y(ce),n[Xt>>2]=0,n[Pr>>2]=0,Je=y(cn(l,2,m)),_e=y(cn(l,0,m)),Lt?(Oe=y(Je+y(Yr(n[l+992>>2]|0,m))),h[Or>>2]=Oe,n[Xt>>2]=1,je=1):(je=0,Oe=y(ce)),Tr?(j=y(_e+y(Yr(n[l+996>>2]|0,B))),h[cr>>2]=j,n[Pr>>2]=1,M=1):(M=0,j=y(ce)),se=n[s+32>>2]|0,qe&(se|0)==2?se=2:Ht(Oe)|0&&!(Ht(c)|0)&&(h[Or>>2]=c,n[Xt>>2]=2,je=2,Oe=c),!((se|0)==2<)&&Ht(j)|0&&!(Ht(d)|0)&&(h[cr>>2]=d,n[Pr>>2]=2,M=2,j=d),Qe=y(h[l+396>>2]),$e=Ht(Qe)|0;do if($e)se=je;else{if((je|0)==1<){h[cr>>2]=y(y(Oe-Je)/Qe),n[Pr>>2]=1,M=1,se=1;break}qe&(M|0)==1?(h[Or>>2]=y(Qe*y(j-_e)),n[Xt>>2]=1,M=1,se=1):se=je}while(!1);go=Ht(c)|0,je=(da(s,l)|0)!=4,!(qe|Lt|((f|0)!=1|go)|(je|(se|0)==1))&&(h[Or>>2]=c,n[Xt>>2]=1,!$e)&&(h[cr>>2]=y(y(c-Je)/Qe),n[Pr>>2]=1,M=1),!(Tr|lt|((k|0)!=1|(Ht(d)|0))|(je|(M|0)==1))&&(h[cr>>2]=d,n[Pr>>2]=1,!$e)&&(h[Or>>2]=y(Qe*y(d-_e)),n[Xt>>2]=1),yr(l,2,m,m,Xt,Or),yr(l,0,B,m,Pr,cr),c=y(h[Or>>2]),d=y(h[cr>>2]),ha(l,c,d,Q,n[Xt>>2]|0,n[Pr>>2]|0,m,B,0,3565,O)|0,B=y(h[l+908+(n[976+(ar<<2)>>2]<<2)>>2]),h[l+504>>2]=y(_n(B,y(En(l,ar,m))))}while(!1);n[l+500>>2]=n[2278],C=xn}function Ii(s,l,c,f,d){return s=s|0,l=l|0,c=y(c),f=y(f),d=y(d),f=y(MA(s,l,c,f)),y(_n(f,y(En(s,l,d))))}function da(s,l){return s=s|0,l=l|0,l=l+20|0,l=n[(n[l>>2]|0?l:s+16|0)>>2]|0,(l|0)==5&&OA(n[s+4>>2]|0)|0&&(l=1),l|0}function vl(s,l){return s=s|0,l=l|0,he(l)|0&&n[s+96>>2]|0?l=4:l=n[1040+(l<<2)>>2]|0,s+60+(l<<3)|0}function Sc(s,l){return s=s|0,l=l|0,he(l)|0&&n[s+104>>2]|0?l=5:l=n[1e3+(l<<2)>>2]|0,s+60+(l<<3)|0}function yr(s,l,c,f,d,m){switch(s=s|0,l=l|0,c=y(c),f=y(f),d=d|0,m=m|0,c=y(Yr(s+380+(n[976+(l<<2)>>2]<<3)|0,c)),c=y(c+y(cn(s,l,f))),n[d>>2]|0){case 2:case 1:{d=Ht(c)|0,f=y(h[m>>2]),h[m>>2]=d|f>2]=2,h[m>>2]=c);break}default:}}function gi(s,l){return s=s|0,l=l|0,s=s+132|0,he(l)|0&&n[(Fn(s,4,948)|0)+4>>2]|0?s=1:s=(n[(Fn(s,n[1040+(l<<2)>>2]|0,948)|0)+4>>2]|0)!=0,s|0}function Mr(s,l,c){s=s|0,l=l|0,c=y(c);var f=0,d=0;return s=s+132|0,he(l)|0&&(f=Fn(s,4,948)|0,(n[f+4>>2]|0)!=0)?d=4:(f=Fn(s,n[1040+(l<<2)>>2]|0,948)|0,n[f+4>>2]|0?d=4:c=y(0)),(d|0)==4&&(c=y(Yr(f,c))),y(c)}function ss(s,l,c){s=s|0,l=l|0,c=y(c);var f=Xe;return f=y(h[s+908+(n[976+(l<<2)>>2]<<2)>>2]),f=y(f+y(V(s,l,c))),y(f+y(re(s,l,c)))}function Yi(s){s=s|0;var l=0,c=0,f=0;e:do if(OA(n[s+4>>2]|0)|0)l=0;else if((n[s+16>>2]|0)!=5)if(c=Ci(s)|0,!c)l=0;else for(l=0;;){if(f=ms(s,l)|0,!(n[f+24>>2]|0)&&(n[f+20>>2]|0)==5){l=1;break e}if(l=l+1|0,l>>>0>=c>>>0){l=0;break}}else l=1;while(!1);return l|0}function Bd(s,l){s=s|0,l=l|0;var c=Xe;return c=y(h[s+908+(n[976+(l<<2)>>2]<<2)>>2]),c>=y(0)&((Ht(c)|0)^1)|0}function Ka(s){s=s|0;var l=Xe,c=0,f=0,d=0,m=0,B=0,k=0,Q=Xe;if(c=n[s+968>>2]|0,c)Q=y(h[s+908>>2]),l=y(h[s+912>>2]),l=y(m7[c&0](s,Q,l)),Un(s,(Ht(l)|0)^1,3573);else{m=Ci(s)|0;do if(m|0){for(c=0,d=0;;){if(f=ms(s,d)|0,n[f+940>>2]|0){B=8;break}if((n[f+24>>2]|0)!=1)if(k=(da(s,f)|0)==5,k){c=f;break}else 
c=c|0?c:f;if(d=d+1|0,d>>>0>=m>>>0){B=8;break}}if((B|0)==8&&!c)break;return l=y(Ka(c)),y(l+y(h[c+404>>2]))}while(!1);l=y(h[s+912>>2])}return y(l)}function MA(s,l,c,f){s=s|0,l=l|0,c=y(c),f=y(f);var d=Xe,m=0;return OA(l)|0?(l=1,m=3):he(l)|0?(l=0,m=3):(f=y(ce),d=y(ce)),(m|0)==3&&(d=y(Yr(s+364+(l<<3)|0,f)),f=y(Yr(s+380+(l<<3)|0,f))),m=f=y(0)&((Ht(f)|0)^1)),c=m?f:c,m=d>=y(0)&((Ht(d)|0)^1)&c>2]|0,m)|0,Oe=iw($e,m)|0,Qe=he($e)|0,j=y(cn(l,2,c)),se=y(cn(l,0,c)),ns(l,2,c)|0?k=y(j+y(Yr(n[l+992>>2]|0,c))):gi(l,2)|0&&lr(l,2)|0?(k=y(h[s+908>>2]),Q=y(Cr(s,2)),Q=y(k-y(Q+y(yn(s,2)))),k=y(Mr(l,2,c)),k=y(Ii(l,2,y(Q-y(k+y(Pu(l,2,c)))),c,c))):k=y(ce),ns(l,0,d)|0?Q=y(se+y(Yr(n[l+996>>2]|0,d))):gi(l,0)|0&&lr(l,0)|0?(Q=y(h[s+912>>2]),lt=y(Cr(s,0)),lt=y(Q-y(lt+y(yn(s,0)))),Q=y(Mr(l,0,d)),Q=y(Ii(l,0,y(lt-y(Q+y(Pu(l,0,d)))),d,c))):Q=y(ce),O=Ht(k)|0,M=Ht(Q)|0;do if(O^M&&(je=y(h[l+396>>2]),!(Ht(je)|0)))if(O){k=y(j+y(y(Q-se)*je));break}else{lt=y(se+y(y(k-j)/je)),Q=M?lt:Q;break}while(!1);M=Ht(k)|0,O=Ht(Q)|0,M|O&&(_e=(M^1)&1,f=c>y(0)&((f|0)!=0&M),k=Qe?k:f?c:k,ha(l,k,Q,m,Qe?_e:f?2:_e,M&(O^1)&1,k,Q,0,3623,B)|0,k=y(h[l+908>>2]),k=y(k+y(cn(l,2,c))),Q=y(h[l+912>>2]),Q=y(Q+y(cn(l,0,c)))),ha(l,k,Q,m,1,1,k,Q,1,3635,B)|0,lr(l,$e)|0&&!(gi(l,$e)|0)?(_e=n[976+($e<<2)>>2]|0,lt=y(h[s+908+(_e<<2)>>2]),lt=y(lt-y(h[l+908+(_e<<2)>>2])),lt=y(lt-y(yn(s,$e))),lt=y(lt-y(re(l,$e,c))),lt=y(lt-y(Pu(l,$e,Qe?c:d))),h[l+400+(n[1040+($e<<2)>>2]<<2)>>2]=lt):Je=21;do if((Je|0)==21){if(!(gi(l,$e)|0)&&(n[s+8>>2]|0)==1){_e=n[976+($e<<2)>>2]|0,lt=y(h[s+908+(_e<<2)>>2]),lt=y(y(lt-y(h[l+908+(_e<<2)>>2]))*y(.5)),h[l+400+(n[1040+($e<<2)>>2]<<2)>>2]=lt;break}!(gi(l,$e)|0)&&(n[s+8>>2]|0)==2&&(_e=n[976+($e<<2)>>2]|0,lt=y(h[s+908+(_e<<2)>>2]),lt=y(lt-y(h[l+908+(_e<<2)>>2])),h[l+400+(n[1040+($e<<2)>>2]<<2)>>2]=lt)}while(!1);lr(l,Oe)|0&&!(gi(l,Oe)|0)?(_e=n[976+(Oe<<2)>>2]|0,lt=y(h[s+908+(_e<<2)>>2]),lt=y(lt-y(h[l+908+(_e<<2)>>2])),lt=y(lt-y(yn(s,Oe))),lt=y(lt-y(re(l,Oe,c))),lt=y(lt-y(Pu(l,Oe,Qe?d:c))),h[l+400+(n[1040+(Oe<<2)>>2]<<2)>>2]=lt):Je=30;do if((Je|0)==30&&!(gi(l,Oe)|0)){if((da(s,l)|0)==2){_e=n[976+(Oe<<2)>>2]|0,lt=y(h[s+908+(_e<<2)>>2]),lt=y(y(lt-y(h[l+908+(_e<<2)>>2]))*y(.5)),h[l+400+(n[1040+(Oe<<2)>>2]<<2)>>2]=lt;break}_e=(da(s,l)|0)==3,_e^(n[s+28>>2]|0)==2&&(_e=n[976+(Oe<<2)>>2]|0,lt=y(h[s+908+(_e<<2)>>2]),lt=y(lt-y(h[l+908+(_e<<2)>>2])),h[l+400+(n[1040+(Oe<<2)>>2]<<2)>>2]=lt)}while(!1)}function yp(s,l,c){s=s|0,l=l|0,c=c|0;var f=Xe,d=0;d=n[976+(c<<2)>>2]|0,f=y(h[l+908+(d<<2)>>2]),f=y(y(h[s+908+(d<<2)>>2])-f),f=y(f-y(h[l+400+(n[1040+(c<<2)>>2]<<2)>>2])),h[l+400+(n[1e3+(c<<2)>>2]<<2)>>2]=f}function OA(s){return s=s|0,(s|1|0)==1|0}function ow(s){s=s|0;var l=Xe;switch(n[s+56>>2]|0){case 0:case 3:{l=y(h[s+40>>2]),l>y(0)&((Ht(l)|0)^1)?s=o[(n[s+976>>2]|0)+2>>0]|0?1056:992:s=1056;break}default:s=s+52|0}return s|0}function aw(s,l){return s=s|0,l=l|0,(o[s+l>>0]|0)!=0|0}function lr(s,l){return s=s|0,l=l|0,s=s+132|0,he(l)|0&&n[(Fn(s,5,948)|0)+4>>2]|0?s=1:s=(n[(Fn(s,n[1e3+(l<<2)>>2]|0,948)|0)+4>>2]|0)!=0,s|0}function Pu(s,l,c){s=s|0,l=l|0,c=y(c);var f=0,d=0;return s=s+132|0,he(l)|0&&(f=Fn(s,5,948)|0,(n[f+4>>2]|0)!=0)?d=4:(f=Fn(s,n[1e3+(l<<2)>>2]|0,948)|0,n[f+4>>2]|0?d=4:c=y(0)),(d|0)==4&&(c=y(Yr(f,c))),y(c)}function Dd(s,l,c){return s=s|0,l=l|0,c=y(c),gi(s,l)|0?c=y(Mr(s,l,c)):c=y(-y(Pu(s,l,c))),y(c)}function Su(s){return s=y(s),h[v>>2]=s,n[v>>2]|0|0}function Ep(s,l,c,f){s=s|0,l=l|0,c=c|0,f=f|0;var d=0;n[s+12>>2]=0,n[s+16>>2]=f;do if(l)if(l>>>0>1073741823)Tt();else{d=Kt(l<<2)|0;break}else 
d=0;while(!1);n[s>>2]=d,f=d+(c<<2)|0,n[s+8>>2]=f,n[s+4>>2]=f,n[s+12>>2]=d+(l<<2)}function E0(s,l){s=s|0,l=l|0;var c=0,f=0,d=0,m=0,B=0;f=n[s>>2]|0,B=s+4|0,m=l+4|0,d=(n[B>>2]|0)-f|0,c=(n[m>>2]|0)+(0-(d>>2)<<2)|0,n[m>>2]=c,(d|0)>0?(Dr(c|0,f|0,d|0)|0,f=m,c=n[m>>2]|0):f=m,m=n[s>>2]|0,n[s>>2]=c,n[f>>2]=m,m=l+8|0,d=n[B>>2]|0,n[B>>2]=n[m>>2],n[m>>2]=d,m=s+8|0,B=l+12|0,s=n[m>>2]|0,n[m>>2]=n[B>>2],n[B>>2]=s,n[l>>2]=n[f>>2]}function UA(s){s=s|0;var l=0,c=0,f=0;l=n[s+4>>2]|0,c=s+8|0,f=n[c>>2]|0,(f|0)!=(l|0)&&(n[c>>2]=f+(~((f+-4-l|0)>>>2)<<2)),s=n[s>>2]|0,s|0&>(s)}function _A(s,l,c,f){s=s|0,l=l|0,c=c|0,f=f|0;var d=0,m=0,B=0,k=0;if(B=s+4|0,k=n[B>>2]|0,d=k-f|0,m=d>>2,s=l+(m<<2)|0,s>>>0>>0){f=k;do n[f>>2]=n[s>>2],s=s+4|0,f=(n[B>>2]|0)+4|0,n[B>>2]=f;while(s>>>0>>0)}m|0&&ww(k+(0-m<<2)|0,l|0,d|0)|0}function C0(s,l,c){s=s|0,l=l|0,c=c|0;var f=0,d=0,m=0,B=0,k=0,Q=0;return k=l+4|0,Q=n[k>>2]|0,d=n[s>>2]|0,B=c,m=B-d|0,f=Q+(0-(m>>2)<<2)|0,n[k>>2]=f,(m|0)>0&&Dr(f|0,d|0,m|0)|0,d=s+4|0,m=l+8|0,f=(n[d>>2]|0)-B|0,(f|0)>0&&(Dr(n[m>>2]|0,c|0,f|0)|0,n[m>>2]=(n[m>>2]|0)+(f>>>2<<2)),B=n[s>>2]|0,n[s>>2]=n[k>>2],n[k>>2]=B,B=n[d>>2]|0,n[d>>2]=n[m>>2],n[m>>2]=B,B=s+8|0,c=l+12|0,s=n[B>>2]|0,n[B>>2]=n[c>>2],n[c>>2]=s,n[l>>2]=n[k>>2],Q|0}function lw(s,l,c){s=s|0,l=l|0,c=c|0;var f=0,d=0,m=0,B=0;if(B=n[l>>2]|0,m=n[c>>2]|0,(B|0)!=(m|0)){d=s+8|0,c=((m+-4-B|0)>>>2)+1|0,s=B,f=n[d>>2]|0;do n[f>>2]=n[s>>2],f=(n[d>>2]|0)+4|0,n[d>>2]=f,s=s+4|0;while((s|0)!=(m|0));n[l>>2]=B+(c<<2)}}function Pd(){mc()}function ma(){var s=0;return s=Kt(4)|0,HA(s),s|0}function HA(s){s=s|0,n[s>>2]=Cs()|0}function bc(s){s=s|0,s|0&&(w0(s),gt(s))}function w0(s){s=s|0,tt(n[s>>2]|0)}function Sd(s,l,c){s=s|0,l=l|0,c=c|0,Wa(n[s>>2]|0,l,c)}function fo(s,l){s=s|0,l=y(l),ga(n[s>>2]|0,l)}function xv(s,l){return s=s|0,l=l|0,aw(n[s>>2]|0,l)|0}function cw(){var s=0;return s=Kt(8)|0,kv(s,0),s|0}function kv(s,l){s=s|0,l=l|0,l?l=Ei(n[l>>2]|0)|0:l=co()|0,n[s>>2]=l,n[s+4>>2]=0,Si(l,s)}function eF(s){s=s|0;var l=0;return l=Kt(8)|0,kv(l,s),l|0}function Qv(s){s=s|0,s|0&&(bu(s),gt(s))}function bu(s){s=s|0;var l=0;ua(n[s>>2]|0),l=s+4|0,s=n[l>>2]|0,n[l>>2]=0,s|0&&(qA(s),gt(s))}function qA(s){s=s|0,jA(s)}function jA(s){s=s|0,s=n[s>>2]|0,s|0&&PA(s|0)}function uw(s){return s=s|0,qo(s)|0}function bd(s){s=s|0;var l=0,c=0;c=s+4|0,l=n[c>>2]|0,n[c>>2]=0,l|0&&(qA(l),gt(l)),qs(n[s>>2]|0)}function tF(s,l){s=s|0,l=l|0,$r(n[s>>2]|0,n[l>>2]|0)}function rF(s,l){s=s|0,l=l|0,Aa(n[s>>2]|0,l)}function Fv(s,l,c){s=s|0,l=l|0,c=+c,Cu(n[s>>2]|0,l,y(c))}function Rv(s,l,c){s=s|0,l=l|0,c=+c,ws(n[s>>2]|0,l,y(c))}function Aw(s,l){s=s|0,l=l|0,mu(n[s>>2]|0,l)}function xu(s,l){s=s|0,l=l|0,yu(n[s>>2]|0,l)}function nF(s,l){s=s|0,l=l|0,QA(n[s>>2]|0,l)}function iF(s,l){s=s|0,l=l|0,xA(n[s>>2]|0,l)}function Cp(s,l){s=s|0,l=l|0,Ec(n[s>>2]|0,l)}function sF(s,l){s=s|0,l=l|0,lp(n[s>>2]|0,l)}function Tv(s,l,c){s=s|0,l=l|0,c=+c,wc(n[s>>2]|0,l,y(c))}function GA(s,l,c){s=s|0,l=l|0,c=+c,Y(n[s>>2]|0,l,y(c))}function oF(s,l){s=s|0,l=l|0,wl(n[s>>2]|0,l)}function aF(s,l){s=s|0,l=l|0,n0(n[s>>2]|0,l)}function Nv(s,l){s=s|0,l=l|0,cp(n[s>>2]|0,l)}function fw(s,l){s=s|0,l=+l,FA(n[s>>2]|0,y(l))}function pw(s,l){s=s|0,l=+l,ja(n[s>>2]|0,y(l))}function lF(s,l){s=s|0,l=+l,Gi(n[s>>2]|0,y(l))}function cF(s,l){s=s|0,l=+l,js(n[s>>2]|0,y(l))}function Dl(s,l){s=s|0,l=+l,Eu(n[s>>2]|0,y(l))}function hw(s,l){s=s|0,l=+l,tw(n[s>>2]|0,y(l))}function uF(s,l){s=s|0,l=+l,RA(n[s>>2]|0,y(l))}function YA(s){s=s|0,up(n[s>>2]|0)}function xd(s,l){s=s|0,l=+l,Is(n[s>>2]|0,y(l))}function ku(s,l){s=s|0,l=+l,o0(n[s>>2]|0,y(l))}function gw(s){s=s|0,a0(n[s>>2]|0)}function 
dw(s,l){s=s|0,l=+l,Ap(n[s>>2]|0,y(l))}function AF(s,l){s=s|0,l=+l,Bc(n[s>>2]|0,y(l))}function Lv(s,l){s=s|0,l=+l,gd(n[s>>2]|0,y(l))}function WA(s,l){s=s|0,l=+l,c0(n[s>>2]|0,y(l))}function Mv(s,l){s=s|0,l=+l,Iu(n[s>>2]|0,y(l))}function kd(s,l){s=s|0,l=+l,dd(n[s>>2]|0,y(l))}function Ov(s,l){s=s|0,l=+l,Bu(n[s>>2]|0,y(l))}function Uv(s,l){s=s|0,l=+l,rw(n[s>>2]|0,y(l))}function Qd(s,l){s=s|0,l=+l,pa(n[s>>2]|0,y(l))}function _v(s,l,c){s=s|0,l=l|0,c=+c,wu(n[s>>2]|0,l,y(c))}function fF(s,l,c){s=s|0,l=l|0,c=+c,bi(n[s>>2]|0,l,y(c))}function P(s,l,c){s=s|0,l=l|0,c=+c,Ic(n[s>>2]|0,l,y(c))}function D(s){return s=s|0,r0(n[s>>2]|0)|0}function T(s,l,c){s=s|0,l=l|0,c=c|0;var f=0,d=0;f=C,C=C+16|0,d=f,Cc(d,n[l>>2]|0,c),q(s,d),C=f}function q(s,l){s=s|0,l=l|0,W(s,n[l+4>>2]|0,+y(h[l>>2]))}function W(s,l,c){s=s|0,l=l|0,c=+c,n[s>>2]=l,E[s+8>>3]=c}function fe(s){return s=s|0,t0(n[s>>2]|0)|0}function De(s){return s=s|0,uo(n[s>>2]|0)|0}function vt(s){return s=s|0,yc(n[s>>2]|0)|0}function wt(s){return s=s|0,kA(n[s>>2]|0)|0}function bt(s){return s=s|0,hd(n[s>>2]|0)|0}function _r(s){return s=s|0,e0(n[s>>2]|0)|0}function os(s,l,c){s=s|0,l=l|0,c=c|0;var f=0,d=0;f=C,C=C+16|0,d=f,Dt(d,n[l>>2]|0,c),q(s,d),C=f}function di(s){return s=s|0,$n(n[s>>2]|0)|0}function po(s){return s=s|0,i0(n[s>>2]|0)|0}function KA(s,l){s=s|0,l=l|0;var c=0,f=0;c=C,C=C+16|0,f=c,fa(f,n[l>>2]|0),q(s,f),C=c}function Yo(s){return s=s|0,+ +y(ji(n[s>>2]|0))}function nt(s){return s=s|0,+ +y(rs(n[s>>2]|0))}function Ve(s,l){s=s|0,l=l|0;var c=0,f=0;c=C,C=C+16|0,f=c,Br(f,n[l>>2]|0),q(s,f),C=c}function At(s,l){s=s|0,l=l|0;var c=0,f=0;c=C,C=C+16|0,f=c,l0(f,n[l>>2]|0),q(s,f),C=c}function Wt(s,l){s=s|0,l=l|0;var c=0,f=0;c=C,C=C+16|0,f=c,Ct(f,n[l>>2]|0),q(s,f),C=c}function vr(s,l){s=s|0,l=l|0;var c=0,f=0;c=C,C=C+16|0,f=c,u0(f,n[l>>2]|0),q(s,f),C=c}function Sn(s,l){s=s|0,l=l|0;var c=0,f=0;c=C,C=C+16|0,f=c,A0(f,n[l>>2]|0),q(s,f),C=c}function Qr(s,l){s=s|0,l=l|0;var c=0,f=0;c=C,C=C+16|0,f=c,md(f,n[l>>2]|0),q(s,f),C=c}function bn(s){return s=s|0,+ +y(vc(n[s>>2]|0))}function ai(s,l){return s=s|0,l=l|0,+ +y(s0(n[s>>2]|0,l))}function tn(s,l,c){s=s|0,l=l|0,c=c|0;var f=0,d=0;f=C,C=C+16|0,d=f,ct(d,n[l>>2]|0,c),q(s,d),C=f}function ho(s,l,c){s=s|0,l=l|0,c=c|0,or(n[s>>2]|0,n[l>>2]|0,c)}function pF(s,l){s=s|0,l=l|0,Es(n[s>>2]|0,n[l>>2]|0)}function nve(s){return s=s|0,Ci(n[s>>2]|0)|0}function ive(s){return s=s|0,s=pt(n[s>>2]|0)|0,s?s=uw(s)|0:s=0,s|0}function sve(s,l){return s=s|0,l=l|0,s=ms(n[s>>2]|0,l)|0,s?s=uw(s)|0:s=0,s|0}function ove(s,l){s=s|0,l=l|0;var c=0,f=0;f=Kt(4)|0,W5(f,l),c=s+4|0,l=n[c>>2]|0,n[c>>2]=f,l|0&&(qA(l),gt(l)),Bt(n[s>>2]|0,1)}function W5(s,l){s=s|0,l=l|0,yve(s,l)}function ave(s,l,c,f,d,m){s=s|0,l=l|0,c=y(c),f=f|0,d=y(d),m=m|0;var B=0,k=0;B=C,C=C+16|0,k=B,lve(k,qo(l)|0,+c,f,+d,m),h[s>>2]=y(+E[k>>3]),h[s+4>>2]=y(+E[k+8>>3]),C=B}function lve(s,l,c,f,d,m){s=s|0,l=l|0,c=+c,f=f|0,d=+d,m=m|0;var B=0,k=0,Q=0,O=0,M=0;B=C,C=C+32|0,M=B+8|0,O=B+20|0,Q=B,k=B+16|0,E[M>>3]=c,n[O>>2]=f,E[Q>>3]=d,n[k>>2]=m,cve(s,n[l+4>>2]|0,M,O,Q,k),C=B}function cve(s,l,c,f,d,m){s=s|0,l=l|0,c=c|0,f=f|0,d=d|0,m=m|0;var B=0,k=0;B=C,C=C+16|0,k=B,za(k),l=ya(l)|0,uve(s,l,+E[c>>3],n[f>>2]|0,+E[d>>3],n[m>>2]|0),Ja(k),C=B}function ya(s){return s=s|0,n[s>>2]|0}function uve(s,l,c,f,d,m){s=s|0,l=l|0,c=+c,f=f|0,d=+d,m=m|0;var B=0;B=Pl(Ave()|0)|0,c=+VA(c),f=hF(f)|0,d=+VA(d),fve(s,hi(0,B|0,l|0,+c,f|0,+d,hF(m)|0)|0)}function Ave(){var s=0;return o[7608]|0||(dve(9120),s=7608,n[s>>2]=1,n[s+4>>2]=0),9120}function Pl(s){return s=s|0,n[s+8>>2]|0}function VA(s){return s=+s,+ +gF(s)}function hF(s){return 
s=s|0,V5(s)|0}function fve(s,l){s=s|0,l=l|0;var c=0,f=0,d=0;d=C,C=C+32|0,c=d,f=l,f&1?(pve(c,0),ii(f|0,c|0)|0,hve(s,c),gve(c)):(n[s>>2]=n[l>>2],n[s+4>>2]=n[l+4>>2],n[s+8>>2]=n[l+8>>2],n[s+12>>2]=n[l+12>>2]),C=d}function pve(s,l){s=s|0,l=l|0,K5(s,l),n[s+8>>2]=0,o[s+24>>0]=0}function hve(s,l){s=s|0,l=l|0,l=l+8|0,n[s>>2]=n[l>>2],n[s+4>>2]=n[l+4>>2],n[s+8>>2]=n[l+8>>2],n[s+12>>2]=n[l+12>>2]}function gve(s){s=s|0,o[s+24>>0]=0}function K5(s,l){s=s|0,l=l|0,n[s>>2]=l}function V5(s){return s=s|0,s|0}function gF(s){return s=+s,+s}function dve(s){s=s|0,Sl(s,mve()|0,4)}function mve(){return 1064}function Sl(s,l,c){s=s|0,l=l|0,c=c|0,n[s>>2]=l,n[s+4>>2]=c,n[s+8>>2]=ap(l|0,c+1|0)|0}function yve(s,l){s=s|0,l=l|0,l=n[l>>2]|0,n[s>>2]=l,yl(l|0)}function Eve(s){s=s|0;var l=0,c=0;c=s+4|0,l=n[c>>2]|0,n[c>>2]=0,l|0&&(qA(l),gt(l)),Bt(n[s>>2]|0,0)}function Cve(s){s=s|0,Nt(n[s>>2]|0)}function wve(s){return s=s|0,rr(n[s>>2]|0)|0}function Ive(s,l,c,f){s=s|0,l=+l,c=+c,f=f|0,Dc(n[s>>2]|0,y(l),y(c),f)}function Bve(s){return s=s|0,+ +y(Il(n[s>>2]|0))}function vve(s){return s=s|0,+ +y(f0(n[s>>2]|0))}function Dve(s){return s=s|0,+ +y(vu(n[s>>2]|0))}function Pve(s){return s=s|0,+ +y(TA(n[s>>2]|0))}function Sve(s){return s=s|0,+ +y(fp(n[s>>2]|0))}function bve(s){return s=s|0,+ +y(Ga(n[s>>2]|0))}function xve(s,l){s=s|0,l=l|0,E[s>>3]=+y(Il(n[l>>2]|0)),E[s+8>>3]=+y(f0(n[l>>2]|0)),E[s+16>>3]=+y(vu(n[l>>2]|0)),E[s+24>>3]=+y(TA(n[l>>2]|0)),E[s+32>>3]=+y(fp(n[l>>2]|0)),E[s+40>>3]=+y(Ga(n[l>>2]|0))}function kve(s,l){return s=s|0,l=l|0,+ +y(p0(n[s>>2]|0,l))}function Qve(s,l){return s=s|0,l=l|0,+ +y(pp(n[s>>2]|0,l))}function Fve(s,l){return s=s|0,l=l|0,+ +y(jo(n[s>>2]|0,l))}function Rve(){return Pn()|0}function Tve(){Nve(),Lve(),Mve(),Ove(),Uve(),_ve()}function Nve(){UNe(11713,4938,1)}function Lve(){iNe(10448)}function Mve(){UTe(10408)}function Ove(){lTe(10324)}function Uve(){dFe(10096)}function _ve(){Hve(9132)}function Hve(s){s=s|0;var 
l=0,c=0,f=0,d=0,m=0,B=0,k=0,Q=0,O=0,M=0,j=0,se=0,je=0,Oe=0,Qe=0,$e=0,Je=0,lt=0,_e=0,qe=0,Lt=0,Or=0,cr=0,Xt=0,Pr=0,Tr=0,ar=0,xn=0,go=0,mo=0,yo=0,Ca=0,xp=0,kp=0,bl=0,Qp=0,Tu=0,Nu=0,Fp=0,Rp=0,Tp=0,Xr=0,xl=0,Np=0,kc=0,Lp=0,Mp=0,Lu=0,Mu=0,Qc=0,Ys=0,Za=0,Wo=0,kl=0,rf=0,nf=0,Ou=0,sf=0,of=0,Ws=0,Ps=0,Ql=0,Rn=0,af=0,Eo=0,Fc=0,Co=0,Rc=0,lf=0,cf=0,Tc=0,Ks=0,Fl=0,uf=0,Af=0,ff=0,xr=0,zn=0,Ss=0,wo=0,Vs=0,Fr=0,ur=0,Rl=0;l=C,C=C+672|0,c=l+656|0,Rl=l+648|0,ur=l+640|0,Fr=l+632|0,Vs=l+624|0,wo=l+616|0,Ss=l+608|0,zn=l+600|0,xr=l+592|0,ff=l+584|0,Af=l+576|0,uf=l+568|0,Fl=l+560|0,Ks=l+552|0,Tc=l+544|0,cf=l+536|0,lf=l+528|0,Rc=l+520|0,Co=l+512|0,Fc=l+504|0,Eo=l+496|0,af=l+488|0,Rn=l+480|0,Ql=l+472|0,Ps=l+464|0,Ws=l+456|0,of=l+448|0,sf=l+440|0,Ou=l+432|0,nf=l+424|0,rf=l+416|0,kl=l+408|0,Wo=l+400|0,Za=l+392|0,Ys=l+384|0,Qc=l+376|0,Mu=l+368|0,Lu=l+360|0,Mp=l+352|0,Lp=l+344|0,kc=l+336|0,Np=l+328|0,xl=l+320|0,Xr=l+312|0,Tp=l+304|0,Rp=l+296|0,Fp=l+288|0,Nu=l+280|0,Tu=l+272|0,Qp=l+264|0,bl=l+256|0,kp=l+248|0,xp=l+240|0,Ca=l+232|0,yo=l+224|0,mo=l+216|0,go=l+208|0,xn=l+200|0,ar=l+192|0,Tr=l+184|0,Pr=l+176|0,Xt=l+168|0,cr=l+160|0,Or=l+152|0,Lt=l+144|0,qe=l+136|0,_e=l+128|0,lt=l+120|0,Je=l+112|0,$e=l+104|0,Qe=l+96|0,Oe=l+88|0,je=l+80|0,se=l+72|0,j=l+64|0,M=l+56|0,O=l+48|0,Q=l+40|0,k=l+32|0,B=l+24|0,m=l+16|0,d=l+8|0,f=l,qve(s,3646),jve(s,3651,2)|0,Gve(s,3665,2)|0,Yve(s,3682,18)|0,n[Rl>>2]=19,n[Rl+4>>2]=0,n[c>>2]=n[Rl>>2],n[c+4>>2]=n[Rl+4>>2],mw(s,3690,c)|0,n[ur>>2]=1,n[ur+4>>2]=0,n[c>>2]=n[ur>>2],n[c+4>>2]=n[ur+4>>2],Wve(s,3696,c)|0,n[Fr>>2]=2,n[Fr+4>>2]=0,n[c>>2]=n[Fr>>2],n[c+4>>2]=n[Fr+4>>2],Qu(s,3706,c)|0,n[Vs>>2]=1,n[Vs+4>>2]=0,n[c>>2]=n[Vs>>2],n[c+4>>2]=n[Vs+4>>2],I0(s,3722,c)|0,n[wo>>2]=2,n[wo+4>>2]=0,n[c>>2]=n[wo>>2],n[c+4>>2]=n[wo+4>>2],I0(s,3734,c)|0,n[Ss>>2]=3,n[Ss+4>>2]=0,n[c>>2]=n[Ss>>2],n[c+4>>2]=n[Ss+4>>2],Qu(s,3753,c)|0,n[zn>>2]=4,n[zn+4>>2]=0,n[c>>2]=n[zn>>2],n[c+4>>2]=n[zn+4>>2],Qu(s,3769,c)|0,n[xr>>2]=5,n[xr+4>>2]=0,n[c>>2]=n[xr>>2],n[c+4>>2]=n[xr+4>>2],Qu(s,3783,c)|0,n[ff>>2]=6,n[ff+4>>2]=0,n[c>>2]=n[ff>>2],n[c+4>>2]=n[ff+4>>2],Qu(s,3796,c)|0,n[Af>>2]=7,n[Af+4>>2]=0,n[c>>2]=n[Af>>2],n[c+4>>2]=n[Af+4>>2],Qu(s,3813,c)|0,n[uf>>2]=8,n[uf+4>>2]=0,n[c>>2]=n[uf>>2],n[c+4>>2]=n[uf+4>>2],Qu(s,3825,c)|0,n[Fl>>2]=3,n[Fl+4>>2]=0,n[c>>2]=n[Fl>>2],n[c+4>>2]=n[Fl+4>>2],I0(s,3843,c)|0,n[Ks>>2]=4,n[Ks+4>>2]=0,n[c>>2]=n[Ks>>2],n[c+4>>2]=n[Ks+4>>2],I0(s,3853,c)|0,n[Tc>>2]=9,n[Tc+4>>2]=0,n[c>>2]=n[Tc>>2],n[c+4>>2]=n[Tc+4>>2],Qu(s,3870,c)|0,n[cf>>2]=10,n[cf+4>>2]=0,n[c>>2]=n[cf>>2],n[c+4>>2]=n[cf+4>>2],Qu(s,3884,c)|0,n[lf>>2]=11,n[lf+4>>2]=0,n[c>>2]=n[lf>>2],n[c+4>>2]=n[lf+4>>2],Qu(s,3896,c)|0,n[Rc>>2]=1,n[Rc+4>>2]=0,n[c>>2]=n[Rc>>2],n[c+4>>2]=n[Rc+4>>2],vs(s,3907,c)|0,n[Co>>2]=2,n[Co+4>>2]=0,n[c>>2]=n[Co>>2],n[c+4>>2]=n[Co+4>>2],vs(s,3915,c)|0,n[Fc>>2]=3,n[Fc+4>>2]=0,n[c>>2]=n[Fc>>2],n[c+4>>2]=n[Fc+4>>2],vs(s,3928,c)|0,n[Eo>>2]=4,n[Eo+4>>2]=0,n[c>>2]=n[Eo>>2],n[c+4>>2]=n[Eo+4>>2],vs(s,3948,c)|0,n[af>>2]=5,n[af+4>>2]=0,n[c>>2]=n[af>>2],n[c+4>>2]=n[af+4>>2],vs(s,3960,c)|0,n[Rn>>2]=6,n[Rn+4>>2]=0,n[c>>2]=n[Rn>>2],n[c+4>>2]=n[Rn+4>>2],vs(s,3974,c)|0,n[Ql>>2]=7,n[Ql+4>>2]=0,n[c>>2]=n[Ql>>2],n[c+4>>2]=n[Ql+4>>2],vs(s,3983,c)|0,n[Ps>>2]=20,n[Ps+4>>2]=0,n[c>>2]=n[Ps>>2],n[c+4>>2]=n[Ps+4>>2],mw(s,3999,c)|0,n[Ws>>2]=8,n[Ws+4>>2]=0,n[c>>2]=n[Ws>>2],n[c+4>>2]=n[Ws+4>>2],vs(s,4012,c)|0,n[of>>2]=9,n[of+4>>2]=0,n[c>>2]=n[of>>2],n[c+4>>2]=n[of+4>>2],vs(s,4022,c)|0,n[sf>>2]=21,n[sf+4>>2]=0,n[c>>2]=n[sf>>2],n[c+4>>2]=n[sf+4>>2],mw(s,4039,c)|0,n[Ou>>2]=10,n[Ou+4>>2]=0,n[c>>2]=n[Ou>>2],n[c+4>>2]=n[Ou+4>>2],vs(s,4053,c)|0,n[nf>>2]=11,n[nf+4>>2]=0
,n[c>>2]=n[nf>>2],n[c+4>>2]=n[nf+4>>2],vs(s,4065,c)|0,n[rf>>2]=12,n[rf+4>>2]=0,n[c>>2]=n[rf>>2],n[c+4>>2]=n[rf+4>>2],vs(s,4084,c)|0,n[kl>>2]=13,n[kl+4>>2]=0,n[c>>2]=n[kl>>2],n[c+4>>2]=n[kl+4>>2],vs(s,4097,c)|0,n[Wo>>2]=14,n[Wo+4>>2]=0,n[c>>2]=n[Wo>>2],n[c+4>>2]=n[Wo+4>>2],vs(s,4117,c)|0,n[Za>>2]=15,n[Za+4>>2]=0,n[c>>2]=n[Za>>2],n[c+4>>2]=n[Za+4>>2],vs(s,4129,c)|0,n[Ys>>2]=16,n[Ys+4>>2]=0,n[c>>2]=n[Ys>>2],n[c+4>>2]=n[Ys+4>>2],vs(s,4148,c)|0,n[Qc>>2]=17,n[Qc+4>>2]=0,n[c>>2]=n[Qc>>2],n[c+4>>2]=n[Qc+4>>2],vs(s,4161,c)|0,n[Mu>>2]=18,n[Mu+4>>2]=0,n[c>>2]=n[Mu>>2],n[c+4>>2]=n[Mu+4>>2],vs(s,4181,c)|0,n[Lu>>2]=5,n[Lu+4>>2]=0,n[c>>2]=n[Lu>>2],n[c+4>>2]=n[Lu+4>>2],I0(s,4196,c)|0,n[Mp>>2]=6,n[Mp+4>>2]=0,n[c>>2]=n[Mp>>2],n[c+4>>2]=n[Mp+4>>2],I0(s,4206,c)|0,n[Lp>>2]=7,n[Lp+4>>2]=0,n[c>>2]=n[Lp>>2],n[c+4>>2]=n[Lp+4>>2],I0(s,4217,c)|0,n[kc>>2]=3,n[kc+4>>2]=0,n[c>>2]=n[kc>>2],n[c+4>>2]=n[kc+4>>2],zA(s,4235,c)|0,n[Np>>2]=1,n[Np+4>>2]=0,n[c>>2]=n[Np>>2],n[c+4>>2]=n[Np+4>>2],dF(s,4251,c)|0,n[xl>>2]=4,n[xl+4>>2]=0,n[c>>2]=n[xl>>2],n[c+4>>2]=n[xl+4>>2],zA(s,4263,c)|0,n[Xr>>2]=5,n[Xr+4>>2]=0,n[c>>2]=n[Xr>>2],n[c+4>>2]=n[Xr+4>>2],zA(s,4279,c)|0,n[Tp>>2]=6,n[Tp+4>>2]=0,n[c>>2]=n[Tp>>2],n[c+4>>2]=n[Tp+4>>2],zA(s,4293,c)|0,n[Rp>>2]=7,n[Rp+4>>2]=0,n[c>>2]=n[Rp>>2],n[c+4>>2]=n[Rp+4>>2],zA(s,4306,c)|0,n[Fp>>2]=8,n[Fp+4>>2]=0,n[c>>2]=n[Fp>>2],n[c+4>>2]=n[Fp+4>>2],zA(s,4323,c)|0,n[Nu>>2]=9,n[Nu+4>>2]=0,n[c>>2]=n[Nu>>2],n[c+4>>2]=n[Nu+4>>2],zA(s,4335,c)|0,n[Tu>>2]=2,n[Tu+4>>2]=0,n[c>>2]=n[Tu>>2],n[c+4>>2]=n[Tu+4>>2],dF(s,4353,c)|0,n[Qp>>2]=12,n[Qp+4>>2]=0,n[c>>2]=n[Qp>>2],n[c+4>>2]=n[Qp+4>>2],B0(s,4363,c)|0,n[bl>>2]=1,n[bl+4>>2]=0,n[c>>2]=n[bl>>2],n[c+4>>2]=n[bl+4>>2],JA(s,4376,c)|0,n[kp>>2]=2,n[kp+4>>2]=0,n[c>>2]=n[kp>>2],n[c+4>>2]=n[kp+4>>2],JA(s,4388,c)|0,n[xp>>2]=13,n[xp+4>>2]=0,n[c>>2]=n[xp>>2],n[c+4>>2]=n[xp+4>>2],B0(s,4402,c)|0,n[Ca>>2]=14,n[Ca+4>>2]=0,n[c>>2]=n[Ca>>2],n[c+4>>2]=n[Ca+4>>2],B0(s,4411,c)|0,n[yo>>2]=15,n[yo+4>>2]=0,n[c>>2]=n[yo>>2],n[c+4>>2]=n[yo+4>>2],B0(s,4421,c)|0,n[mo>>2]=16,n[mo+4>>2]=0,n[c>>2]=n[mo>>2],n[c+4>>2]=n[mo+4>>2],B0(s,4433,c)|0,n[go>>2]=17,n[go+4>>2]=0,n[c>>2]=n[go>>2],n[c+4>>2]=n[go+4>>2],B0(s,4446,c)|0,n[xn>>2]=18,n[xn+4>>2]=0,n[c>>2]=n[xn>>2],n[c+4>>2]=n[xn+4>>2],B0(s,4458,c)|0,n[ar>>2]=3,n[ar+4>>2]=0,n[c>>2]=n[ar>>2],n[c+4>>2]=n[ar+4>>2],JA(s,4471,c)|0,n[Tr>>2]=1,n[Tr+4>>2]=0,n[c>>2]=n[Tr>>2],n[c+4>>2]=n[Tr+4>>2],Hv(s,4486,c)|0,n[Pr>>2]=10,n[Pr+4>>2]=0,n[c>>2]=n[Pr>>2],n[c+4>>2]=n[Pr+4>>2],zA(s,4496,c)|0,n[Xt>>2]=11,n[Xt+4>>2]=0,n[c>>2]=n[Xt>>2],n[c+4>>2]=n[Xt+4>>2],zA(s,4508,c)|0,n[cr>>2]=3,n[cr+4>>2]=0,n[c>>2]=n[cr>>2],n[c+4>>2]=n[cr+4>>2],dF(s,4519,c)|0,n[Or>>2]=4,n[Or+4>>2]=0,n[c>>2]=n[Or>>2],n[c+4>>2]=n[Or+4>>2],Kve(s,4530,c)|0,n[Lt>>2]=19,n[Lt+4>>2]=0,n[c>>2]=n[Lt>>2],n[c+4>>2]=n[Lt+4>>2],Vve(s,4542,c)|0,n[qe>>2]=12,n[qe+4>>2]=0,n[c>>2]=n[qe>>2],n[c+4>>2]=n[qe+4>>2],zve(s,4554,c)|0,n[_e>>2]=13,n[_e+4>>2]=0,n[c>>2]=n[_e>>2],n[c+4>>2]=n[_e+4>>2],Jve(s,4568,c)|0,n[lt>>2]=2,n[lt+4>>2]=0,n[c>>2]=n[lt>>2],n[c+4>>2]=n[lt+4>>2],Xve(s,4578,c)|0,n[Je>>2]=20,n[Je+4>>2]=0,n[c>>2]=n[Je>>2],n[c+4>>2]=n[Je+4>>2],Zve(s,4587,c)|0,n[$e>>2]=22,n[$e+4>>2]=0,n[c>>2]=n[$e>>2],n[c+4>>2]=n[$e+4>>2],mw(s,4602,c)|0,n[Qe>>2]=23,n[Qe+4>>2]=0,n[c>>2]=n[Qe>>2],n[c+4>>2]=n[Qe+4>>2],mw(s,4619,c)|0,n[Oe>>2]=14,n[Oe+4>>2]=0,n[c>>2]=n[Oe>>2],n[c+4>>2]=n[Oe+4>>2],$ve(s,4629,c)|0,n[je>>2]=1,n[je+4>>2]=0,n[c>>2]=n[je>>2],n[c+4>>2]=n[je+4>>2],eDe(s,4637,c)|0,n[se>>2]=4,n[se+4>>2]=0,n[c>>2]=n[se>>2],n[c+4>>2]=n[se+4>>2],JA(s,4653,c)|0,n[j>>2]=5,n[j+4>>2]=0,n[c>>2]=n[j>>2],n[c+4>>2]=n[j+4>>2],JA(s,4669,c)|0,n[M>>2]=6
,n[M+4>>2]=0,n[c>>2]=n[M>>2],n[c+4>>2]=n[M+4>>2],JA(s,4686,c)|0,n[O>>2]=7,n[O+4>>2]=0,n[c>>2]=n[O>>2],n[c+4>>2]=n[O+4>>2],JA(s,4701,c)|0,n[Q>>2]=8,n[Q+4>>2]=0,n[c>>2]=n[Q>>2],n[c+4>>2]=n[Q+4>>2],JA(s,4719,c)|0,n[k>>2]=9,n[k+4>>2]=0,n[c>>2]=n[k>>2],n[c+4>>2]=n[k+4>>2],JA(s,4736,c)|0,n[B>>2]=21,n[B+4>>2]=0,n[c>>2]=n[B>>2],n[c+4>>2]=n[B+4>>2],tDe(s,4754,c)|0,n[m>>2]=2,n[m+4>>2]=0,n[c>>2]=n[m>>2],n[c+4>>2]=n[m+4>>2],Hv(s,4772,c)|0,n[d>>2]=3,n[d+4>>2]=0,n[c>>2]=n[d>>2],n[c+4>>2]=n[d+4>>2],Hv(s,4790,c)|0,n[f>>2]=4,n[f+4>>2]=0,n[c>>2]=n[f>>2],n[c+4>>2]=n[f+4>>2],Hv(s,4808,c)|0,C=l}function qve(s,l){s=s|0,l=l|0;var c=0;c=aFe()|0,n[s>>2]=c,lFe(c,l),Pp(n[s>>2]|0)}function jve(s,l,c){return s=s|0,l=l|0,c=c|0,KQe(s,pn(l)|0,c,0),s|0}function Gve(s,l,c){return s=s|0,l=l|0,c=c|0,QQe(s,pn(l)|0,c,0),s|0}function Yve(s,l,c){return s=s|0,l=l|0,c=c|0,mQe(s,pn(l)|0,c,0),s|0}function mw(s,l,c){s=s|0,l=l|0,c=c|0;var f=0,d=0,m=0,B=0;return f=C,C=C+16|0,d=f+8|0,m=f,B=n[c+4>>2]|0,n[m>>2]=n[c>>2],n[m+4>>2]=B,n[d>>2]=n[m>>2],n[d+4>>2]=n[m+4>>2],tQe(s,l,d),C=f,s|0}function Wve(s,l,c){s=s|0,l=l|0,c=c|0;var f=0,d=0,m=0,B=0;return f=C,C=C+16|0,d=f+8|0,m=f,B=n[c+4>>2]|0,n[m>>2]=n[c>>2],n[m+4>>2]=B,n[d>>2]=n[m>>2],n[d+4>>2]=n[m+4>>2],Lke(s,l,d),C=f,s|0}function Qu(s,l,c){s=s|0,l=l|0,c=c|0;var f=0,d=0,m=0,B=0;return f=C,C=C+16|0,d=f+8|0,m=f,B=n[c+4>>2]|0,n[m>>2]=n[c>>2],n[m+4>>2]=B,n[d>>2]=n[m>>2],n[d+4>>2]=n[m+4>>2],Cke(s,l,d),C=f,s|0}function I0(s,l,c){s=s|0,l=l|0,c=c|0;var f=0,d=0,m=0,B=0;return f=C,C=C+16|0,d=f+8|0,m=f,B=n[c+4>>2]|0,n[m>>2]=n[c>>2],n[m+4>>2]=B,n[d>>2]=n[m>>2],n[d+4>>2]=n[m+4>>2],ike(s,l,d),C=f,s|0}function vs(s,l,c){s=s|0,l=l|0,c=c|0;var f=0,d=0,m=0,B=0;return f=C,C=C+16|0,d=f+8|0,m=f,B=n[c+4>>2]|0,n[m>>2]=n[c>>2],n[m+4>>2]=B,n[d>>2]=n[m>>2],n[d+4>>2]=n[m+4>>2],qxe(s,l,d),C=f,s|0}function zA(s,l,c){s=s|0,l=l|0,c=c|0;var f=0,d=0,m=0,B=0;return f=C,C=C+16|0,d=f+8|0,m=f,B=n[c+4>>2]|0,n[m>>2]=n[c>>2],n[m+4>>2]=B,n[d>>2]=n[m>>2],n[d+4>>2]=n[m+4>>2],Pxe(s,l,d),C=f,s|0}function dF(s,l,c){s=s|0,l=l|0,c=c|0;var f=0,d=0,m=0,B=0;return f=C,C=C+16|0,d=f+8|0,m=f,B=n[c+4>>2]|0,n[m>>2]=n[c>>2],n[m+4>>2]=B,n[d>>2]=n[m>>2],n[d+4>>2]=n[m+4>>2],uxe(s,l,d),C=f,s|0}function B0(s,l,c){s=s|0,l=l|0,c=c|0;var f=0,d=0,m=0,B=0;return f=C,C=C+16|0,d=f+8|0,m=f,B=n[c+4>>2]|0,n[m>>2]=n[c>>2],n[m+4>>2]=B,n[d>>2]=n[m>>2],n[d+4>>2]=n[m+4>>2],Lbe(s,l,d),C=f,s|0}function JA(s,l,c){s=s|0,l=l|0,c=c|0;var f=0,d=0,m=0,B=0;return f=C,C=C+16|0,d=f+8|0,m=f,B=n[c+4>>2]|0,n[m>>2]=n[c>>2],n[m+4>>2]=B,n[d>>2]=n[m>>2],n[d+4>>2]=n[m+4>>2],Cbe(s,l,d),C=f,s|0}function Hv(s,l,c){s=s|0,l=l|0,c=c|0;var f=0,d=0,m=0,B=0;return f=C,C=C+16|0,d=f+8|0,m=f,B=n[c+4>>2]|0,n[m>>2]=n[c>>2],n[m+4>>2]=B,n[d>>2]=n[m>>2],n[d+4>>2]=n[m+4>>2],ibe(s,l,d),C=f,s|0}function Kve(s,l,c){s=s|0,l=l|0,c=c|0;var f=0,d=0,m=0,B=0;return f=C,C=C+16|0,d=f+8|0,m=f,B=n[c+4>>2]|0,n[m>>2]=n[c>>2],n[m+4>>2]=B,n[d>>2]=n[m>>2],n[d+4>>2]=n[m+4>>2],qSe(s,l,d),C=f,s|0}function Vve(s,l,c){s=s|0,l=l|0,c=c|0;var f=0,d=0,m=0,B=0;return f=C,C=C+16|0,d=f+8|0,m=f,B=n[c+4>>2]|0,n[m>>2]=n[c>>2],n[m+4>>2]=B,n[d>>2]=n[m>>2],n[d+4>>2]=n[m+4>>2],PSe(s,l,d),C=f,s|0}function zve(s,l,c){s=s|0,l=l|0,c=c|0;var f=0,d=0,m=0,B=0;return f=C,C=C+16|0,d=f+8|0,m=f,B=n[c+4>>2]|0,n[m>>2]=n[c>>2],n[m+4>>2]=B,n[d>>2]=n[m>>2],n[d+4>>2]=n[m+4>>2],ASe(s,l,d),C=f,s|0}function Jve(s,l,c){s=s|0,l=l|0,c=c|0;var f=0,d=0,m=0,B=0;return f=C,C=C+16|0,d=f+8|0,m=f,B=n[c+4>>2]|0,n[m>>2]=n[c>>2],n[m+4>>2]=B,n[d>>2]=n[m>>2],n[d+4>>2]=n[m+4>>2],JPe(s,l,d),C=f,s|0}function Xve(s,l,c){s=s|0,l=l|0,c=c|0;var f=0,d=0,m=0,B=0;return 
f=C,C=C+16|0,d=f+8|0,m=f,B=n[c+4>>2]|0,n[m>>2]=n[c>>2],n[m+4>>2]=B,n[d>>2]=n[m>>2],n[d+4>>2]=n[m+4>>2],RPe(s,l,d),C=f,s|0}function Zve(s,l,c){s=s|0,l=l|0,c=c|0;var f=0,d=0,m=0,B=0;return f=C,C=C+16|0,d=f+8|0,m=f,B=n[c+4>>2]|0,n[m>>2]=n[c>>2],n[m+4>>2]=B,n[d>>2]=n[m>>2],n[d+4>>2]=n[m+4>>2],dPe(s,l,d),C=f,s|0}function $ve(s,l,c){s=s|0,l=l|0,c=c|0;var f=0,d=0,m=0,B=0;return f=C,C=C+16|0,d=f+8|0,m=f,B=n[c+4>>2]|0,n[m>>2]=n[c>>2],n[m+4>>2]=B,n[d>>2]=n[m>>2],n[d+4>>2]=n[m+4>>2],ePe(s,l,d),C=f,s|0}function eDe(s,l,c){s=s|0,l=l|0,c=c|0;var f=0,d=0,m=0,B=0;return f=C,C=C+16|0,d=f+8|0,m=f,B=n[c+4>>2]|0,n[m>>2]=n[c>>2],n[m+4>>2]=B,n[d>>2]=n[m>>2],n[d+4>>2]=n[m+4>>2],LDe(s,l,d),C=f,s|0}function tDe(s,l,c){s=s|0,l=l|0,c=c|0;var f=0,d=0,m=0,B=0;return f=C,C=C+16|0,d=f+8|0,m=f,B=n[c+4>>2]|0,n[m>>2]=n[c>>2],n[m+4>>2]=B,n[d>>2]=n[m>>2],n[d+4>>2]=n[m+4>>2],rDe(s,l,d),C=f,s|0}function rDe(s,l,c){s=s|0,l=l|0,c=c|0;var f=0,d=0,m=0,B=0,k=0;f=C,C=C+16|0,d=f+8|0,m=f,k=n[c>>2]|0,B=n[c+4>>2]|0,c=pn(l)|0,n[m>>2]=k,n[m+4>>2]=B,n[d>>2]=n[m>>2],n[d+4>>2]=n[m+4>>2],nDe(s,c,d,1),C=f}function pn(s){return s=s|0,s|0}function nDe(s,l,c,f){s=s|0,l=l|0,c=c|0,f=f|0;var d=0,m=0,B=0,k=0,Q=0,O=0,M=0;d=C,C=C+32|0,m=d+16|0,M=d+8|0,k=d,O=n[c>>2]|0,Q=n[c+4>>2]|0,B=n[s>>2]|0,s=mF()|0,n[M>>2]=O,n[M+4>>2]=Q,n[m>>2]=n[M>>2],n[m+4>>2]=n[M+4>>2],c=iDe(m)|0,n[k>>2]=O,n[k+4>>2]=Q,n[m>>2]=n[k>>2],n[m+4>>2]=n[k+4>>2],hn(B,l,s,c,sDe(m,f)|0,f),C=d}function mF(){var s=0,l=0;if(o[7616]|0||(X5(9136),ir(24,9136,U|0)|0,l=7616,n[l>>2]=1,n[l+4>>2]=0),!(Rr(9136)|0)){s=9136,l=s+36|0;do n[s>>2]=0,s=s+4|0;while((s|0)<(l|0));X5(9136)}return 9136}function iDe(s){return s=s|0,0}function sDe(s,l){s=s|0,l=l|0;var c=0,f=0,d=0,m=0,B=0,k=0,Q=0,O=0,M=0,j=0;return M=C,C=C+32|0,d=M+24|0,B=M+16|0,k=M,Q=M+8|0,m=n[s>>2]|0,f=n[s+4>>2]|0,n[k>>2]=m,n[k+4>>2]=f,j=mF()|0,O=j+24|0,s=gr(l,4)|0,n[Q>>2]=s,l=j+28|0,c=n[l>>2]|0,c>>>0<(n[j+32>>2]|0)>>>0?(n[B>>2]=m,n[B+4>>2]=f,n[d>>2]=n[B>>2],n[d+4>>2]=n[B+4>>2],J5(c,d,s),s=(n[l>>2]|0)+12|0,n[l>>2]=s):(lDe(O,k,Q),s=n[l>>2]|0),C=M,((s-(n[O>>2]|0)|0)/12|0)+-1|0}function hn(s,l,c,f,d,m){s=s|0,l=l|0,c=c|0,f=f|0,d=d|0,m=m|0;var B=0,k=0,Q=0,O=0,M=0,j=0,se=0,je=0;B=C,C=C+32|0,se=B+24|0,j=B+20|0,Q=B+16|0,M=B+12|0,O=B+8|0,k=B+4|0,je=B,n[j>>2]=l,n[Q>>2]=c,n[M>>2]=f,n[O>>2]=d,n[k>>2]=m,m=s+28|0,n[je>>2]=n[m>>2],n[se>>2]=n[je>>2],oDe(s+24|0,se,j,M,O,Q,k)|0,n[m>>2]=n[n[m>>2]>>2],C=B}function oDe(s,l,c,f,d,m,B){return s=s|0,l=l|0,c=c|0,f=f|0,d=d|0,m=m|0,B=B|0,s=aDe(l)|0,l=Kt(24)|0,z5(l+4|0,n[c>>2]|0,n[f>>2]|0,n[d>>2]|0,n[m>>2]|0,n[B>>2]|0),n[l>>2]=n[s>>2],n[s>>2]=l,l|0}function aDe(s){return s=s|0,n[s>>2]|0}function z5(s,l,c,f,d,m){s=s|0,l=l|0,c=c|0,f=f|0,d=d|0,m=m|0,n[s>>2]=l,n[s+4>>2]=c,n[s+8>>2]=f,n[s+12>>2]=d,n[s+16>>2]=m}function gr(s,l){return s=s|0,l=l|0,l|s|0}function J5(s,l,c){s=s|0,l=l|0,c=c|0;var f=0;f=n[l+4>>2]|0,n[s>>2]=n[l>>2],n[s+4>>2]=f,n[s+8>>2]=c}function lDe(s,l,c){s=s|0,l=l|0,c=c|0;var f=0,d=0,m=0,B=0,k=0,Q=0,O=0,M=0,j=0,se=0;if(O=C,C=C+48|0,f=O+32|0,B=O+24|0,k=O,Q=s+4|0,d=(((n[Q>>2]|0)-(n[s>>2]|0)|0)/12|0)+1|0,m=cDe(s)|0,m>>>0>>0)Jr(s);else{M=n[s>>2]|0,se=((n[s+8>>2]|0)-M|0)/12|0,j=se<<1,uDe(k,se>>>0>>1>>>0?j>>>0>>0?d:j:m,((n[Q>>2]|0)-M|0)/12|0,s+8|0),Q=k+8|0,m=n[Q>>2]|0,d=n[l+4>>2]|0,c=n[c>>2]|0,n[B>>2]=n[l>>2],n[B+4>>2]=d,n[f>>2]=n[B>>2],n[f+4>>2]=n[B+4>>2],J5(m,f,c),n[Q>>2]=(n[Q>>2]|0)+12,ADe(s,k),fDe(k),C=O;return}}function cDe(s){return s=s|0,357913941}function uDe(s,l,c,f){s=s|0,l=l|0,c=c|0,f=f|0;var d=0;n[s+12>>2]=0,n[s+16>>2]=f;do if(l)if(l>>>0>357913941)Tt();else{d=Kt(l*12|0)|0;break}else 
d=0;while(!1);n[s>>2]=d,f=d+(c*12|0)|0,n[s+8>>2]=f,n[s+4>>2]=f,n[s+12>>2]=d+(l*12|0)}function ADe(s,l){s=s|0,l=l|0;var c=0,f=0,d=0,m=0,B=0;f=n[s>>2]|0,B=s+4|0,m=l+4|0,d=(n[B>>2]|0)-f|0,c=(n[m>>2]|0)+(((d|0)/-12|0)*12|0)|0,n[m>>2]=c,(d|0)>0?(Dr(c|0,f|0,d|0)|0,f=m,c=n[m>>2]|0):f=m,m=n[s>>2]|0,n[s>>2]=c,n[f>>2]=m,m=l+8|0,d=n[B>>2]|0,n[B>>2]=n[m>>2],n[m>>2]=d,m=s+8|0,B=l+12|0,s=n[m>>2]|0,n[m>>2]=n[B>>2],n[B>>2]=s,n[l>>2]=n[f>>2]}function fDe(s){s=s|0;var l=0,c=0,f=0;l=n[s+4>>2]|0,c=s+8|0,f=n[c>>2]|0,(f|0)!=(l|0)&&(n[c>>2]=f+(~(((f+-12-l|0)>>>0)/12|0)*12|0)),s=n[s>>2]|0,s|0&>(s)}function X5(s){s=s|0,gDe(s)}function pDe(s){s=s|0,hDe(s+24|0)}function Rr(s){return s=s|0,n[s>>2]|0}function hDe(s){s=s|0;var l=0,c=0,f=0;c=n[s>>2]|0,f=c,c|0&&(s=s+4|0,l=n[s>>2]|0,(l|0)!=(c|0)&&(n[s>>2]=l+(~(((l+-12-f|0)>>>0)/12|0)*12|0)),gt(c))}function gDe(s){s=s|0;var l=0;l=Vr()|0,zr(s,2,3,l,dDe()|0,0),n[s+24>>2]=0,n[s+28>>2]=0,n[s+32>>2]=0}function Vr(){return 9228}function dDe(){return 1140}function mDe(s,l){s=s|0,l=l|0;var c=0,f=0,d=0,m=0;return c=C,C=C+16|0,f=c+8|0,d=c,m=yDe(s)|0,s=n[m+4>>2]|0,n[d>>2]=n[m>>2],n[d+4>>2]=s,n[f>>2]=n[d>>2],n[f+4>>2]=n[d+4>>2],l=EDe(l,f)|0,C=c,l|0}function zr(s,l,c,f,d,m){s=s|0,l=l|0,c=c|0,f=f|0,d=d|0,m=m|0,n[s>>2]=l,n[s+4>>2]=c,n[s+8>>2]=f,n[s+12>>2]=d,n[s+16>>2]=m}function yDe(s){return s=s|0,(n[(mF()|0)+24>>2]|0)+(s*12|0)|0}function EDe(s,l){s=s|0,l=l|0;var c=0,f=0,d=0;return d=C,C=C+48|0,f=d,c=n[l>>2]|0,l=n[l+4>>2]|0,s=s+(l>>1)|0,l&1&&(c=n[(n[s>>2]|0)+c>>2]|0),tf[c&31](f,s),f=CDe(f)|0,C=d,f|0}function CDe(s){s=s|0;var l=0,c=0,f=0,d=0;return d=C,C=C+32|0,l=d+12|0,c=d,f=yF(Z5()|0)|0,f?(EF(l,f),CF(c,l),wDe(s,c),s=wF(l)|0):s=IDe(s)|0,C=d,s|0}function Z5(){var s=0;return o[7632]|0||(FDe(9184),ir(25,9184,U|0)|0,s=7632,n[s>>2]=1,n[s+4>>2]=0),9184}function yF(s){return s=s|0,n[s+36>>2]|0}function EF(s,l){s=s|0,l=l|0,n[s>>2]=l,n[s+4>>2]=s,n[s+8>>2]=0}function CF(s,l){s=s|0,l=l|0,n[s>>2]=n[l>>2],n[s+4>>2]=n[l+4>>2],n[s+8>>2]=0}function wDe(s,l){s=s|0,l=l|0,PDe(l,s,s+8|0,s+16|0,s+24|0,s+32|0,s+40|0)|0}function wF(s){return s=s|0,n[(n[s+4>>2]|0)+8>>2]|0}function IDe(s){s=s|0;var l=0,c=0,f=0,d=0,m=0,B=0,k=0,Q=0;Q=C,C=C+16|0,c=Q+4|0,f=Q,d=Va(8)|0,m=d,B=Kt(48)|0,k=B,l=k+48|0;do n[k>>2]=n[s>>2],k=k+4|0,s=s+4|0;while((k|0)<(l|0));return l=m+4|0,n[l>>2]=B,k=Kt(8)|0,B=n[l>>2]|0,n[f>>2]=0,n[c>>2]=n[f>>2],$5(k,B,c),n[d>>2]=k,C=Q,m|0}function $5(s,l,c){s=s|0,l=l|0,c=c|0,n[s>>2]=l,c=Kt(16)|0,n[c+4>>2]=0,n[c+8>>2]=0,n[c>>2]=1092,n[c+12>>2]=l,n[s+4>>2]=c}function BDe(s){s=s|0,Md(s),gt(s)}function vDe(s){s=s|0,s=n[s+12>>2]|0,s|0&>(s)}function DDe(s){s=s|0,gt(s)}function PDe(s,l,c,f,d,m,B){return s=s|0,l=l|0,c=c|0,f=f|0,d=d|0,m=m|0,B=B|0,m=SDe(n[s>>2]|0,l,c,f,d,m,B)|0,B=s+4|0,n[(n[B>>2]|0)+8>>2]=m,n[(n[B>>2]|0)+8>>2]|0}function SDe(s,l,c,f,d,m,B){s=s|0,l=l|0,c=c|0,f=f|0,d=d|0,m=m|0,B=B|0;var k=0,Q=0;return k=C,C=C+16|0,Q=k,za(Q),s=ya(s)|0,B=bDe(s,+E[l>>3],+E[c>>3],+E[f>>3],+E[d>>3],+E[m>>3],+E[B>>3])|0,Ja(Q),C=k,B|0}function bDe(s,l,c,f,d,m,B){s=s|0,l=+l,c=+c,f=+f,d=+d,m=+m,B=+B;var k=0;return k=Pl(xDe()|0)|0,l=+VA(l),c=+VA(c),f=+VA(f),d=+VA(d),m=+VA(m),_s(0,k|0,s|0,+l,+c,+f,+d,+m,+ +VA(B))|0}function xDe(){var s=0;return o[7624]|0||(kDe(9172),s=7624,n[s>>2]=1,n[s+4>>2]=0),9172}function kDe(s){s=s|0,Sl(s,QDe()|0,6)}function QDe(){return 1112}function FDe(s){s=s|0,wp(s)}function RDe(s){s=s|0,eG(s+24|0),tG(s+16|0)}function eG(s){s=s|0,NDe(s)}function tG(s){s=s|0,TDe(s)}function TDe(s){s=s|0;var l=0,c=0;if(l=n[s>>2]|0,l|0)do c=l,l=n[l>>2]|0,gt(c);while(l|0);n[s>>2]=0}function NDe(s){s=s|0;var 
l=0,c=0;if(l=n[s>>2]|0,l|0)do c=l,l=n[l>>2]|0,gt(c);while(l|0);n[s>>2]=0}function wp(s){s=s|0;var l=0;n[s+16>>2]=0,n[s+20>>2]=0,l=s+24|0,n[l>>2]=0,n[s+28>>2]=l,n[s+36>>2]=0,o[s+40>>0]=0,o[s+41>>0]=0}function LDe(s,l,c){s=s|0,l=l|0,c=c|0;var f=0,d=0,m=0,B=0,k=0;f=C,C=C+16|0,d=f+8|0,m=f,k=n[c>>2]|0,B=n[c+4>>2]|0,c=pn(l)|0,n[m>>2]=k,n[m+4>>2]=B,n[d>>2]=n[m>>2],n[d+4>>2]=n[m+4>>2],MDe(s,c,d,0),C=f}function MDe(s,l,c,f){s=s|0,l=l|0,c=c|0,f=f|0;var d=0,m=0,B=0,k=0,Q=0,O=0,M=0;d=C,C=C+32|0,m=d+16|0,M=d+8|0,k=d,O=n[c>>2]|0,Q=n[c+4>>2]|0,B=n[s>>2]|0,s=IF()|0,n[M>>2]=O,n[M+4>>2]=Q,n[m>>2]=n[M>>2],n[m+4>>2]=n[M+4>>2],c=ODe(m)|0,n[k>>2]=O,n[k+4>>2]=Q,n[m>>2]=n[k>>2],n[m+4>>2]=n[k+4>>2],hn(B,l,s,c,UDe(m,f)|0,f),C=d}function IF(){var s=0,l=0;if(o[7640]|0||(nG(9232),ir(26,9232,U|0)|0,l=7640,n[l>>2]=1,n[l+4>>2]=0),!(Rr(9232)|0)){s=9232,l=s+36|0;do n[s>>2]=0,s=s+4|0;while((s|0)<(l|0));nG(9232)}return 9232}function ODe(s){return s=s|0,0}function UDe(s,l){s=s|0,l=l|0;var c=0,f=0,d=0,m=0,B=0,k=0,Q=0,O=0,M=0,j=0;return M=C,C=C+32|0,d=M+24|0,B=M+16|0,k=M,Q=M+8|0,m=n[s>>2]|0,f=n[s+4>>2]|0,n[k>>2]=m,n[k+4>>2]=f,j=IF()|0,O=j+24|0,s=gr(l,4)|0,n[Q>>2]=s,l=j+28|0,c=n[l>>2]|0,c>>>0<(n[j+32>>2]|0)>>>0?(n[B>>2]=m,n[B+4>>2]=f,n[d>>2]=n[B>>2],n[d+4>>2]=n[B+4>>2],rG(c,d,s),s=(n[l>>2]|0)+12|0,n[l>>2]=s):(_De(O,k,Q),s=n[l>>2]|0),C=M,((s-(n[O>>2]|0)|0)/12|0)+-1|0}function rG(s,l,c){s=s|0,l=l|0,c=c|0;var f=0;f=n[l+4>>2]|0,n[s>>2]=n[l>>2],n[s+4>>2]=f,n[s+8>>2]=c}function _De(s,l,c){s=s|0,l=l|0,c=c|0;var f=0,d=0,m=0,B=0,k=0,Q=0,O=0,M=0,j=0,se=0;if(O=C,C=C+48|0,f=O+32|0,B=O+24|0,k=O,Q=s+4|0,d=(((n[Q>>2]|0)-(n[s>>2]|0)|0)/12|0)+1|0,m=HDe(s)|0,m>>>0>>0)Jr(s);else{M=n[s>>2]|0,se=((n[s+8>>2]|0)-M|0)/12|0,j=se<<1,qDe(k,se>>>0>>1>>>0?j>>>0>>0?d:j:m,((n[Q>>2]|0)-M|0)/12|0,s+8|0),Q=k+8|0,m=n[Q>>2]|0,d=n[l+4>>2]|0,c=n[c>>2]|0,n[B>>2]=n[l>>2],n[B+4>>2]=d,n[f>>2]=n[B>>2],n[f+4>>2]=n[B+4>>2],rG(m,f,c),n[Q>>2]=(n[Q>>2]|0)+12,jDe(s,k),GDe(k),C=O;return}}function HDe(s){return s=s|0,357913941}function qDe(s,l,c,f){s=s|0,l=l|0,c=c|0,f=f|0;var d=0;n[s+12>>2]=0,n[s+16>>2]=f;do if(l)if(l>>>0>357913941)Tt();else{d=Kt(l*12|0)|0;break}else d=0;while(!1);n[s>>2]=d,f=d+(c*12|0)|0,n[s+8>>2]=f,n[s+4>>2]=f,n[s+12>>2]=d+(l*12|0)}function jDe(s,l){s=s|0,l=l|0;var c=0,f=0,d=0,m=0,B=0;f=n[s>>2]|0,B=s+4|0,m=l+4|0,d=(n[B>>2]|0)-f|0,c=(n[m>>2]|0)+(((d|0)/-12|0)*12|0)|0,n[m>>2]=c,(d|0)>0?(Dr(c|0,f|0,d|0)|0,f=m,c=n[m>>2]|0):f=m,m=n[s>>2]|0,n[s>>2]=c,n[f>>2]=m,m=l+8|0,d=n[B>>2]|0,n[B>>2]=n[m>>2],n[m>>2]=d,m=s+8|0,B=l+12|0,s=n[m>>2]|0,n[m>>2]=n[B>>2],n[B>>2]=s,n[l>>2]=n[f>>2]}function GDe(s){s=s|0;var l=0,c=0,f=0;l=n[s+4>>2]|0,c=s+8|0,f=n[c>>2]|0,(f|0)!=(l|0)&&(n[c>>2]=f+(~(((f+-12-l|0)>>>0)/12|0)*12|0)),s=n[s>>2]|0,s|0&>(s)}function nG(s){s=s|0,KDe(s)}function YDe(s){s=s|0,WDe(s+24|0)}function WDe(s){s=s|0;var l=0,c=0,f=0;c=n[s>>2]|0,f=c,c|0&&(s=s+4|0,l=n[s>>2]|0,(l|0)!=(c|0)&&(n[s>>2]=l+(~(((l+-12-f|0)>>>0)/12|0)*12|0)),gt(c))}function KDe(s){s=s|0;var l=0;l=Vr()|0,zr(s,2,1,l,VDe()|0,3),n[s+24>>2]=0,n[s+28>>2]=0,n[s+32>>2]=0}function VDe(){return 1144}function zDe(s,l,c,f,d){s=s|0,l=l|0,c=+c,f=+f,d=d|0;var m=0,B=0,k=0,Q=0;m=C,C=C+16|0,B=m+8|0,k=m,Q=JDe(s)|0,s=n[Q+4>>2]|0,n[k>>2]=n[Q>>2],n[k+4>>2]=s,n[B>>2]=n[k>>2],n[B+4>>2]=n[k+4>>2],XDe(l,B,c,f,d),C=m}function JDe(s){return s=s|0,(n[(IF()|0)+24>>2]|0)+(s*12|0)|0}function XDe(s,l,c,f,d){s=s|0,l=l|0,c=+c,f=+f,d=d|0;var 
m=0,B=0,k=0,Q=0,O=0;O=C,C=C+16|0,B=O+2|0,k=O+1|0,Q=O,m=n[l>>2]|0,l=n[l+4>>2]|0,s=s+(l>>1)|0,l&1&&(m=n[(n[s>>2]|0)+m>>2]|0),Fu(B,c),c=+Ru(B,c),Fu(k,f),f=+Ru(k,f),XA(Q,d),Q=ZA(Q,d)|0,y7[m&1](s,c,f,Q),C=O}function Fu(s,l){s=s|0,l=+l}function Ru(s,l){return s=s|0,l=+l,+ +$De(l)}function XA(s,l){s=s|0,l=l|0}function ZA(s,l){return s=s|0,l=l|0,ZDe(l)|0}function ZDe(s){return s=s|0,s|0}function $De(s){return s=+s,+s}function ePe(s,l,c){s=s|0,l=l|0,c=c|0;var f=0,d=0,m=0,B=0,k=0;f=C,C=C+16|0,d=f+8|0,m=f,k=n[c>>2]|0,B=n[c+4>>2]|0,c=pn(l)|0,n[m>>2]=k,n[m+4>>2]=B,n[d>>2]=n[m>>2],n[d+4>>2]=n[m+4>>2],tPe(s,c,d,1),C=f}function tPe(s,l,c,f){s=s|0,l=l|0,c=c|0,f=f|0;var d=0,m=0,B=0,k=0,Q=0,O=0,M=0;d=C,C=C+32|0,m=d+16|0,M=d+8|0,k=d,O=n[c>>2]|0,Q=n[c+4>>2]|0,B=n[s>>2]|0,s=BF()|0,n[M>>2]=O,n[M+4>>2]=Q,n[m>>2]=n[M>>2],n[m+4>>2]=n[M+4>>2],c=rPe(m)|0,n[k>>2]=O,n[k+4>>2]=Q,n[m>>2]=n[k>>2],n[m+4>>2]=n[k+4>>2],hn(B,l,s,c,nPe(m,f)|0,f),C=d}function BF(){var s=0,l=0;if(o[7648]|0||(sG(9268),ir(27,9268,U|0)|0,l=7648,n[l>>2]=1,n[l+4>>2]=0),!(Rr(9268)|0)){s=9268,l=s+36|0;do n[s>>2]=0,s=s+4|0;while((s|0)<(l|0));sG(9268)}return 9268}function rPe(s){return s=s|0,0}function nPe(s,l){s=s|0,l=l|0;var c=0,f=0,d=0,m=0,B=0,k=0,Q=0,O=0,M=0,j=0;return M=C,C=C+32|0,d=M+24|0,B=M+16|0,k=M,Q=M+8|0,m=n[s>>2]|0,f=n[s+4>>2]|0,n[k>>2]=m,n[k+4>>2]=f,j=BF()|0,O=j+24|0,s=gr(l,4)|0,n[Q>>2]=s,l=j+28|0,c=n[l>>2]|0,c>>>0<(n[j+32>>2]|0)>>>0?(n[B>>2]=m,n[B+4>>2]=f,n[d>>2]=n[B>>2],n[d+4>>2]=n[B+4>>2],iG(c,d,s),s=(n[l>>2]|0)+12|0,n[l>>2]=s):(iPe(O,k,Q),s=n[l>>2]|0),C=M,((s-(n[O>>2]|0)|0)/12|0)+-1|0}function iG(s,l,c){s=s|0,l=l|0,c=c|0;var f=0;f=n[l+4>>2]|0,n[s>>2]=n[l>>2],n[s+4>>2]=f,n[s+8>>2]=c}function iPe(s,l,c){s=s|0,l=l|0,c=c|0;var f=0,d=0,m=0,B=0,k=0,Q=0,O=0,M=0,j=0,se=0;if(O=C,C=C+48|0,f=O+32|0,B=O+24|0,k=O,Q=s+4|0,d=(((n[Q>>2]|0)-(n[s>>2]|0)|0)/12|0)+1|0,m=sPe(s)|0,m>>>0>>0)Jr(s);else{M=n[s>>2]|0,se=((n[s+8>>2]|0)-M|0)/12|0,j=se<<1,oPe(k,se>>>0>>1>>>0?j>>>0>>0?d:j:m,((n[Q>>2]|0)-M|0)/12|0,s+8|0),Q=k+8|0,m=n[Q>>2]|0,d=n[l+4>>2]|0,c=n[c>>2]|0,n[B>>2]=n[l>>2],n[B+4>>2]=d,n[f>>2]=n[B>>2],n[f+4>>2]=n[B+4>>2],iG(m,f,c),n[Q>>2]=(n[Q>>2]|0)+12,aPe(s,k),lPe(k),C=O;return}}function sPe(s){return s=s|0,357913941}function oPe(s,l,c,f){s=s|0,l=l|0,c=c|0,f=f|0;var d=0;n[s+12>>2]=0,n[s+16>>2]=f;do if(l)if(l>>>0>357913941)Tt();else{d=Kt(l*12|0)|0;break}else d=0;while(!1);n[s>>2]=d,f=d+(c*12|0)|0,n[s+8>>2]=f,n[s+4>>2]=f,n[s+12>>2]=d+(l*12|0)}function aPe(s,l){s=s|0,l=l|0;var c=0,f=0,d=0,m=0,B=0;f=n[s>>2]|0,B=s+4|0,m=l+4|0,d=(n[B>>2]|0)-f|0,c=(n[m>>2]|0)+(((d|0)/-12|0)*12|0)|0,n[m>>2]=c,(d|0)>0?(Dr(c|0,f|0,d|0)|0,f=m,c=n[m>>2]|0):f=m,m=n[s>>2]|0,n[s>>2]=c,n[f>>2]=m,m=l+8|0,d=n[B>>2]|0,n[B>>2]=n[m>>2],n[m>>2]=d,m=s+8|0,B=l+12|0,s=n[m>>2]|0,n[m>>2]=n[B>>2],n[B>>2]=s,n[l>>2]=n[f>>2]}function lPe(s){s=s|0;var l=0,c=0,f=0;l=n[s+4>>2]|0,c=s+8|0,f=n[c>>2]|0,(f|0)!=(l|0)&&(n[c>>2]=f+(~(((f+-12-l|0)>>>0)/12|0)*12|0)),s=n[s>>2]|0,s|0&>(s)}function sG(s){s=s|0,APe(s)}function cPe(s){s=s|0,uPe(s+24|0)}function uPe(s){s=s|0;var l=0,c=0,f=0;c=n[s>>2]|0,f=c,c|0&&(s=s+4|0,l=n[s>>2]|0,(l|0)!=(c|0)&&(n[s>>2]=l+(~(((l+-12-f|0)>>>0)/12|0)*12|0)),gt(c))}function APe(s){s=s|0;var l=0;l=Vr()|0,zr(s,2,4,l,fPe()|0,0),n[s+24>>2]=0,n[s+28>>2]=0,n[s+32>>2]=0}function fPe(){return 1160}function pPe(s,l){s=s|0,l=l|0;var c=0,f=0,d=0,m=0;return c=C,C=C+16|0,f=c+8|0,d=c,m=hPe(s)|0,s=n[m+4>>2]|0,n[d>>2]=n[m>>2],n[d+4>>2]=s,n[f>>2]=n[d>>2],n[f+4>>2]=n[d+4>>2],l=gPe(l,f)|0,C=c,l|0}function hPe(s){return s=s|0,(n[(BF()|0)+24>>2]|0)+(s*12|0)|0}function gPe(s,l){s=s|0,l=l|0;var c=0;return 
c=n[l>>2]|0,l=n[l+4>>2]|0,s=s+(l>>1)|0,l&1&&(c=n[(n[s>>2]|0)+c>>2]|0),oG(F0[c&31](s)|0)|0}function oG(s){return s=s|0,s&1|0}function dPe(s,l,c){s=s|0,l=l|0,c=c|0;var f=0,d=0,m=0,B=0,k=0;f=C,C=C+16|0,d=f+8|0,m=f,k=n[c>>2]|0,B=n[c+4>>2]|0,c=pn(l)|0,n[m>>2]=k,n[m+4>>2]=B,n[d>>2]=n[m>>2],n[d+4>>2]=n[m+4>>2],mPe(s,c,d,0),C=f}function mPe(s,l,c,f){s=s|0,l=l|0,c=c|0,f=f|0;var d=0,m=0,B=0,k=0,Q=0,O=0,M=0;d=C,C=C+32|0,m=d+16|0,M=d+8|0,k=d,O=n[c>>2]|0,Q=n[c+4>>2]|0,B=n[s>>2]|0,s=vF()|0,n[M>>2]=O,n[M+4>>2]=Q,n[m>>2]=n[M>>2],n[m+4>>2]=n[M+4>>2],c=yPe(m)|0,n[k>>2]=O,n[k+4>>2]=Q,n[m>>2]=n[k>>2],n[m+4>>2]=n[k+4>>2],hn(B,l,s,c,EPe(m,f)|0,f),C=d}function vF(){var s=0,l=0;if(o[7656]|0||(lG(9304),ir(28,9304,U|0)|0,l=7656,n[l>>2]=1,n[l+4>>2]=0),!(Rr(9304)|0)){s=9304,l=s+36|0;do n[s>>2]=0,s=s+4|0;while((s|0)<(l|0));lG(9304)}return 9304}function yPe(s){return s=s|0,0}function EPe(s,l){s=s|0,l=l|0;var c=0,f=0,d=0,m=0,B=0,k=0,Q=0,O=0,M=0,j=0;return M=C,C=C+32|0,d=M+24|0,B=M+16|0,k=M,Q=M+8|0,m=n[s>>2]|0,f=n[s+4>>2]|0,n[k>>2]=m,n[k+4>>2]=f,j=vF()|0,O=j+24|0,s=gr(l,4)|0,n[Q>>2]=s,l=j+28|0,c=n[l>>2]|0,c>>>0<(n[j+32>>2]|0)>>>0?(n[B>>2]=m,n[B+4>>2]=f,n[d>>2]=n[B>>2],n[d+4>>2]=n[B+4>>2],aG(c,d,s),s=(n[l>>2]|0)+12|0,n[l>>2]=s):(CPe(O,k,Q),s=n[l>>2]|0),C=M,((s-(n[O>>2]|0)|0)/12|0)+-1|0}function aG(s,l,c){s=s|0,l=l|0,c=c|0;var f=0;f=n[l+4>>2]|0,n[s>>2]=n[l>>2],n[s+4>>2]=f,n[s+8>>2]=c}function CPe(s,l,c){s=s|0,l=l|0,c=c|0;var f=0,d=0,m=0,B=0,k=0,Q=0,O=0,M=0,j=0,se=0;if(O=C,C=C+48|0,f=O+32|0,B=O+24|0,k=O,Q=s+4|0,d=(((n[Q>>2]|0)-(n[s>>2]|0)|0)/12|0)+1|0,m=wPe(s)|0,m>>>0>>0)Jr(s);else{M=n[s>>2]|0,se=((n[s+8>>2]|0)-M|0)/12|0,j=se<<1,IPe(k,se>>>0>>1>>>0?j>>>0>>0?d:j:m,((n[Q>>2]|0)-M|0)/12|0,s+8|0),Q=k+8|0,m=n[Q>>2]|0,d=n[l+4>>2]|0,c=n[c>>2]|0,n[B>>2]=n[l>>2],n[B+4>>2]=d,n[f>>2]=n[B>>2],n[f+4>>2]=n[B+4>>2],aG(m,f,c),n[Q>>2]=(n[Q>>2]|0)+12,BPe(s,k),vPe(k),C=O;return}}function wPe(s){return s=s|0,357913941}function IPe(s,l,c,f){s=s|0,l=l|0,c=c|0,f=f|0;var d=0;n[s+12>>2]=0,n[s+16>>2]=f;do if(l)if(l>>>0>357913941)Tt();else{d=Kt(l*12|0)|0;break}else d=0;while(!1);n[s>>2]=d,f=d+(c*12|0)|0,n[s+8>>2]=f,n[s+4>>2]=f,n[s+12>>2]=d+(l*12|0)}function BPe(s,l){s=s|0,l=l|0;var c=0,f=0,d=0,m=0,B=0;f=n[s>>2]|0,B=s+4|0,m=l+4|0,d=(n[B>>2]|0)-f|0,c=(n[m>>2]|0)+(((d|0)/-12|0)*12|0)|0,n[m>>2]=c,(d|0)>0?(Dr(c|0,f|0,d|0)|0,f=m,c=n[m>>2]|0):f=m,m=n[s>>2]|0,n[s>>2]=c,n[f>>2]=m,m=l+8|0,d=n[B>>2]|0,n[B>>2]=n[m>>2],n[m>>2]=d,m=s+8|0,B=l+12|0,s=n[m>>2]|0,n[m>>2]=n[B>>2],n[B>>2]=s,n[l>>2]=n[f>>2]}function vPe(s){s=s|0;var l=0,c=0,f=0;l=n[s+4>>2]|0,c=s+8|0,f=n[c>>2]|0,(f|0)!=(l|0)&&(n[c>>2]=f+(~(((f+-12-l|0)>>>0)/12|0)*12|0)),s=n[s>>2]|0,s|0&>(s)}function lG(s){s=s|0,SPe(s)}function DPe(s){s=s|0,PPe(s+24|0)}function PPe(s){s=s|0;var l=0,c=0,f=0;c=n[s>>2]|0,f=c,c|0&&(s=s+4|0,l=n[s>>2]|0,(l|0)!=(c|0)&&(n[s>>2]=l+(~(((l+-12-f|0)>>>0)/12|0)*12|0)),gt(c))}function SPe(s){s=s|0;var l=0;l=Vr()|0,zr(s,2,5,l,bPe()|0,1),n[s+24>>2]=0,n[s+28>>2]=0,n[s+32>>2]=0}function bPe(){return 1164}function xPe(s,l,c){s=s|0,l=l|0,c=c|0;var f=0,d=0,m=0,B=0;f=C,C=C+16|0,d=f+8|0,m=f,B=kPe(s)|0,s=n[B+4>>2]|0,n[m>>2]=n[B>>2],n[m+4>>2]=s,n[d>>2]=n[m>>2],n[d+4>>2]=n[m+4>>2],QPe(l,d,c),C=f}function kPe(s){return s=s|0,(n[(vF()|0)+24>>2]|0)+(s*12|0)|0}function QPe(s,l,c){s=s|0,l=l|0,c=c|0;var f=0,d=0,m=0;m=C,C=C+16|0,d=m,f=n[l>>2]|0,l=n[l+4>>2]|0,s=s+(l>>1)|0,l&1&&(f=n[(n[s>>2]|0)+f>>2]|0),Ip(d,c),c=Bp(d,c)|0,tf[f&31](s,c),vp(d),C=m}function Ip(s,l){s=s|0,l=l|0,FPe(s,l)}function Bp(s,l){return s=s|0,l=l|0,s|0}function vp(s){s=s|0,qA(s)}function FPe(s,l){s=s|0,l=l|0,DF(s,l)}function 
DF(s,l){s=s|0,l=l|0,n[s>>2]=l}function RPe(s,l,c){s=s|0,l=l|0,c=c|0;var f=0,d=0,m=0,B=0,k=0;f=C,C=C+16|0,d=f+8|0,m=f,k=n[c>>2]|0,B=n[c+4>>2]|0,c=pn(l)|0,n[m>>2]=k,n[m+4>>2]=B,n[d>>2]=n[m>>2],n[d+4>>2]=n[m+4>>2],TPe(s,c,d,0),C=f}function TPe(s,l,c,f){s=s|0,l=l|0,c=c|0,f=f|0;var d=0,m=0,B=0,k=0,Q=0,O=0,M=0;d=C,C=C+32|0,m=d+16|0,M=d+8|0,k=d,O=n[c>>2]|0,Q=n[c+4>>2]|0,B=n[s>>2]|0,s=PF()|0,n[M>>2]=O,n[M+4>>2]=Q,n[m>>2]=n[M>>2],n[m+4>>2]=n[M+4>>2],c=NPe(m)|0,n[k>>2]=O,n[k+4>>2]=Q,n[m>>2]=n[k>>2],n[m+4>>2]=n[k+4>>2],hn(B,l,s,c,LPe(m,f)|0,f),C=d}function PF(){var s=0,l=0;if(o[7664]|0||(uG(9340),ir(29,9340,U|0)|0,l=7664,n[l>>2]=1,n[l+4>>2]=0),!(Rr(9340)|0)){s=9340,l=s+36|0;do n[s>>2]=0,s=s+4|0;while((s|0)<(l|0));uG(9340)}return 9340}function NPe(s){return s=s|0,0}function LPe(s,l){s=s|0,l=l|0;var c=0,f=0,d=0,m=0,B=0,k=0,Q=0,O=0,M=0,j=0;return M=C,C=C+32|0,d=M+24|0,B=M+16|0,k=M,Q=M+8|0,m=n[s>>2]|0,f=n[s+4>>2]|0,n[k>>2]=m,n[k+4>>2]=f,j=PF()|0,O=j+24|0,s=gr(l,4)|0,n[Q>>2]=s,l=j+28|0,c=n[l>>2]|0,c>>>0<(n[j+32>>2]|0)>>>0?(n[B>>2]=m,n[B+4>>2]=f,n[d>>2]=n[B>>2],n[d+4>>2]=n[B+4>>2],cG(c,d,s),s=(n[l>>2]|0)+12|0,n[l>>2]=s):(MPe(O,k,Q),s=n[l>>2]|0),C=M,((s-(n[O>>2]|0)|0)/12|0)+-1|0}function cG(s,l,c){s=s|0,l=l|0,c=c|0;var f=0;f=n[l+4>>2]|0,n[s>>2]=n[l>>2],n[s+4>>2]=f,n[s+8>>2]=c}function MPe(s,l,c){s=s|0,l=l|0,c=c|0;var f=0,d=0,m=0,B=0,k=0,Q=0,O=0,M=0,j=0,se=0;if(O=C,C=C+48|0,f=O+32|0,B=O+24|0,k=O,Q=s+4|0,d=(((n[Q>>2]|0)-(n[s>>2]|0)|0)/12|0)+1|0,m=OPe(s)|0,m>>>0>>0)Jr(s);else{M=n[s>>2]|0,se=((n[s+8>>2]|0)-M|0)/12|0,j=se<<1,UPe(k,se>>>0>>1>>>0?j>>>0>>0?d:j:m,((n[Q>>2]|0)-M|0)/12|0,s+8|0),Q=k+8|0,m=n[Q>>2]|0,d=n[l+4>>2]|0,c=n[c>>2]|0,n[B>>2]=n[l>>2],n[B+4>>2]=d,n[f>>2]=n[B>>2],n[f+4>>2]=n[B+4>>2],cG(m,f,c),n[Q>>2]=(n[Q>>2]|0)+12,_Pe(s,k),HPe(k),C=O;return}}function OPe(s){return s=s|0,357913941}function UPe(s,l,c,f){s=s|0,l=l|0,c=c|0,f=f|0;var d=0;n[s+12>>2]=0,n[s+16>>2]=f;do if(l)if(l>>>0>357913941)Tt();else{d=Kt(l*12|0)|0;break}else d=0;while(!1);n[s>>2]=d,f=d+(c*12|0)|0,n[s+8>>2]=f,n[s+4>>2]=f,n[s+12>>2]=d+(l*12|0)}function _Pe(s,l){s=s|0,l=l|0;var c=0,f=0,d=0,m=0,B=0;f=n[s>>2]|0,B=s+4|0,m=l+4|0,d=(n[B>>2]|0)-f|0,c=(n[m>>2]|0)+(((d|0)/-12|0)*12|0)|0,n[m>>2]=c,(d|0)>0?(Dr(c|0,f|0,d|0)|0,f=m,c=n[m>>2]|0):f=m,m=n[s>>2]|0,n[s>>2]=c,n[f>>2]=m,m=l+8|0,d=n[B>>2]|0,n[B>>2]=n[m>>2],n[m>>2]=d,m=s+8|0,B=l+12|0,s=n[m>>2]|0,n[m>>2]=n[B>>2],n[B>>2]=s,n[l>>2]=n[f>>2]}function HPe(s){s=s|0;var l=0,c=0,f=0;l=n[s+4>>2]|0,c=s+8|0,f=n[c>>2]|0,(f|0)!=(l|0)&&(n[c>>2]=f+(~(((f+-12-l|0)>>>0)/12|0)*12|0)),s=n[s>>2]|0,s|0&>(s)}function uG(s){s=s|0,GPe(s)}function qPe(s){s=s|0,jPe(s+24|0)}function jPe(s){s=s|0;var l=0,c=0,f=0;c=n[s>>2]|0,f=c,c|0&&(s=s+4|0,l=n[s>>2]|0,(l|0)!=(c|0)&&(n[s>>2]=l+(~(((l+-12-f|0)>>>0)/12|0)*12|0)),gt(c))}function GPe(s){s=s|0;var l=0;l=Vr()|0,zr(s,2,4,l,YPe()|0,1),n[s+24>>2]=0,n[s+28>>2]=0,n[s+32>>2]=0}function YPe(){return 1180}function WPe(s,l,c){s=s|0,l=l|0,c=c|0;var f=0,d=0,m=0,B=0;return f=C,C=C+16|0,d=f+8|0,m=f,B=KPe(s)|0,s=n[B+4>>2]|0,n[m>>2]=n[B>>2],n[m+4>>2]=s,n[d>>2]=n[m>>2],n[d+4>>2]=n[m+4>>2],c=VPe(l,d,c)|0,C=f,c|0}function KPe(s){return s=s|0,(n[(PF()|0)+24>>2]|0)+(s*12|0)|0}function VPe(s,l,c){s=s|0,l=l|0,c=c|0;var f=0,d=0,m=0;return m=C,C=C+16|0,d=m,f=n[l>>2]|0,l=n[l+4>>2]|0,s=s+(l>>1)|0,l&1&&(f=n[(n[s>>2]|0)+f>>2]|0),v0(d,c),d=D0(d,c)|0,d=qv(IR[f&15](s,d)|0)|0,C=m,d|0}function v0(s,l){s=s|0,l=l|0}function D0(s,l){return s=s|0,l=l|0,zPe(l)|0}function qv(s){return s=s|0,s|0}function zPe(s){return s=s|0,s|0}function JPe(s,l,c){s=s|0,l=l|0,c=c|0;var 
f=0,d=0,m=0,B=0,k=0;f=C,C=C+16|0,d=f+8|0,m=f,k=n[c>>2]|0,B=n[c+4>>2]|0,c=pn(l)|0,n[m>>2]=k,n[m+4>>2]=B,n[d>>2]=n[m>>2],n[d+4>>2]=n[m+4>>2],XPe(s,c,d,0),C=f}function XPe(s,l,c,f){s=s|0,l=l|0,c=c|0,f=f|0;var d=0,m=0,B=0,k=0,Q=0,O=0,M=0;d=C,C=C+32|0,m=d+16|0,M=d+8|0,k=d,O=n[c>>2]|0,Q=n[c+4>>2]|0,B=n[s>>2]|0,s=SF()|0,n[M>>2]=O,n[M+4>>2]=Q,n[m>>2]=n[M>>2],n[m+4>>2]=n[M+4>>2],c=ZPe(m)|0,n[k>>2]=O,n[k+4>>2]=Q,n[m>>2]=n[k>>2],n[m+4>>2]=n[k+4>>2],hn(B,l,s,c,$Pe(m,f)|0,f),C=d}function SF(){var s=0,l=0;if(o[7672]|0||(fG(9376),ir(30,9376,U|0)|0,l=7672,n[l>>2]=1,n[l+4>>2]=0),!(Rr(9376)|0)){s=9376,l=s+36|0;do n[s>>2]=0,s=s+4|0;while((s|0)<(l|0));fG(9376)}return 9376}function ZPe(s){return s=s|0,0}function $Pe(s,l){s=s|0,l=l|0;var c=0,f=0,d=0,m=0,B=0,k=0,Q=0,O=0,M=0,j=0;return M=C,C=C+32|0,d=M+24|0,B=M+16|0,k=M,Q=M+8|0,m=n[s>>2]|0,f=n[s+4>>2]|0,n[k>>2]=m,n[k+4>>2]=f,j=SF()|0,O=j+24|0,s=gr(l,4)|0,n[Q>>2]=s,l=j+28|0,c=n[l>>2]|0,c>>>0<(n[j+32>>2]|0)>>>0?(n[B>>2]=m,n[B+4>>2]=f,n[d>>2]=n[B>>2],n[d+4>>2]=n[B+4>>2],AG(c,d,s),s=(n[l>>2]|0)+12|0,n[l>>2]=s):(eSe(O,k,Q),s=n[l>>2]|0),C=M,((s-(n[O>>2]|0)|0)/12|0)+-1|0}function AG(s,l,c){s=s|0,l=l|0,c=c|0;var f=0;f=n[l+4>>2]|0,n[s>>2]=n[l>>2],n[s+4>>2]=f,n[s+8>>2]=c}function eSe(s,l,c){s=s|0,l=l|0,c=c|0;var f=0,d=0,m=0,B=0,k=0,Q=0,O=0,M=0,j=0,se=0;if(O=C,C=C+48|0,f=O+32|0,B=O+24|0,k=O,Q=s+4|0,d=(((n[Q>>2]|0)-(n[s>>2]|0)|0)/12|0)+1|0,m=tSe(s)|0,m>>>0>>0)Jr(s);else{M=n[s>>2]|0,se=((n[s+8>>2]|0)-M|0)/12|0,j=se<<1,rSe(k,se>>>0>>1>>>0?j>>>0>>0?d:j:m,((n[Q>>2]|0)-M|0)/12|0,s+8|0),Q=k+8|0,m=n[Q>>2]|0,d=n[l+4>>2]|0,c=n[c>>2]|0,n[B>>2]=n[l>>2],n[B+4>>2]=d,n[f>>2]=n[B>>2],n[f+4>>2]=n[B+4>>2],AG(m,f,c),n[Q>>2]=(n[Q>>2]|0)+12,nSe(s,k),iSe(k),C=O;return}}function tSe(s){return s=s|0,357913941}function rSe(s,l,c,f){s=s|0,l=l|0,c=c|0,f=f|0;var d=0;n[s+12>>2]=0,n[s+16>>2]=f;do if(l)if(l>>>0>357913941)Tt();else{d=Kt(l*12|0)|0;break}else d=0;while(!1);n[s>>2]=d,f=d+(c*12|0)|0,n[s+8>>2]=f,n[s+4>>2]=f,n[s+12>>2]=d+(l*12|0)}function nSe(s,l){s=s|0,l=l|0;var c=0,f=0,d=0,m=0,B=0;f=n[s>>2]|0,B=s+4|0,m=l+4|0,d=(n[B>>2]|0)-f|0,c=(n[m>>2]|0)+(((d|0)/-12|0)*12|0)|0,n[m>>2]=c,(d|0)>0?(Dr(c|0,f|0,d|0)|0,f=m,c=n[m>>2]|0):f=m,m=n[s>>2]|0,n[s>>2]=c,n[f>>2]=m,m=l+8|0,d=n[B>>2]|0,n[B>>2]=n[m>>2],n[m>>2]=d,m=s+8|0,B=l+12|0,s=n[m>>2]|0,n[m>>2]=n[B>>2],n[B>>2]=s,n[l>>2]=n[f>>2]}function iSe(s){s=s|0;var l=0,c=0,f=0;l=n[s+4>>2]|0,c=s+8|0,f=n[c>>2]|0,(f|0)!=(l|0)&&(n[c>>2]=f+(~(((f+-12-l|0)>>>0)/12|0)*12|0)),s=n[s>>2]|0,s|0&>(s)}function fG(s){s=s|0,aSe(s)}function sSe(s){s=s|0,oSe(s+24|0)}function oSe(s){s=s|0;var l=0,c=0,f=0;c=n[s>>2]|0,f=c,c|0&&(s=s+4|0,l=n[s>>2]|0,(l|0)!=(c|0)&&(n[s>>2]=l+(~(((l+-12-f|0)>>>0)/12|0)*12|0)),gt(c))}function aSe(s){s=s|0;var l=0;l=Vr()|0,zr(s,2,5,l,pG()|0,0),n[s+24>>2]=0,n[s+28>>2]=0,n[s+32>>2]=0}function pG(){return 1196}function lSe(s,l){s=s|0,l=l|0;var c=0,f=0,d=0,m=0;return c=C,C=C+16|0,f=c+8|0,d=c,m=cSe(s)|0,s=n[m+4>>2]|0,n[d>>2]=n[m>>2],n[d+4>>2]=s,n[f>>2]=n[d>>2],n[f+4>>2]=n[d+4>>2],l=uSe(l,f)|0,C=c,l|0}function cSe(s){return s=s|0,(n[(SF()|0)+24>>2]|0)+(s*12|0)|0}function uSe(s,l){s=s|0,l=l|0;var c=0;return c=n[l>>2]|0,l=n[l+4>>2]|0,s=s+(l>>1)|0,l&1&&(c=n[(n[s>>2]|0)+c>>2]|0),qv(F0[c&31](s)|0)|0}function ASe(s,l,c){s=s|0,l=l|0,c=c|0;var f=0,d=0,m=0,B=0,k=0;f=C,C=C+16|0,d=f+8|0,m=f,k=n[c>>2]|0,B=n[c+4>>2]|0,c=pn(l)|0,n[m>>2]=k,n[m+4>>2]=B,n[d>>2]=n[m>>2],n[d+4>>2]=n[m+4>>2],fSe(s,c,d,1),C=f}function fSe(s,l,c,f){s=s|0,l=l|0,c=c|0,f=f|0;var 
d=0,m=0,B=0,k=0,Q=0,O=0,M=0;d=C,C=C+32|0,m=d+16|0,M=d+8|0,k=d,O=n[c>>2]|0,Q=n[c+4>>2]|0,B=n[s>>2]|0,s=bF()|0,n[M>>2]=O,n[M+4>>2]=Q,n[m>>2]=n[M>>2],n[m+4>>2]=n[M+4>>2],c=pSe(m)|0,n[k>>2]=O,n[k+4>>2]=Q,n[m>>2]=n[k>>2],n[m+4>>2]=n[k+4>>2],hn(B,l,s,c,hSe(m,f)|0,f),C=d}function bF(){var s=0,l=0;if(o[7680]|0||(gG(9412),ir(31,9412,U|0)|0,l=7680,n[l>>2]=1,n[l+4>>2]=0),!(Rr(9412)|0)){s=9412,l=s+36|0;do n[s>>2]=0,s=s+4|0;while((s|0)<(l|0));gG(9412)}return 9412}function pSe(s){return s=s|0,0}function hSe(s,l){s=s|0,l=l|0;var c=0,f=0,d=0,m=0,B=0,k=0,Q=0,O=0,M=0,j=0;return M=C,C=C+32|0,d=M+24|0,B=M+16|0,k=M,Q=M+8|0,m=n[s>>2]|0,f=n[s+4>>2]|0,n[k>>2]=m,n[k+4>>2]=f,j=bF()|0,O=j+24|0,s=gr(l,4)|0,n[Q>>2]=s,l=j+28|0,c=n[l>>2]|0,c>>>0<(n[j+32>>2]|0)>>>0?(n[B>>2]=m,n[B+4>>2]=f,n[d>>2]=n[B>>2],n[d+4>>2]=n[B+4>>2],hG(c,d,s),s=(n[l>>2]|0)+12|0,n[l>>2]=s):(gSe(O,k,Q),s=n[l>>2]|0),C=M,((s-(n[O>>2]|0)|0)/12|0)+-1|0}function hG(s,l,c){s=s|0,l=l|0,c=c|0;var f=0;f=n[l+4>>2]|0,n[s>>2]=n[l>>2],n[s+4>>2]=f,n[s+8>>2]=c}function gSe(s,l,c){s=s|0,l=l|0,c=c|0;var f=0,d=0,m=0,B=0,k=0,Q=0,O=0,M=0,j=0,se=0;if(O=C,C=C+48|0,f=O+32|0,B=O+24|0,k=O,Q=s+4|0,d=(((n[Q>>2]|0)-(n[s>>2]|0)|0)/12|0)+1|0,m=dSe(s)|0,m>>>0>>0)Jr(s);else{M=n[s>>2]|0,se=((n[s+8>>2]|0)-M|0)/12|0,j=se<<1,mSe(k,se>>>0>>1>>>0?j>>>0>>0?d:j:m,((n[Q>>2]|0)-M|0)/12|0,s+8|0),Q=k+8|0,m=n[Q>>2]|0,d=n[l+4>>2]|0,c=n[c>>2]|0,n[B>>2]=n[l>>2],n[B+4>>2]=d,n[f>>2]=n[B>>2],n[f+4>>2]=n[B+4>>2],hG(m,f,c),n[Q>>2]=(n[Q>>2]|0)+12,ySe(s,k),ESe(k),C=O;return}}function dSe(s){return s=s|0,357913941}function mSe(s,l,c,f){s=s|0,l=l|0,c=c|0,f=f|0;var d=0;n[s+12>>2]=0,n[s+16>>2]=f;do if(l)if(l>>>0>357913941)Tt();else{d=Kt(l*12|0)|0;break}else d=0;while(!1);n[s>>2]=d,f=d+(c*12|0)|0,n[s+8>>2]=f,n[s+4>>2]=f,n[s+12>>2]=d+(l*12|0)}function ySe(s,l){s=s|0,l=l|0;var c=0,f=0,d=0,m=0,B=0;f=n[s>>2]|0,B=s+4|0,m=l+4|0,d=(n[B>>2]|0)-f|0,c=(n[m>>2]|0)+(((d|0)/-12|0)*12|0)|0,n[m>>2]=c,(d|0)>0?(Dr(c|0,f|0,d|0)|0,f=m,c=n[m>>2]|0):f=m,m=n[s>>2]|0,n[s>>2]=c,n[f>>2]=m,m=l+8|0,d=n[B>>2]|0,n[B>>2]=n[m>>2],n[m>>2]=d,m=s+8|0,B=l+12|0,s=n[m>>2]|0,n[m>>2]=n[B>>2],n[B>>2]=s,n[l>>2]=n[f>>2]}function ESe(s){s=s|0;var l=0,c=0,f=0;l=n[s+4>>2]|0,c=s+8|0,f=n[c>>2]|0,(f|0)!=(l|0)&&(n[c>>2]=f+(~(((f+-12-l|0)>>>0)/12|0)*12|0)),s=n[s>>2]|0,s|0&>(s)}function gG(s){s=s|0,ISe(s)}function CSe(s){s=s|0,wSe(s+24|0)}function wSe(s){s=s|0;var l=0,c=0,f=0;c=n[s>>2]|0,f=c,c|0&&(s=s+4|0,l=n[s>>2]|0,(l|0)!=(c|0)&&(n[s>>2]=l+(~(((l+-12-f|0)>>>0)/12|0)*12|0)),gt(c))}function ISe(s){s=s|0;var l=0;l=Vr()|0,zr(s,2,6,l,dG()|0,0),n[s+24>>2]=0,n[s+28>>2]=0,n[s+32>>2]=0}function dG(){return 1200}function BSe(s,l){s=s|0,l=l|0;var c=0,f=0,d=0,m=0;return c=C,C=C+16|0,f=c+8|0,d=c,m=vSe(s)|0,s=n[m+4>>2]|0,n[d>>2]=n[m>>2],n[d+4>>2]=s,n[f>>2]=n[d>>2],n[f+4>>2]=n[d+4>>2],l=DSe(l,f)|0,C=c,l|0}function vSe(s){return s=s|0,(n[(bF()|0)+24>>2]|0)+(s*12|0)|0}function DSe(s,l){s=s|0,l=l|0;var c=0;return c=n[l>>2]|0,l=n[l+4>>2]|0,s=s+(l>>1)|0,l&1&&(c=n[(n[s>>2]|0)+c>>2]|0),jv(F0[c&31](s)|0)|0}function jv(s){return s=s|0,s|0}function PSe(s,l,c){s=s|0,l=l|0,c=c|0;var f=0,d=0,m=0,B=0,k=0;f=C,C=C+16|0,d=f+8|0,m=f,k=n[c>>2]|0,B=n[c+4>>2]|0,c=pn(l)|0,n[m>>2]=k,n[m+4>>2]=B,n[d>>2]=n[m>>2],n[d+4>>2]=n[m+4>>2],SSe(s,c,d,0),C=f}function SSe(s,l,c,f){s=s|0,l=l|0,c=c|0,f=f|0;var d=0,m=0,B=0,k=0,Q=0,O=0,M=0;d=C,C=C+32|0,m=d+16|0,M=d+8|0,k=d,O=n[c>>2]|0,Q=n[c+4>>2]|0,B=n[s>>2]|0,s=xF()|0,n[M>>2]=O,n[M+4>>2]=Q,n[m>>2]=n[M>>2],n[m+4>>2]=n[M+4>>2],c=bSe(m)|0,n[k>>2]=O,n[k+4>>2]=Q,n[m>>2]=n[k>>2],n[m+4>>2]=n[k+4>>2],hn(B,l,s,c,xSe(m,f)|0,f),C=d}function xF(){var 
s=0,l=0;if(o[7688]|0||(yG(9448),ir(32,9448,U|0)|0,l=7688,n[l>>2]=1,n[l+4>>2]=0),!(Rr(9448)|0)){s=9448,l=s+36|0;do n[s>>2]=0,s=s+4|0;while((s|0)<(l|0));yG(9448)}return 9448}function bSe(s){return s=s|0,0}function xSe(s,l){s=s|0,l=l|0;var c=0,f=0,d=0,m=0,B=0,k=0,Q=0,O=0,M=0,j=0;return M=C,C=C+32|0,d=M+24|0,B=M+16|0,k=M,Q=M+8|0,m=n[s>>2]|0,f=n[s+4>>2]|0,n[k>>2]=m,n[k+4>>2]=f,j=xF()|0,O=j+24|0,s=gr(l,4)|0,n[Q>>2]=s,l=j+28|0,c=n[l>>2]|0,c>>>0<(n[j+32>>2]|0)>>>0?(n[B>>2]=m,n[B+4>>2]=f,n[d>>2]=n[B>>2],n[d+4>>2]=n[B+4>>2],mG(c,d,s),s=(n[l>>2]|0)+12|0,n[l>>2]=s):(kSe(O,k,Q),s=n[l>>2]|0),C=M,((s-(n[O>>2]|0)|0)/12|0)+-1|0}function mG(s,l,c){s=s|0,l=l|0,c=c|0;var f=0;f=n[l+4>>2]|0,n[s>>2]=n[l>>2],n[s+4>>2]=f,n[s+8>>2]=c}function kSe(s,l,c){s=s|0,l=l|0,c=c|0;var f=0,d=0,m=0,B=0,k=0,Q=0,O=0,M=0,j=0,se=0;if(O=C,C=C+48|0,f=O+32|0,B=O+24|0,k=O,Q=s+4|0,d=(((n[Q>>2]|0)-(n[s>>2]|0)|0)/12|0)+1|0,m=QSe(s)|0,m>>>0>>0)Jr(s);else{M=n[s>>2]|0,se=((n[s+8>>2]|0)-M|0)/12|0,j=se<<1,FSe(k,se>>>0>>1>>>0?j>>>0>>0?d:j:m,((n[Q>>2]|0)-M|0)/12|0,s+8|0),Q=k+8|0,m=n[Q>>2]|0,d=n[l+4>>2]|0,c=n[c>>2]|0,n[B>>2]=n[l>>2],n[B+4>>2]=d,n[f>>2]=n[B>>2],n[f+4>>2]=n[B+4>>2],mG(m,f,c),n[Q>>2]=(n[Q>>2]|0)+12,RSe(s,k),TSe(k),C=O;return}}function QSe(s){return s=s|0,357913941}function FSe(s,l,c,f){s=s|0,l=l|0,c=c|0,f=f|0;var d=0;n[s+12>>2]=0,n[s+16>>2]=f;do if(l)if(l>>>0>357913941)Tt();else{d=Kt(l*12|0)|0;break}else d=0;while(!1);n[s>>2]=d,f=d+(c*12|0)|0,n[s+8>>2]=f,n[s+4>>2]=f,n[s+12>>2]=d+(l*12|0)}function RSe(s,l){s=s|0,l=l|0;var c=0,f=0,d=0,m=0,B=0;f=n[s>>2]|0,B=s+4|0,m=l+4|0,d=(n[B>>2]|0)-f|0,c=(n[m>>2]|0)+(((d|0)/-12|0)*12|0)|0,n[m>>2]=c,(d|0)>0?(Dr(c|0,f|0,d|0)|0,f=m,c=n[m>>2]|0):f=m,m=n[s>>2]|0,n[s>>2]=c,n[f>>2]=m,m=l+8|0,d=n[B>>2]|0,n[B>>2]=n[m>>2],n[m>>2]=d,m=s+8|0,B=l+12|0,s=n[m>>2]|0,n[m>>2]=n[B>>2],n[B>>2]=s,n[l>>2]=n[f>>2]}function TSe(s){s=s|0;var l=0,c=0,f=0;l=n[s+4>>2]|0,c=s+8|0,f=n[c>>2]|0,(f|0)!=(l|0)&&(n[c>>2]=f+(~(((f+-12-l|0)>>>0)/12|0)*12|0)),s=n[s>>2]|0,s|0&>(s)}function yG(s){s=s|0,MSe(s)}function NSe(s){s=s|0,LSe(s+24|0)}function LSe(s){s=s|0;var l=0,c=0,f=0;c=n[s>>2]|0,f=c,c|0&&(s=s+4|0,l=n[s>>2]|0,(l|0)!=(c|0)&&(n[s>>2]=l+(~(((l+-12-f|0)>>>0)/12|0)*12|0)),gt(c))}function MSe(s){s=s|0;var l=0;l=Vr()|0,zr(s,2,6,l,EG()|0,1),n[s+24>>2]=0,n[s+28>>2]=0,n[s+32>>2]=0}function EG(){return 1204}function OSe(s,l,c){s=s|0,l=l|0,c=c|0;var f=0,d=0,m=0,B=0;f=C,C=C+16|0,d=f+8|0,m=f,B=USe(s)|0,s=n[B+4>>2]|0,n[m>>2]=n[B>>2],n[m+4>>2]=s,n[d>>2]=n[m>>2],n[d+4>>2]=n[m+4>>2],_Se(l,d,c),C=f}function USe(s){return s=s|0,(n[(xF()|0)+24>>2]|0)+(s*12|0)|0}function _Se(s,l,c){s=s|0,l=l|0,c=c|0;var f=0,d=0,m=0;m=C,C=C+16|0,d=m,f=n[l>>2]|0,l=n[l+4>>2]|0,s=s+(l>>1)|0,l&1&&(f=n[(n[s>>2]|0)+f>>2]|0),kF(d,c),d=QF(d,c)|0,tf[f&31](s,d),C=m}function kF(s,l){s=s|0,l=l|0}function QF(s,l){return s=s|0,l=l|0,HSe(l)|0}function HSe(s){return s=s|0,s|0}function qSe(s,l,c){s=s|0,l=l|0,c=c|0;var f=0,d=0,m=0,B=0,k=0;f=C,C=C+16|0,d=f+8|0,m=f,k=n[c>>2]|0,B=n[c+4>>2]|0,c=pn(l)|0,n[m>>2]=k,n[m+4>>2]=B,n[d>>2]=n[m>>2],n[d+4>>2]=n[m+4>>2],jSe(s,c,d,0),C=f}function jSe(s,l,c,f){s=s|0,l=l|0,c=c|0,f=f|0;var d=0,m=0,B=0,k=0,Q=0,O=0,M=0;d=C,C=C+32|0,m=d+16|0,M=d+8|0,k=d,O=n[c>>2]|0,Q=n[c+4>>2]|0,B=n[s>>2]|0,s=FF()|0,n[M>>2]=O,n[M+4>>2]=Q,n[m>>2]=n[M>>2],n[m+4>>2]=n[M+4>>2],c=GSe(m)|0,n[k>>2]=O,n[k+4>>2]=Q,n[m>>2]=n[k>>2],n[m+4>>2]=n[k+4>>2],hn(B,l,s,c,YSe(m,f)|0,f),C=d}function FF(){var s=0,l=0;if(o[7696]|0||(wG(9484),ir(33,9484,U|0)|0,l=7696,n[l>>2]=1,n[l+4>>2]=0),!(Rr(9484)|0)){s=9484,l=s+36|0;do n[s>>2]=0,s=s+4|0;while((s|0)<(l|0));wG(9484)}return 9484}function 
[… minified, machine-generated asm.js bundle code (emscripten output from a vendored JavaScript release file) elided; the extracted text was additionally corrupted by HTML-entity and tag stripping and is not recoverable verbatim …]
f=0,d=0,m=0,B=0,k=0,Q=0,O=0,M=0,j=0,se=0,je=0,Oe=0,Qe=0,$e=0,Je=0,lt=0;if(lt=C,C=C+32|0,Oe=lt+20|0,Qe=lt+8|0,$e=lt+4|0,Je=lt,l=n[l>>2]|0,l|0){je=Oe+4|0,Q=Oe+8|0,O=Qe+4|0,M=Qe+8|0,j=Qe+8|0,se=Oe+8|0;do{if(B=l+4|0,k=tR(B)|0,k|0){if(d=yw(k)|0,n[Oe>>2]=0,n[je>>2]=0,n[Q>>2]=0,f=(Ew(k)|0)+1|0,ELe(Oe,f),f|0)for(;f=f+-1|0,xc(Qe,n[d>>2]|0),m=n[je>>2]|0,m>>>0<(n[se>>2]|0)>>>0?(n[m>>2]=n[Qe>>2],n[je>>2]=(n[je>>2]|0)+4):rR(Oe,Qe),f;)d=d+4|0;f=Cw(k)|0,n[Qe>>2]=0,n[O>>2]=0,n[M>>2]=0;e:do if(n[f>>2]|0)for(d=0,m=0;;){if((d|0)==(m|0)?CLe(Qe,f):(n[d>>2]=n[f>>2],n[O>>2]=(n[O>>2]|0)+4),f=f+4|0,!(n[f>>2]|0))break e;d=n[O>>2]|0,m=n[j>>2]|0}while(!1);n[$e>>2]=Wv(B)|0,n[Je>>2]=Rr(k)|0,wLe(c,s,$e,Je,Oe,Qe),nR(Qe),$A(Oe)}l=n[l>>2]|0}while(l|0)}C=lt}function tR(s){return s=s|0,n[s+12>>2]|0}function yw(s){return s=s|0,n[s+12>>2]|0}function Ew(s){return s=s|0,n[s+16>>2]|0}function ELe(s,l){s=s|0,l=l|0;var c=0,f=0,d=0;d=C,C=C+32|0,c=d,f=n[s>>2]|0,(n[s+8>>2]|0)-f>>2>>>0>>0&&(x9(c,l,(n[s+4>>2]|0)-f>>2,s+8|0),k9(s,c),Q9(c)),C=d}function rR(s,l){s=s|0,l=l|0;var c=0,f=0,d=0,m=0,B=0,k=0,Q=0,O=0;if(B=C,C=C+32|0,c=B,f=s+4|0,d=((n[f>>2]|0)-(n[s>>2]|0)>>2)+1|0,m=b9(s)|0,m>>>0>>0)Jr(s);else{k=n[s>>2]|0,O=(n[s+8>>2]|0)-k|0,Q=O>>1,x9(c,O>>2>>>0>>1>>>0?Q>>>0>>0?d:Q:m,(n[f>>2]|0)-k>>2,s+8|0),m=c+8|0,n[n[m>>2]>>2]=n[l>>2],n[m>>2]=(n[m>>2]|0)+4,k9(s,c),Q9(c),C=B;return}}function Cw(s){return s=s|0,n[s+8>>2]|0}function CLe(s,l){s=s|0,l=l|0;var c=0,f=0,d=0,m=0,B=0,k=0,Q=0,O=0;if(B=C,C=C+32|0,c=B,f=s+4|0,d=((n[f>>2]|0)-(n[s>>2]|0)>>2)+1|0,m=S9(s)|0,m>>>0>>0)Jr(s);else{k=n[s>>2]|0,O=(n[s+8>>2]|0)-k|0,Q=O>>1,_Le(c,O>>2>>>0>>1>>>0?Q>>>0>>0?d:Q:m,(n[f>>2]|0)-k>>2,s+8|0),m=c+8|0,n[n[m>>2]>>2]=n[l>>2],n[m>>2]=(n[m>>2]|0)+4,HLe(s,c),qLe(c),C=B;return}}function Wv(s){return s=s|0,n[s>>2]|0}function wLe(s,l,c,f,d,m){s=s|0,l=l|0,c=c|0,f=f|0,d=d|0,m=m|0,ILe(s,l,c,f,d,m)}function nR(s){s=s|0;var l=0,c=0,f=0;c=n[s>>2]|0,f=c,c|0&&(s=s+4|0,l=n[s>>2]|0,(l|0)!=(c|0)&&(n[s>>2]=l+(~((l+-4-f|0)>>>2)<<2)),gt(c))}function $A(s){s=s|0;var l=0,c=0,f=0;c=n[s>>2]|0,f=c,c|0&&(s=s+4|0,l=n[s>>2]|0,(l|0)!=(c|0)&&(n[s>>2]=l+(~((l+-4-f|0)>>>2)<<2)),gt(c))}function ILe(s,l,c,f,d,m){s=s|0,l=l|0,c=c|0,f=f|0,d=d|0,m=m|0;var B=0,k=0,Q=0,O=0,M=0,j=0;B=C,C=C+48|0,M=B+40|0,k=B+32|0,j=B+24|0,Q=B+12|0,O=B,za(k),s=ya(s)|0,n[j>>2]=n[l>>2],c=n[c>>2]|0,f=n[f>>2]|0,iR(Q,d),BLe(O,m),n[M>>2]=n[j>>2],vLe(s,M,c,f,Q,O),nR(O),$A(Q),Ja(k),C=B}function iR(s,l){s=s|0,l=l|0;var c=0,f=0;n[s>>2]=0,n[s+4>>2]=0,n[s+8>>2]=0,c=l+4|0,f=(n[c>>2]|0)-(n[l>>2]|0)>>2,f|0&&(OLe(s,f),ULe(s,n[l>>2]|0,n[c>>2]|0,f))}function BLe(s,l){s=s|0,l=l|0;var c=0,f=0;n[s>>2]=0,n[s+4>>2]=0,n[s+8>>2]=0,c=l+4|0,f=(n[c>>2]|0)-(n[l>>2]|0)>>2,f|0&&(LLe(s,f),MLe(s,n[l>>2]|0,n[c>>2]|0,f))}function vLe(s,l,c,f,d,m){s=s|0,l=l|0,c=c|0,f=f|0,d=d|0,m=m|0;var B=0,k=0,Q=0,O=0,M=0,j=0;B=C,C=C+32|0,M=B+28|0,j=B+24|0,k=B+12|0,Q=B,O=Pl(DLe()|0)|0,n[j>>2]=n[l>>2],n[M>>2]=n[j>>2],l=b0(M)|0,c=I9(c)|0,f=sR(f)|0,n[k>>2]=n[d>>2],M=d+4|0,n[k+4>>2]=n[M>>2],j=d+8|0,n[k+8>>2]=n[j>>2],n[j>>2]=0,n[M>>2]=0,n[d>>2]=0,d=oR(k)|0,n[Q>>2]=n[m>>2],M=m+4|0,n[Q+4>>2]=n[M>>2],j=m+8|0,n[Q+8>>2]=n[j>>2],n[j>>2]=0,n[M>>2]=0,n[m>>2]=0,ao(0,O|0,s|0,l|0,c|0,f|0,d|0,PLe(Q)|0)|0,nR(Q),$A(k),C=B}function DLe(){var s=0;return o[7968]|0||(TLe(10708),s=7968,n[s>>2]=1,n[s+4>>2]=0),10708}function b0(s){return s=s|0,v9(s)|0}function I9(s){return s=s|0,B9(s)|0}function sR(s){return s=s|0,jv(s)|0}function oR(s){return s=s|0,bLe(s)|0}function PLe(s){return s=s|0,SLe(s)|0}function SLe(s){s=s|0;var 
l=0,c=0,f=0;if(f=(n[s+4>>2]|0)-(n[s>>2]|0)|0,c=f>>2,f=Va(f+4|0)|0,n[f>>2]=c,c|0){l=0;do n[f+4+(l<<2)>>2]=B9(n[(n[s>>2]|0)+(l<<2)>>2]|0)|0,l=l+1|0;while((l|0)!=(c|0))}return f|0}function B9(s){return s=s|0,s|0}function bLe(s){s=s|0;var l=0,c=0,f=0;if(f=(n[s+4>>2]|0)-(n[s>>2]|0)|0,c=f>>2,f=Va(f+4|0)|0,n[f>>2]=c,c|0){l=0;do n[f+4+(l<<2)>>2]=v9((n[s>>2]|0)+(l<<2)|0)|0,l=l+1|0;while((l|0)!=(c|0))}return f|0}function v9(s){s=s|0;var l=0,c=0,f=0,d=0;return d=C,C=C+32|0,l=d+12|0,c=d,f=yF(D9()|0)|0,f?(EF(l,f),CF(c,l),uUe(s,c),s=wF(l)|0):s=xLe(s)|0,C=d,s|0}function D9(){var s=0;return o[7960]|0||(RLe(10664),ir(25,10664,U|0)|0,s=7960,n[s>>2]=1,n[s+4>>2]=0),10664}function xLe(s){s=s|0;var l=0,c=0,f=0,d=0,m=0,B=0,k=0;return c=C,C=C+16|0,d=c+4|0,B=c,f=Va(8)|0,l=f,k=Kt(4)|0,n[k>>2]=n[s>>2],m=l+4|0,n[m>>2]=k,s=Kt(8)|0,m=n[m>>2]|0,n[B>>2]=0,n[d>>2]=n[B>>2],P9(s,m,d),n[f>>2]=s,C=c,l|0}function P9(s,l,c){s=s|0,l=l|0,c=c|0,n[s>>2]=l,c=Kt(16)|0,n[c+4>>2]=0,n[c+8>>2]=0,n[c>>2]=1656,n[c+12>>2]=l,n[s+4>>2]=c}function kLe(s){s=s|0,Md(s),gt(s)}function QLe(s){s=s|0,s=n[s+12>>2]|0,s|0&>(s)}function FLe(s){s=s|0,gt(s)}function RLe(s){s=s|0,wp(s)}function TLe(s){s=s|0,Sl(s,NLe()|0,5)}function NLe(){return 1676}function LLe(s,l){s=s|0,l=l|0;var c=0;if((S9(s)|0)>>>0>>0&&Jr(s),l>>>0>1073741823)Tt();else{c=Kt(l<<2)|0,n[s+4>>2]=c,n[s>>2]=c,n[s+8>>2]=c+(l<<2);return}}function MLe(s,l,c,f){s=s|0,l=l|0,c=c|0,f=f|0,f=s+4|0,s=c-l|0,(s|0)>0&&(Dr(n[f>>2]|0,l|0,s|0)|0,n[f>>2]=(n[f>>2]|0)+(s>>>2<<2))}function S9(s){return s=s|0,1073741823}function OLe(s,l){s=s|0,l=l|0;var c=0;if((b9(s)|0)>>>0>>0&&Jr(s),l>>>0>1073741823)Tt();else{c=Kt(l<<2)|0,n[s+4>>2]=c,n[s>>2]=c,n[s+8>>2]=c+(l<<2);return}}function ULe(s,l,c,f){s=s|0,l=l|0,c=c|0,f=f|0,f=s+4|0,s=c-l|0,(s|0)>0&&(Dr(n[f>>2]|0,l|0,s|0)|0,n[f>>2]=(n[f>>2]|0)+(s>>>2<<2))}function b9(s){return s=s|0,1073741823}function _Le(s,l,c,f){s=s|0,l=l|0,c=c|0,f=f|0;var d=0;n[s+12>>2]=0,n[s+16>>2]=f;do if(l)if(l>>>0>1073741823)Tt();else{d=Kt(l<<2)|0;break}else d=0;while(!1);n[s>>2]=d,f=d+(c<<2)|0,n[s+8>>2]=f,n[s+4>>2]=f,n[s+12>>2]=d+(l<<2)}function HLe(s,l){s=s|0,l=l|0;var c=0,f=0,d=0,m=0,B=0;f=n[s>>2]|0,B=s+4|0,m=l+4|0,d=(n[B>>2]|0)-f|0,c=(n[m>>2]|0)+(0-(d>>2)<<2)|0,n[m>>2]=c,(d|0)>0?(Dr(c|0,f|0,d|0)|0,f=m,c=n[m>>2]|0):f=m,m=n[s>>2]|0,n[s>>2]=c,n[f>>2]=m,m=l+8|0,d=n[B>>2]|0,n[B>>2]=n[m>>2],n[m>>2]=d,m=s+8|0,B=l+12|0,s=n[m>>2]|0,n[m>>2]=n[B>>2],n[B>>2]=s,n[l>>2]=n[f>>2]}function qLe(s){s=s|0;var l=0,c=0,f=0;l=n[s+4>>2]|0,c=s+8|0,f=n[c>>2]|0,(f|0)!=(l|0)&&(n[c>>2]=f+(~((f+-4-l|0)>>>2)<<2)),s=n[s>>2]|0,s|0&>(s)}function x9(s,l,c,f){s=s|0,l=l|0,c=c|0,f=f|0;var d=0;n[s+12>>2]=0,n[s+16>>2]=f;do if(l)if(l>>>0>1073741823)Tt();else{d=Kt(l<<2)|0;break}else d=0;while(!1);n[s>>2]=d,f=d+(c<<2)|0,n[s+8>>2]=f,n[s+4>>2]=f,n[s+12>>2]=d+(l<<2)}function k9(s,l){s=s|0,l=l|0;var c=0,f=0,d=0,m=0,B=0;f=n[s>>2]|0,B=s+4|0,m=l+4|0,d=(n[B>>2]|0)-f|0,c=(n[m>>2]|0)+(0-(d>>2)<<2)|0,n[m>>2]=c,(d|0)>0?(Dr(c|0,f|0,d|0)|0,f=m,c=n[m>>2]|0):f=m,m=n[s>>2]|0,n[s>>2]=c,n[f>>2]=m,m=l+8|0,d=n[B>>2]|0,n[B>>2]=n[m>>2],n[m>>2]=d,m=s+8|0,B=l+12|0,s=n[m>>2]|0,n[m>>2]=n[B>>2],n[B>>2]=s,n[l>>2]=n[f>>2]}function Q9(s){s=s|0;var l=0,c=0,f=0;l=n[s+4>>2]|0,c=s+8|0,f=n[c>>2]|0,(f|0)!=(l|0)&&(n[c>>2]=f+(~((f+-4-l|0)>>>2)<<2)),s=n[s>>2]|0,s|0&>(s)}function jLe(s,l,c,f,d){s=s|0,l=l|0,c=c|0,f=f|0,d=d|0;var 
m=0,B=0,k=0,Q=0,O=0,M=0,j=0,se=0,je=0,Oe=0,Qe=0;if(Qe=C,C=C+32|0,M=Qe+20|0,j=Qe+12|0,O=Qe+16|0,se=Qe+4|0,je=Qe,Oe=Qe+8|0,k=E9()|0,m=n[k>>2]|0,B=n[m>>2]|0,B|0)for(Q=n[k+8>>2]|0,k=n[k+4>>2]|0;xc(M,B),GLe(s,M,k,Q),m=m+4|0,B=n[m>>2]|0,B;)Q=Q+1|0,k=k+1|0;if(m=C9()|0,B=n[m>>2]|0,B|0)do xc(M,B),n[j>>2]=n[m+4>>2],YLe(l,M,j),m=m+8|0,B=n[m>>2]|0;while(B|0);if(m=n[(Fd()|0)>>2]|0,m|0)do l=n[m+4>>2]|0,xc(M,n[(Rd(l)|0)>>2]|0),n[j>>2]=$F(l)|0,WLe(c,M,j),m=n[m>>2]|0;while(m|0);if(xc(O,0),m=eR()|0,n[M>>2]=n[O>>2],w9(M,m,d),m=n[(Fd()|0)>>2]|0,m|0){s=M+4|0,l=M+8|0,c=M+8|0;do{if(Q=n[m+4>>2]|0,xc(j,n[(Rd(Q)|0)>>2]|0),KLe(se,F9(Q)|0),B=n[se>>2]|0,B|0){n[M>>2]=0,n[s>>2]=0,n[l>>2]=0;do xc(je,n[(Rd(n[B+4>>2]|0)|0)>>2]|0),k=n[s>>2]|0,k>>>0<(n[c>>2]|0)>>>0?(n[k>>2]=n[je>>2],n[s>>2]=(n[s>>2]|0)+4):rR(M,je),B=n[B>>2]|0;while(B|0);VLe(f,j,M),$A(M)}n[Oe>>2]=n[j>>2],O=R9(Q)|0,n[M>>2]=n[Oe>>2],w9(M,O,d),tG(se),m=n[m>>2]|0}while(m|0)}C=Qe}function GLe(s,l,c,f){s=s|0,l=l|0,c=c|0,f=f|0,oMe(s,l,c,f)}function YLe(s,l,c){s=s|0,l=l|0,c=c|0,sMe(s,l,c)}function Rd(s){return s=s|0,s|0}function WLe(s,l,c){s=s|0,l=l|0,c=c|0,tMe(s,l,c)}function F9(s){return s=s|0,s+16|0}function KLe(s,l){s=s|0,l=l|0;var c=0,f=0,d=0,m=0,B=0,k=0,Q=0;if(m=C,C=C+16|0,d=m+8|0,c=m,n[s>>2]=0,f=n[l>>2]|0,n[d>>2]=f,n[c>>2]=s,c=eMe(c)|0,f|0){if(f=Kt(12)|0,B=(T9(d)|0)+4|0,s=n[B+4>>2]|0,l=f+4|0,n[l>>2]=n[B>>2],n[l+4>>2]=s,l=n[n[d>>2]>>2]|0,n[d>>2]=l,!l)s=f;else for(l=f;s=Kt(12)|0,Q=(T9(d)|0)+4|0,k=n[Q+4>>2]|0,B=s+4|0,n[B>>2]=n[Q>>2],n[B+4>>2]=k,n[l>>2]=s,B=n[n[d>>2]>>2]|0,n[d>>2]=B,B;)l=s;n[s>>2]=n[c>>2],n[c>>2]=f}C=m}function VLe(s,l,c){s=s|0,l=l|0,c=c|0,zLe(s,l,c)}function R9(s){return s=s|0,s+24|0}function zLe(s,l,c){s=s|0,l=l|0,c=c|0;var f=0,d=0,m=0,B=0,k=0;f=C,C=C+32|0,B=f+24|0,d=f+16|0,k=f+12|0,m=f,za(d),s=ya(s)|0,n[k>>2]=n[l>>2],iR(m,c),n[B>>2]=n[k>>2],JLe(s,B,m),$A(m),Ja(d),C=f}function JLe(s,l,c){s=s|0,l=l|0,c=c|0;var f=0,d=0,m=0,B=0,k=0;f=C,C=C+32|0,B=f+16|0,k=f+12|0,d=f,m=Pl(XLe()|0)|0,n[k>>2]=n[l>>2],n[B>>2]=n[k>>2],l=b0(B)|0,n[d>>2]=n[c>>2],B=c+4|0,n[d+4>>2]=n[B>>2],k=c+8|0,n[d+8>>2]=n[k>>2],n[k>>2]=0,n[B>>2]=0,n[c>>2]=0,oo(0,m|0,s|0,l|0,oR(d)|0)|0,$A(d),C=f}function XLe(){var s=0;return o[7976]|0||(ZLe(10720),s=7976,n[s>>2]=1,n[s+4>>2]=0),10720}function ZLe(s){s=s|0,Sl(s,$Le()|0,2)}function $Le(){return 1732}function eMe(s){return s=s|0,n[s>>2]|0}function T9(s){return s=s|0,n[s>>2]|0}function tMe(s,l,c){s=s|0,l=l|0,c=c|0;var f=0,d=0,m=0,B=0;f=C,C=C+32|0,m=f+16|0,d=f+8|0,B=f,za(d),s=ya(s)|0,n[B>>2]=n[l>>2],c=n[c>>2]|0,n[m>>2]=n[B>>2],N9(s,m,c),Ja(d),C=f}function N9(s,l,c){s=s|0,l=l|0,c=c|0;var f=0,d=0,m=0,B=0;f=C,C=C+16|0,m=f+4|0,B=f,d=Pl(rMe()|0)|0,n[B>>2]=n[l>>2],n[m>>2]=n[B>>2],l=b0(m)|0,oo(0,d|0,s|0,l|0,I9(c)|0)|0,C=f}function rMe(){var s=0;return o[7984]|0||(nMe(10732),s=7984,n[s>>2]=1,n[s+4>>2]=0),10732}function nMe(s){s=s|0,Sl(s,iMe()|0,2)}function iMe(){return 1744}function sMe(s,l,c){s=s|0,l=l|0,c=c|0;var f=0,d=0,m=0,B=0;f=C,C=C+32|0,m=f+16|0,d=f+8|0,B=f,za(d),s=ya(s)|0,n[B>>2]=n[l>>2],c=n[c>>2]|0,n[m>>2]=n[B>>2],N9(s,m,c),Ja(d),C=f}function oMe(s,l,c,f){s=s|0,l=l|0,c=c|0,f=f|0;var d=0,m=0,B=0,k=0;d=C,C=C+32|0,B=d+16|0,m=d+8|0,k=d,za(m),s=ya(s)|0,n[k>>2]=n[l>>2],c=o[c>>0]|0,f=o[f>>0]|0,n[B>>2]=n[k>>2],aMe(s,B,c,f),Ja(m),C=d}function aMe(s,l,c,f){s=s|0,l=l|0,c=c|0,f=f|0;var d=0,m=0,B=0,k=0;d=C,C=C+16|0,B=d+4|0,k=d,m=Pl(lMe()|0)|0,n[k>>2]=n[l>>2],n[B>>2]=n[k>>2],l=b0(B)|0,c=Td(c)|0,hc(0,m|0,s|0,l|0,c|0,Td(f)|0)|0,C=d}function lMe(){var s=0;return o[7992]|0||(uMe(10744),s=7992,n[s>>2]=1,n[s+4>>2]=0),10744}function Td(s){return 
s=s|0,cMe(s)|0}function cMe(s){return s=s|0,s&255|0}function uMe(s){s=s|0,Sl(s,AMe()|0,3)}function AMe(){return 1756}function fMe(s,l,c){s=s|0,l=l|0,c=c|0;var f=0,d=0,m=0,B=0,k=0,Q=0,O=0,M=0,j=0,se=0;switch(se=C,C=C+32|0,k=se+8|0,Q=se+4|0,O=se+20|0,M=se,DF(s,0),f=cUe(l)|0,n[k>>2]=0,j=k+4|0,n[j>>2]=0,n[k+8>>2]=0,f<<24>>24){case 0:{o[O>>0]=0,pMe(Q,c,O),Kv(s,Q)|0,jA(Q);break}case 8:{j=fR(l)|0,o[O>>0]=8,xc(M,n[j+4>>2]|0),hMe(Q,c,O,M,j+8|0),Kv(s,Q)|0,jA(Q);break}case 9:{if(m=fR(l)|0,l=n[m+4>>2]|0,l|0)for(B=k+8|0,d=m+12|0;l=l+-1|0,xc(Q,n[d>>2]|0),f=n[j>>2]|0,f>>>0<(n[B>>2]|0)>>>0?(n[f>>2]=n[Q>>2],n[j>>2]=(n[j>>2]|0)+4):rR(k,Q),l;)d=d+4|0;o[O>>0]=9,xc(M,n[m+8>>2]|0),gMe(Q,c,O,M,k),Kv(s,Q)|0,jA(Q);break}default:j=fR(l)|0,o[O>>0]=f,xc(M,n[j+4>>2]|0),dMe(Q,c,O,M),Kv(s,Q)|0,jA(Q)}$A(k),C=se}function pMe(s,l,c){s=s|0,l=l|0,c=c|0;var f=0,d=0;f=C,C=C+16|0,d=f,za(d),l=ya(l)|0,xMe(s,l,o[c>>0]|0),Ja(d),C=f}function Kv(s,l){s=s|0,l=l|0;var c=0;return c=n[s>>2]|0,c|0&&PA(c|0),n[s>>2]=n[l>>2],n[l>>2]=0,s|0}function hMe(s,l,c,f,d){s=s|0,l=l|0,c=c|0,f=f|0,d=d|0;var m=0,B=0,k=0,Q=0;m=C,C=C+32|0,k=m+16|0,B=m+8|0,Q=m,za(B),l=ya(l)|0,c=o[c>>0]|0,n[Q>>2]=n[f>>2],d=n[d>>2]|0,n[k>>2]=n[Q>>2],DMe(s,l,c,k,d),Ja(B),C=m}function gMe(s,l,c,f,d){s=s|0,l=l|0,c=c|0,f=f|0,d=d|0;var m=0,B=0,k=0,Q=0,O=0;m=C,C=C+32|0,Q=m+24|0,B=m+16|0,O=m+12|0,k=m,za(B),l=ya(l)|0,c=o[c>>0]|0,n[O>>2]=n[f>>2],iR(k,d),n[Q>>2]=n[O>>2],wMe(s,l,c,Q,k),$A(k),Ja(B),C=m}function dMe(s,l,c,f){s=s|0,l=l|0,c=c|0,f=f|0;var d=0,m=0,B=0,k=0;d=C,C=C+32|0,B=d+16|0,m=d+8|0,k=d,za(m),l=ya(l)|0,c=o[c>>0]|0,n[k>>2]=n[f>>2],n[B>>2]=n[k>>2],mMe(s,l,c,B),Ja(m),C=d}function mMe(s,l,c,f){s=s|0,l=l|0,c=c|0,f=f|0;var d=0,m=0,B=0,k=0;d=C,C=C+16|0,m=d+4|0,k=d,B=Pl(yMe()|0)|0,c=Td(c)|0,n[k>>2]=n[f>>2],n[m>>2]=n[k>>2],Vv(s,oo(0,B|0,l|0,c|0,b0(m)|0)|0),C=d}function yMe(){var s=0;return o[8e3]|0||(EMe(10756),s=8e3,n[s>>2]=1,n[s+4>>2]=0),10756}function Vv(s,l){s=s|0,l=l|0,DF(s,l)}function EMe(s){s=s|0,Sl(s,CMe()|0,2)}function CMe(){return 1772}function wMe(s,l,c,f,d){s=s|0,l=l|0,c=c|0,f=f|0,d=d|0;var m=0,B=0,k=0,Q=0,O=0;m=C,C=C+32|0,Q=m+16|0,O=m+12|0,B=m,k=Pl(IMe()|0)|0,c=Td(c)|0,n[O>>2]=n[f>>2],n[Q>>2]=n[O>>2],f=b0(Q)|0,n[B>>2]=n[d>>2],Q=d+4|0,n[B+4>>2]=n[Q>>2],O=d+8|0,n[B+8>>2]=n[O>>2],n[O>>2]=0,n[Q>>2]=0,n[d>>2]=0,Vv(s,hc(0,k|0,l|0,c|0,f|0,oR(B)|0)|0),$A(B),C=m}function IMe(){var s=0;return o[8008]|0||(BMe(10768),s=8008,n[s>>2]=1,n[s+4>>2]=0),10768}function BMe(s){s=s|0,Sl(s,vMe()|0,3)}function vMe(){return 1784}function DMe(s,l,c,f,d){s=s|0,l=l|0,c=c|0,f=f|0,d=d|0;var m=0,B=0,k=0,Q=0;m=C,C=C+16|0,k=m+4|0,Q=m,B=Pl(PMe()|0)|0,c=Td(c)|0,n[Q>>2]=n[f>>2],n[k>>2]=n[Q>>2],f=b0(k)|0,Vv(s,hc(0,B|0,l|0,c|0,f|0,sR(d)|0)|0),C=m}function PMe(){var s=0;return o[8016]|0||(SMe(10780),s=8016,n[s>>2]=1,n[s+4>>2]=0),10780}function SMe(s){s=s|0,Sl(s,bMe()|0,3)}function bMe(){return 1800}function xMe(s,l,c){s=s|0,l=l|0,c=c|0;var f=0;f=Pl(kMe()|0)|0,Vv(s,Qn(0,f|0,l|0,Td(c)|0)|0)}function kMe(){var s=0;return o[8024]|0||(QMe(10792),s=8024,n[s>>2]=1,n[s+4>>2]=0),10792}function QMe(s){s=s|0,Sl(s,FMe()|0,1)}function FMe(){return 1816}function RMe(){TMe(),NMe(),LMe()}function TMe(){n[2702]=c7(65536)|0}function NMe(){rOe(10856)}function LMe(){MMe(10816)}function MMe(s){s=s|0,OMe(s,5044),UMe(s)|0}function OMe(s,l){s=s|0,l=l|0;var c=0;c=D9()|0,n[s>>2]=c,JMe(c,l),Pp(n[s>>2]|0)}function UMe(s){s=s|0;var l=0;return l=n[s>>2]|0,P0(l,_Me()|0),s|0}function _Me(){var s=0;return o[8032]|0||(L9(10820),ir(64,10820,U|0)|0,s=8032,n[s>>2]=1,n[s+4>>2]=0),Rr(10820)|0||L9(10820),10820}function 
L9(s){s=s|0,jMe(s),S0(s,25)}function HMe(s){s=s|0,qMe(s+24|0)}function qMe(s){s=s|0;var l=0,c=0,f=0;c=n[s>>2]|0,f=c,c|0&&(s=s+4|0,l=n[s>>2]|0,(l|0)!=(c|0)&&(n[s>>2]=l+(~((l+-8-f|0)>>>3)<<3)),gt(c))}function jMe(s){s=s|0;var l=0;l=Vr()|0,zr(s,5,18,l,KMe()|0,1),n[s+24>>2]=0,n[s+28>>2]=0,n[s+32>>2]=0}function GMe(s,l){s=s|0,l=l|0,YMe(s,l)}function YMe(s,l){s=s|0,l=l|0;var c=0,f=0,d=0;c=C,C=C+16|0,f=c,d=c+4|0,v0(d,l),n[f>>2]=D0(d,l)|0,WMe(s,f),C=c}function WMe(s,l){s=s|0,l=l|0,M9(s+4|0,n[l>>2]|0),o[s+8>>0]=1}function M9(s,l){s=s|0,l=l|0,n[s>>2]=l}function KMe(){return 1824}function VMe(s){return s=s|0,zMe(s)|0}function zMe(s){s=s|0;var l=0,c=0,f=0,d=0,m=0,B=0,k=0;return c=C,C=C+16|0,d=c+4|0,B=c,f=Va(8)|0,l=f,k=Kt(4)|0,v0(d,s),M9(k,D0(d,s)|0),m=l+4|0,n[m>>2]=k,s=Kt(8)|0,m=n[m>>2]|0,n[B>>2]=0,n[d>>2]=n[B>>2],P9(s,m,d),n[f>>2]=s,C=c,l|0}function Va(s){s=s|0;var l=0,c=0;return s=s+7&-8,s>>>0<=32768&&(l=n[2701]|0,s>>>0<=(65536-l|0)>>>0)?(c=(n[2702]|0)+l|0,n[2701]=l+s,s=c):(s=c7(s+8|0)|0,n[s>>2]=n[2703],n[2703]=s,s=s+8|0),s|0}function JMe(s,l){s=s|0,l=l|0,n[s>>2]=XMe()|0,n[s+4>>2]=ZMe()|0,n[s+12>>2]=l,n[s+8>>2]=$Me()|0,n[s+32>>2]=9}function XMe(){return 11744}function ZMe(){return 1832}function $Me(){return Yv()|0}function eOe(s,l,c,f){s=s|0,l=l|0,c=c|0,f=f|0,(Dp(f,896)|0)==512?c|0&&(tOe(c),gt(c)):l|0&>(l)}function tOe(s){s=s|0,s=n[s+4>>2]|0,s|0&&Sp(s)}function rOe(s){s=s|0,nOe(s,5052),iOe(s)|0,sOe(s,5058,26)|0,oOe(s,5069,1)|0,aOe(s,5077,10)|0,lOe(s,5087,19)|0,cOe(s,5094,27)|0}function nOe(s,l){s=s|0,l=l|0;var c=0;c=tUe()|0,n[s>>2]=c,rUe(c,l),Pp(n[s>>2]|0)}function iOe(s){s=s|0;var l=0;return l=n[s>>2]|0,P0(l,H4e()|0),s|0}function sOe(s,l,c){return s=s|0,l=l|0,c=c|0,B4e(s,pn(l)|0,c,0),s|0}function oOe(s,l,c){return s=s|0,l=l|0,c=c|0,l4e(s,pn(l)|0,c,0),s|0}function aOe(s,l,c){return s=s|0,l=l|0,c=c|0,_Oe(s,pn(l)|0,c,0),s|0}function lOe(s,l,c){return s=s|0,l=l|0,c=c|0,DOe(s,pn(l)|0,c,0),s|0}function O9(s,l){s=s|0,l=l|0;var c=0,f=0;e:for(;;){for(c=n[2703]|0;;){if((c|0)==(l|0))break e;if(f=n[c>>2]|0,n[2703]=f,!c)c=f;else break}gt(c)}n[2701]=s}function cOe(s,l,c){return s=s|0,l=l|0,c=c|0,uOe(s,pn(l)|0,c,0),s|0}function uOe(s,l,c,f){s=s|0,l=l|0,c=c|0,f=f|0;var d=0,m=0;m=n[s>>2]|0,d=aR()|0,s=AOe(c)|0,hn(m,l,d,s,fOe(c,f)|0,f)}function aR(){var s=0,l=0;if(o[8040]|0||(_9(10860),ir(65,10860,U|0)|0,l=8040,n[l>>2]=1,n[l+4>>2]=0),!(Rr(10860)|0)){s=10860,l=s+36|0;do n[s>>2]=0,s=s+4|0;while((s|0)<(l|0));_9(10860)}return 10860}function AOe(s){return s=s|0,s|0}function fOe(s,l){s=s|0,l=l|0;var c=0,f=0,d=0,m=0,B=0,k=0,Q=0;return k=C,C=C+16|0,d=k,m=k+4|0,n[d>>2]=s,Q=aR()|0,B=Q+24|0,l=gr(l,4)|0,n[m>>2]=l,c=Q+28|0,f=n[c>>2]|0,f>>>0<(n[Q+32>>2]|0)>>>0?(U9(f,s,l),l=(n[c>>2]|0)+8|0,n[c>>2]=l):(pOe(B,d,m),l=n[c>>2]|0),C=k,(l-(n[B>>2]|0)>>3)+-1|0}function U9(s,l,c){s=s|0,l=l|0,c=c|0,n[s>>2]=l,n[s+4>>2]=c}function pOe(s,l,c){s=s|0,l=l|0,c=c|0;var f=0,d=0,m=0,B=0,k=0,Q=0,O=0,M=0;if(k=C,C=C+32|0,d=k,m=s+4|0,B=((n[m>>2]|0)-(n[s>>2]|0)>>3)+1|0,f=hOe(s)|0,f>>>0>>0)Jr(s);else{Q=n[s>>2]|0,M=(n[s+8>>2]|0)-Q|0,O=M>>2,gOe(d,M>>3>>>0>>1>>>0?O>>>0>>0?B:O:f,(n[m>>2]|0)-Q>>3,s+8|0),B=d+8|0,U9(n[B>>2]|0,n[l>>2]|0,n[c>>2]|0),n[B>>2]=(n[B>>2]|0)+8,dOe(s,d),mOe(d),C=k;return}}function hOe(s){return s=s|0,536870911}function gOe(s,l,c,f){s=s|0,l=l|0,c=c|0,f=f|0;var d=0;n[s+12>>2]=0,n[s+16>>2]=f;do if(l)if(l>>>0>536870911)Tt();else{d=Kt(l<<3)|0;break}else d=0;while(!1);n[s>>2]=d,f=d+(c<<3)|0,n[s+8>>2]=f,n[s+4>>2]=f,n[s+12>>2]=d+(l<<3)}function dOe(s,l){s=s|0,l=l|0;var 
c=0,f=0,d=0,m=0,B=0;f=n[s>>2]|0,B=s+4|0,m=l+4|0,d=(n[B>>2]|0)-f|0,c=(n[m>>2]|0)+(0-(d>>3)<<3)|0,n[m>>2]=c,(d|0)>0?(Dr(c|0,f|0,d|0)|0,f=m,c=n[m>>2]|0):f=m,m=n[s>>2]|0,n[s>>2]=c,n[f>>2]=m,m=l+8|0,d=n[B>>2]|0,n[B>>2]=n[m>>2],n[m>>2]=d,m=s+8|0,B=l+12|0,s=n[m>>2]|0,n[m>>2]=n[B>>2],n[B>>2]=s,n[l>>2]=n[f>>2]}function mOe(s){s=s|0;var l=0,c=0,f=0;l=n[s+4>>2]|0,c=s+8|0,f=n[c>>2]|0,(f|0)!=(l|0)&&(n[c>>2]=f+(~((f+-8-l|0)>>>3)<<3)),s=n[s>>2]|0,s|0&>(s)}function _9(s){s=s|0,COe(s)}function yOe(s){s=s|0,EOe(s+24|0)}function EOe(s){s=s|0;var l=0,c=0,f=0;c=n[s>>2]|0,f=c,c|0&&(s=s+4|0,l=n[s>>2]|0,(l|0)!=(c|0)&&(n[s>>2]=l+(~((l+-8-f|0)>>>3)<<3)),gt(c))}function COe(s){s=s|0;var l=0;l=Vr()|0,zr(s,1,11,l,wOe()|0,2),n[s+24>>2]=0,n[s+28>>2]=0,n[s+32>>2]=0}function wOe(){return 1840}function IOe(s,l,c){s=s|0,l=l|0,c=c|0,vOe(n[(BOe(s)|0)>>2]|0,l,c)}function BOe(s){return s=s|0,(n[(aR()|0)+24>>2]|0)+(s<<3)|0}function vOe(s,l,c){s=s|0,l=l|0,c=c|0;var f=0,d=0,m=0;f=C,C=C+16|0,m=f+1|0,d=f,v0(m,l),l=D0(m,l)|0,v0(d,c),c=D0(d,c)|0,tf[s&31](l,c),C=f}function DOe(s,l,c,f){s=s|0,l=l|0,c=c|0,f=f|0;var d=0,m=0;m=n[s>>2]|0,d=lR()|0,s=POe(c)|0,hn(m,l,d,s,SOe(c,f)|0,f)}function lR(){var s=0,l=0;if(o[8048]|0||(q9(10896),ir(66,10896,U|0)|0,l=8048,n[l>>2]=1,n[l+4>>2]=0),!(Rr(10896)|0)){s=10896,l=s+36|0;do n[s>>2]=0,s=s+4|0;while((s|0)<(l|0));q9(10896)}return 10896}function POe(s){return s=s|0,s|0}function SOe(s,l){s=s|0,l=l|0;var c=0,f=0,d=0,m=0,B=0,k=0,Q=0;return k=C,C=C+16|0,d=k,m=k+4|0,n[d>>2]=s,Q=lR()|0,B=Q+24|0,l=gr(l,4)|0,n[m>>2]=l,c=Q+28|0,f=n[c>>2]|0,f>>>0<(n[Q+32>>2]|0)>>>0?(H9(f,s,l),l=(n[c>>2]|0)+8|0,n[c>>2]=l):(bOe(B,d,m),l=n[c>>2]|0),C=k,(l-(n[B>>2]|0)>>3)+-1|0}function H9(s,l,c){s=s|0,l=l|0,c=c|0,n[s>>2]=l,n[s+4>>2]=c}function bOe(s,l,c){s=s|0,l=l|0,c=c|0;var f=0,d=0,m=0,B=0,k=0,Q=0,O=0,M=0;if(k=C,C=C+32|0,d=k,m=s+4|0,B=((n[m>>2]|0)-(n[s>>2]|0)>>3)+1|0,f=xOe(s)|0,f>>>0>>0)Jr(s);else{Q=n[s>>2]|0,M=(n[s+8>>2]|0)-Q|0,O=M>>2,kOe(d,M>>3>>>0>>1>>>0?O>>>0>>0?B:O:f,(n[m>>2]|0)-Q>>3,s+8|0),B=d+8|0,H9(n[B>>2]|0,n[l>>2]|0,n[c>>2]|0),n[B>>2]=(n[B>>2]|0)+8,QOe(s,d),FOe(d),C=k;return}}function xOe(s){return s=s|0,536870911}function kOe(s,l,c,f){s=s|0,l=l|0,c=c|0,f=f|0;var d=0;n[s+12>>2]=0,n[s+16>>2]=f;do if(l)if(l>>>0>536870911)Tt();else{d=Kt(l<<3)|0;break}else d=0;while(!1);n[s>>2]=d,f=d+(c<<3)|0,n[s+8>>2]=f,n[s+4>>2]=f,n[s+12>>2]=d+(l<<3)}function QOe(s,l){s=s|0,l=l|0;var c=0,f=0,d=0,m=0,B=0;f=n[s>>2]|0,B=s+4|0,m=l+4|0,d=(n[B>>2]|0)-f|0,c=(n[m>>2]|0)+(0-(d>>3)<<3)|0,n[m>>2]=c,(d|0)>0?(Dr(c|0,f|0,d|0)|0,f=m,c=n[m>>2]|0):f=m,m=n[s>>2]|0,n[s>>2]=c,n[f>>2]=m,m=l+8|0,d=n[B>>2]|0,n[B>>2]=n[m>>2],n[m>>2]=d,m=s+8|0,B=l+12|0,s=n[m>>2]|0,n[m>>2]=n[B>>2],n[B>>2]=s,n[l>>2]=n[f>>2]}function FOe(s){s=s|0;var l=0,c=0,f=0;l=n[s+4>>2]|0,c=s+8|0,f=n[c>>2]|0,(f|0)!=(l|0)&&(n[c>>2]=f+(~((f+-8-l|0)>>>3)<<3)),s=n[s>>2]|0,s|0&>(s)}function q9(s){s=s|0,NOe(s)}function ROe(s){s=s|0,TOe(s+24|0)}function TOe(s){s=s|0;var l=0,c=0,f=0;c=n[s>>2]|0,f=c,c|0&&(s=s+4|0,l=n[s>>2]|0,(l|0)!=(c|0)&&(n[s>>2]=l+(~((l+-8-f|0)>>>3)<<3)),gt(c))}function NOe(s){s=s|0;var l=0;l=Vr()|0,zr(s,1,11,l,LOe()|0,1),n[s+24>>2]=0,n[s+28>>2]=0,n[s+32>>2]=0}function LOe(){return 1852}function MOe(s,l){return s=s|0,l=l|0,UOe(n[(OOe(s)|0)>>2]|0,l)|0}function OOe(s){return s=s|0,(n[(lR()|0)+24>>2]|0)+(s<<3)|0}function UOe(s,l){s=s|0,l=l|0;var c=0,f=0;return c=C,C=C+16|0,f=c,v0(f,l),l=D0(f,l)|0,l=jv(F0[s&31](l)|0)|0,C=c,l|0}function _Oe(s,l,c,f){s=s|0,l=l|0,c=c|0,f=f|0;var d=0,m=0;m=n[s>>2]|0,d=cR()|0,s=HOe(c)|0,hn(m,l,d,s,qOe(c,f)|0,f)}function cR(){var 
s=0,l=0;if(o[8056]|0||(G9(10932),ir(67,10932,U|0)|0,l=8056,n[l>>2]=1,n[l+4>>2]=0),!(Rr(10932)|0)){s=10932,l=s+36|0;do n[s>>2]=0,s=s+4|0;while((s|0)<(l|0));G9(10932)}return 10932}function HOe(s){return s=s|0,s|0}function qOe(s,l){s=s|0,l=l|0;var c=0,f=0,d=0,m=0,B=0,k=0,Q=0;return k=C,C=C+16|0,d=k,m=k+4|0,n[d>>2]=s,Q=cR()|0,B=Q+24|0,l=gr(l,4)|0,n[m>>2]=l,c=Q+28|0,f=n[c>>2]|0,f>>>0<(n[Q+32>>2]|0)>>>0?(j9(f,s,l),l=(n[c>>2]|0)+8|0,n[c>>2]=l):(jOe(B,d,m),l=n[c>>2]|0),C=k,(l-(n[B>>2]|0)>>3)+-1|0}function j9(s,l,c){s=s|0,l=l|0,c=c|0,n[s>>2]=l,n[s+4>>2]=c}function jOe(s,l,c){s=s|0,l=l|0,c=c|0;var f=0,d=0,m=0,B=0,k=0,Q=0,O=0,M=0;if(k=C,C=C+32|0,d=k,m=s+4|0,B=((n[m>>2]|0)-(n[s>>2]|0)>>3)+1|0,f=GOe(s)|0,f>>>0>>0)Jr(s);else{Q=n[s>>2]|0,M=(n[s+8>>2]|0)-Q|0,O=M>>2,YOe(d,M>>3>>>0>>1>>>0?O>>>0>>0?B:O:f,(n[m>>2]|0)-Q>>3,s+8|0),B=d+8|0,j9(n[B>>2]|0,n[l>>2]|0,n[c>>2]|0),n[B>>2]=(n[B>>2]|0)+8,WOe(s,d),KOe(d),C=k;return}}function GOe(s){return s=s|0,536870911}function YOe(s,l,c,f){s=s|0,l=l|0,c=c|0,f=f|0;var d=0;n[s+12>>2]=0,n[s+16>>2]=f;do if(l)if(l>>>0>536870911)Tt();else{d=Kt(l<<3)|0;break}else d=0;while(!1);n[s>>2]=d,f=d+(c<<3)|0,n[s+8>>2]=f,n[s+4>>2]=f,n[s+12>>2]=d+(l<<3)}function WOe(s,l){s=s|0,l=l|0;var c=0,f=0,d=0,m=0,B=0;f=n[s>>2]|0,B=s+4|0,m=l+4|0,d=(n[B>>2]|0)-f|0,c=(n[m>>2]|0)+(0-(d>>3)<<3)|0,n[m>>2]=c,(d|0)>0?(Dr(c|0,f|0,d|0)|0,f=m,c=n[m>>2]|0):f=m,m=n[s>>2]|0,n[s>>2]=c,n[f>>2]=m,m=l+8|0,d=n[B>>2]|0,n[B>>2]=n[m>>2],n[m>>2]=d,m=s+8|0,B=l+12|0,s=n[m>>2]|0,n[m>>2]=n[B>>2],n[B>>2]=s,n[l>>2]=n[f>>2]}function KOe(s){s=s|0;var l=0,c=0,f=0;l=n[s+4>>2]|0,c=s+8|0,f=n[c>>2]|0,(f|0)!=(l|0)&&(n[c>>2]=f+(~((f+-8-l|0)>>>3)<<3)),s=n[s>>2]|0,s|0&>(s)}function G9(s){s=s|0,JOe(s)}function VOe(s){s=s|0,zOe(s+24|0)}function zOe(s){s=s|0;var l=0,c=0,f=0;c=n[s>>2]|0,f=c,c|0&&(s=s+4|0,l=n[s>>2]|0,(l|0)!=(c|0)&&(n[s>>2]=l+(~((l+-8-f|0)>>>3)<<3)),gt(c))}function JOe(s){s=s|0;var l=0;l=Vr()|0,zr(s,1,7,l,XOe()|0,2),n[s+24>>2]=0,n[s+28>>2]=0,n[s+32>>2]=0}function XOe(){return 1860}function ZOe(s,l,c){return s=s|0,l=l|0,c=c|0,e4e(n[($Oe(s)|0)>>2]|0,l,c)|0}function $Oe(s){return s=s|0,(n[(cR()|0)+24>>2]|0)+(s<<3)|0}function e4e(s,l,c){s=s|0,l=l|0,c=c|0;var f=0,d=0,m=0,B=0,k=0,Q=0;return f=C,C=C+32|0,B=f+12|0,m=f+8|0,k=f,Q=f+16|0,d=f+4|0,t4e(Q,l),r4e(k,Q,l),Ip(d,c),c=Bp(d,c)|0,n[B>>2]=n[k>>2],vw[s&15](m,B,c),c=n4e(m)|0,jA(m),vp(d),C=f,c|0}function t4e(s,l){s=s|0,l=l|0}function r4e(s,l,c){s=s|0,l=l|0,c=c|0,i4e(s,c)}function n4e(s){return s=s|0,ya(s)|0}function i4e(s,l){s=s|0,l=l|0;var c=0,f=0,d=0;d=C,C=C+16|0,c=d,f=l,f&1?(s4e(c,0),ii(f|0,c|0)|0,o4e(s,c),a4e(c)):n[s>>2]=n[l>>2],C=d}function s4e(s,l){s=s|0,l=l|0,K5(s,l),n[s+4>>2]=0,o[s+8>>0]=0}function o4e(s,l){s=s|0,l=l|0,n[s>>2]=n[l+4>>2]}function a4e(s){s=s|0,o[s+8>>0]=0}function l4e(s,l,c,f){s=s|0,l=l|0,c=c|0,f=f|0;var d=0,m=0;m=n[s>>2]|0,d=uR()|0,s=c4e(c)|0,hn(m,l,d,s,u4e(c,f)|0,f)}function uR(){var s=0,l=0;if(o[8064]|0||(W9(10968),ir(68,10968,U|0)|0,l=8064,n[l>>2]=1,n[l+4>>2]=0),!(Rr(10968)|0)){s=10968,l=s+36|0;do n[s>>2]=0,s=s+4|0;while((s|0)<(l|0));W9(10968)}return 10968}function c4e(s){return s=s|0,s|0}function u4e(s,l){s=s|0,l=l|0;var c=0,f=0,d=0,m=0,B=0,k=0,Q=0;return k=C,C=C+16|0,d=k,m=k+4|0,n[d>>2]=s,Q=uR()|0,B=Q+24|0,l=gr(l,4)|0,n[m>>2]=l,c=Q+28|0,f=n[c>>2]|0,f>>>0<(n[Q+32>>2]|0)>>>0?(Y9(f,s,l),l=(n[c>>2]|0)+8|0,n[c>>2]=l):(A4e(B,d,m),l=n[c>>2]|0),C=k,(l-(n[B>>2]|0)>>3)+-1|0}function Y9(s,l,c){s=s|0,l=l|0,c=c|0,n[s>>2]=l,n[s+4>>2]=c}function A4e(s,l,c){s=s|0,l=l|0,c=c|0;var 
f=0,d=0,m=0,B=0,k=0,Q=0,O=0,M=0;if(k=C,C=C+32|0,d=k,m=s+4|0,B=((n[m>>2]|0)-(n[s>>2]|0)>>3)+1|0,f=f4e(s)|0,f>>>0>>0)Jr(s);else{Q=n[s>>2]|0,M=(n[s+8>>2]|0)-Q|0,O=M>>2,p4e(d,M>>3>>>0>>1>>>0?O>>>0>>0?B:O:f,(n[m>>2]|0)-Q>>3,s+8|0),B=d+8|0,Y9(n[B>>2]|0,n[l>>2]|0,n[c>>2]|0),n[B>>2]=(n[B>>2]|0)+8,h4e(s,d),g4e(d),C=k;return}}function f4e(s){return s=s|0,536870911}function p4e(s,l,c,f){s=s|0,l=l|0,c=c|0,f=f|0;var d=0;n[s+12>>2]=0,n[s+16>>2]=f;do if(l)if(l>>>0>536870911)Tt();else{d=Kt(l<<3)|0;break}else d=0;while(!1);n[s>>2]=d,f=d+(c<<3)|0,n[s+8>>2]=f,n[s+4>>2]=f,n[s+12>>2]=d+(l<<3)}function h4e(s,l){s=s|0,l=l|0;var c=0,f=0,d=0,m=0,B=0;f=n[s>>2]|0,B=s+4|0,m=l+4|0,d=(n[B>>2]|0)-f|0,c=(n[m>>2]|0)+(0-(d>>3)<<3)|0,n[m>>2]=c,(d|0)>0?(Dr(c|0,f|0,d|0)|0,f=m,c=n[m>>2]|0):f=m,m=n[s>>2]|0,n[s>>2]=c,n[f>>2]=m,m=l+8|0,d=n[B>>2]|0,n[B>>2]=n[m>>2],n[m>>2]=d,m=s+8|0,B=l+12|0,s=n[m>>2]|0,n[m>>2]=n[B>>2],n[B>>2]=s,n[l>>2]=n[f>>2]}function g4e(s){s=s|0;var l=0,c=0,f=0;l=n[s+4>>2]|0,c=s+8|0,f=n[c>>2]|0,(f|0)!=(l|0)&&(n[c>>2]=f+(~((f+-8-l|0)>>>3)<<3)),s=n[s>>2]|0,s|0&>(s)}function W9(s){s=s|0,y4e(s)}function d4e(s){s=s|0,m4e(s+24|0)}function m4e(s){s=s|0;var l=0,c=0,f=0;c=n[s>>2]|0,f=c,c|0&&(s=s+4|0,l=n[s>>2]|0,(l|0)!=(c|0)&&(n[s>>2]=l+(~((l+-8-f|0)>>>3)<<3)),gt(c))}function y4e(s){s=s|0;var l=0;l=Vr()|0,zr(s,1,1,l,E4e()|0,5),n[s+24>>2]=0,n[s+28>>2]=0,n[s+32>>2]=0}function E4e(){return 1872}function C4e(s,l,c,f,d,m){s=s|0,l=l|0,c=c|0,f=f|0,d=d|0,m=m|0,I4e(n[(w4e(s)|0)>>2]|0,l,c,f,d,m)}function w4e(s){return s=s|0,(n[(uR()|0)+24>>2]|0)+(s<<3)|0}function I4e(s,l,c,f,d,m){s=s|0,l=l|0,c=c|0,f=f|0,d=d|0,m=m|0;var B=0,k=0,Q=0,O=0,M=0,j=0;B=C,C=C+32|0,k=B+16|0,Q=B+12|0,O=B+8|0,M=B+4|0,j=B,Ip(k,l),l=Bp(k,l)|0,Ip(Q,c),c=Bp(Q,c)|0,Ip(O,f),f=Bp(O,f)|0,Ip(M,d),d=Bp(M,d)|0,Ip(j,m),m=Bp(j,m)|0,h7[s&1](l,c,f,d,m),vp(j),vp(M),vp(O),vp(Q),vp(k),C=B}function B4e(s,l,c,f){s=s|0,l=l|0,c=c|0,f=f|0;var d=0,m=0;m=n[s>>2]|0,d=AR()|0,s=v4e(c)|0,hn(m,l,d,s,D4e(c,f)|0,f)}function AR(){var s=0,l=0;if(o[8072]|0||(V9(11004),ir(69,11004,U|0)|0,l=8072,n[l>>2]=1,n[l+4>>2]=0),!(Rr(11004)|0)){s=11004,l=s+36|0;do n[s>>2]=0,s=s+4|0;while((s|0)<(l|0));V9(11004)}return 11004}function v4e(s){return s=s|0,s|0}function D4e(s,l){s=s|0,l=l|0;var c=0,f=0,d=0,m=0,B=0,k=0,Q=0;return k=C,C=C+16|0,d=k,m=k+4|0,n[d>>2]=s,Q=AR()|0,B=Q+24|0,l=gr(l,4)|0,n[m>>2]=l,c=Q+28|0,f=n[c>>2]|0,f>>>0<(n[Q+32>>2]|0)>>>0?(K9(f,s,l),l=(n[c>>2]|0)+8|0,n[c>>2]=l):(P4e(B,d,m),l=n[c>>2]|0),C=k,(l-(n[B>>2]|0)>>3)+-1|0}function K9(s,l,c){s=s|0,l=l|0,c=c|0,n[s>>2]=l,n[s+4>>2]=c}function P4e(s,l,c){s=s|0,l=l|0,c=c|0;var f=0,d=0,m=0,B=0,k=0,Q=0,O=0,M=0;if(k=C,C=C+32|0,d=k,m=s+4|0,B=((n[m>>2]|0)-(n[s>>2]|0)>>3)+1|0,f=S4e(s)|0,f>>>0>>0)Jr(s);else{Q=n[s>>2]|0,M=(n[s+8>>2]|0)-Q|0,O=M>>2,b4e(d,M>>3>>>0>>1>>>0?O>>>0>>0?B:O:f,(n[m>>2]|0)-Q>>3,s+8|0),B=d+8|0,K9(n[B>>2]|0,n[l>>2]|0,n[c>>2]|0),n[B>>2]=(n[B>>2]|0)+8,x4e(s,d),k4e(d),C=k;return}}function S4e(s){return s=s|0,536870911}function b4e(s,l,c,f){s=s|0,l=l|0,c=c|0,f=f|0;var d=0;n[s+12>>2]=0,n[s+16>>2]=f;do if(l)if(l>>>0>536870911)Tt();else{d=Kt(l<<3)|0;break}else d=0;while(!1);n[s>>2]=d,f=d+(c<<3)|0,n[s+8>>2]=f,n[s+4>>2]=f,n[s+12>>2]=d+(l<<3)}function x4e(s,l){s=s|0,l=l|0;var c=0,f=0,d=0,m=0,B=0;f=n[s>>2]|0,B=s+4|0,m=l+4|0,d=(n[B>>2]|0)-f|0,c=(n[m>>2]|0)+(0-(d>>3)<<3)|0,n[m>>2]=c,(d|0)>0?(Dr(c|0,f|0,d|0)|0,f=m,c=n[m>>2]|0):f=m,m=n[s>>2]|0,n[s>>2]=c,n[f>>2]=m,m=l+8|0,d=n[B>>2]|0,n[B>>2]=n[m>>2],n[m>>2]=d,m=s+8|0,B=l+12|0,s=n[m>>2]|0,n[m>>2]=n[B>>2],n[B>>2]=s,n[l>>2]=n[f>>2]}function k4e(s){s=s|0;var 
l=0,c=0,f=0;l=n[s+4>>2]|0,c=s+8|0,f=n[c>>2]|0,(f|0)!=(l|0)&&(n[c>>2]=f+(~((f+-8-l|0)>>>3)<<3)),s=n[s>>2]|0,s|0&>(s)}function V9(s){s=s|0,R4e(s)}function Q4e(s){s=s|0,F4e(s+24|0)}function F4e(s){s=s|0;var l=0,c=0,f=0;c=n[s>>2]|0,f=c,c|0&&(s=s+4|0,l=n[s>>2]|0,(l|0)!=(c|0)&&(n[s>>2]=l+(~((l+-8-f|0)>>>3)<<3)),gt(c))}function R4e(s){s=s|0;var l=0;l=Vr()|0,zr(s,1,12,l,T4e()|0,2),n[s+24>>2]=0,n[s+28>>2]=0,n[s+32>>2]=0}function T4e(){return 1896}function N4e(s,l,c){s=s|0,l=l|0,c=c|0,M4e(n[(L4e(s)|0)>>2]|0,l,c)}function L4e(s){return s=s|0,(n[(AR()|0)+24>>2]|0)+(s<<3)|0}function M4e(s,l,c){s=s|0,l=l|0,c=c|0;var f=0,d=0,m=0;f=C,C=C+16|0,m=f+4|0,d=f,O4e(m,l),l=U4e(m,l)|0,Ip(d,c),c=Bp(d,c)|0,tf[s&31](l,c),vp(d),C=f}function O4e(s,l){s=s|0,l=l|0}function U4e(s,l){return s=s|0,l=l|0,_4e(l)|0}function _4e(s){return s=s|0,s|0}function H4e(){var s=0;return o[8080]|0||(z9(11040),ir(70,11040,U|0)|0,s=8080,n[s>>2]=1,n[s+4>>2]=0),Rr(11040)|0||z9(11040),11040}function z9(s){s=s|0,G4e(s),S0(s,71)}function q4e(s){s=s|0,j4e(s+24|0)}function j4e(s){s=s|0;var l=0,c=0,f=0;c=n[s>>2]|0,f=c,c|0&&(s=s+4|0,l=n[s>>2]|0,(l|0)!=(c|0)&&(n[s>>2]=l+(~((l+-8-f|0)>>>3)<<3)),gt(c))}function G4e(s){s=s|0;var l=0;l=Vr()|0,zr(s,5,7,l,V4e()|0,0),n[s+24>>2]=0,n[s+28>>2]=0,n[s+32>>2]=0}function Y4e(s){s=s|0,W4e(s)}function W4e(s){s=s|0,K4e(s)}function K4e(s){s=s|0,o[s+8>>0]=1}function V4e(){return 1936}function z4e(){return J4e()|0}function J4e(){var s=0,l=0,c=0,f=0,d=0,m=0,B=0;return l=C,C=C+16|0,d=l+4|0,B=l,c=Va(8)|0,s=c,m=s+4|0,n[m>>2]=Kt(1)|0,f=Kt(8)|0,m=n[m>>2]|0,n[B>>2]=0,n[d>>2]=n[B>>2],X4e(f,m,d),n[c>>2]=f,C=l,s|0}function X4e(s,l,c){s=s|0,l=l|0,c=c|0,n[s>>2]=l,c=Kt(16)|0,n[c+4>>2]=0,n[c+8>>2]=0,n[c>>2]=1916,n[c+12>>2]=l,n[s+4>>2]=c}function Z4e(s){s=s|0,Md(s),gt(s)}function $4e(s){s=s|0,s=n[s+12>>2]|0,s|0&>(s)}function eUe(s){s=s|0,gt(s)}function tUe(){var s=0;return o[8088]|0||(lUe(11076),ir(25,11076,U|0)|0,s=8088,n[s>>2]=1,n[s+4>>2]=0),11076}function rUe(s,l){s=s|0,l=l|0,n[s>>2]=nUe()|0,n[s+4>>2]=iUe()|0,n[s+12>>2]=l,n[s+8>>2]=sUe()|0,n[s+32>>2]=10}function nUe(){return 11745}function iUe(){return 1940}function sUe(){return Gv()|0}function oUe(s,l,c,f){s=s|0,l=l|0,c=c|0,f=f|0,(Dp(f,896)|0)==512?c|0&&(aUe(c),gt(c)):l|0&>(l)}function aUe(s){s=s|0,s=n[s+4>>2]|0,s|0&&Sp(s)}function lUe(s){s=s|0,wp(s)}function xc(s,l){s=s|0,l=l|0,n[s>>2]=l}function fR(s){return s=s|0,n[s>>2]|0}function cUe(s){return s=s|0,o[n[s>>2]>>0]|0}function uUe(s,l){s=s|0,l=l|0;var c=0,f=0;c=C,C=C+16|0,f=c,n[f>>2]=n[s>>2],AUe(l,f)|0,C=c}function AUe(s,l){s=s|0,l=l|0;var c=0;return c=fUe(n[s>>2]|0,l)|0,l=s+4|0,n[(n[l>>2]|0)+8>>2]=c,n[(n[l>>2]|0)+8>>2]|0}function fUe(s,l){s=s|0,l=l|0;var c=0,f=0;return c=C,C=C+16|0,f=c,za(f),s=ya(s)|0,l=pUe(s,n[l>>2]|0)|0,Ja(f),C=c,l|0}function za(s){s=s|0,n[s>>2]=n[2701],n[s+4>>2]=n[2703]}function pUe(s,l){s=s|0,l=l|0;var c=0;return c=Pl(hUe()|0)|0,Qn(0,c|0,s|0,sR(l)|0)|0}function Ja(s){s=s|0,O9(n[s>>2]|0,n[s+4>>2]|0)}function hUe(){var s=0;return o[8096]|0||(gUe(11120),s=8096,n[s>>2]=1,n[s+4>>2]=0),11120}function gUe(s){s=s|0,Sl(s,dUe()|0,1)}function dUe(){return 1948}function mUe(){yUe()}function yUe(){var s=0,l=0,c=0,f=0,d=0,m=0,B=0,k=0,Q=0,O=0,M=0,j=0,se=0,je=0,Oe=0,Qe=0;if(Oe=C,C=C+16|0,M=Oe+4|0,j=Oe,Ti(65536,10804,n[2702]|0,10812),c=E9()|0,l=n[c>>2]|0,s=n[l>>2]|0,s|0)for(f=n[c+8>>2]|0,c=n[c+4>>2]|0;Ac(s|0,u[c>>0]|0|0,o[f>>0]|0),l=l+4|0,s=n[l>>2]|0,s;)f=f+1|0,c=c+1|0;if(s=C9()|0,l=n[s>>2]|0,l|0)do fu(l|0,n[s+4>>2]|0),s=s+8|0,l=n[s>>2]|0;while(l|0);fu(EUe()|0,5167),O=Fd()|0,s=n[O>>2]|0;e:do if(s|0){do 
CUe(n[s+4>>2]|0),s=n[s>>2]|0;while(s|0);if(s=n[O>>2]|0,s|0){Q=O;do{for(;d=s,s=n[s>>2]|0,d=n[d+4>>2]|0,!!(wUe(d)|0);)if(n[j>>2]=Q,n[M>>2]=n[j>>2],IUe(O,M)|0,!s)break e;if(BUe(d),Q=n[Q>>2]|0,l=J9(d)|0,m=Hi()|0,B=C,C=C+((1*(l<<2)|0)+15&-16)|0,k=C,C=C+((1*(l<<2)|0)+15&-16)|0,l=n[(F9(d)|0)>>2]|0,l|0)for(c=B,f=k;n[c>>2]=n[(Rd(n[l+4>>2]|0)|0)>>2],n[f>>2]=n[l+8>>2],l=n[l>>2]|0,l;)c=c+4|0,f=f+4|0;Qe=Rd(d)|0,l=vUe(d)|0,c=J9(d)|0,f=DUe(d)|0,pu(Qe|0,l|0,B|0,k|0,c|0,f|0,$F(d)|0),_i(m|0)}while(s|0)}}while(!1);if(s=n[(eR()|0)>>2]|0,s|0)do Qe=s+4|0,O=tR(Qe)|0,d=Cw(O)|0,m=yw(O)|0,B=(Ew(O)|0)+1|0,k=zv(O)|0,Q=X9(Qe)|0,O=Rr(O)|0,M=Wv(Qe)|0,j=pR(Qe)|0,El(0,d|0,m|0,B|0,k|0,Q|0,O|0,M|0,j|0,hR(Qe)|0),s=n[s>>2]|0;while(s|0);s=n[(Fd()|0)>>2]|0;e:do if(s|0){t:for(;;){if(l=n[s+4>>2]|0,l|0&&(se=n[(Rd(l)|0)>>2]|0,je=n[(R9(l)|0)>>2]|0,je|0)){c=je;do{l=c+4|0,f=tR(l)|0;r:do if(f|0)switch(Rr(f)|0){case 0:break t;case 4:case 3:case 2:{k=Cw(f)|0,Q=yw(f)|0,O=(Ew(f)|0)+1|0,M=zv(f)|0,j=Rr(f)|0,Qe=Wv(l)|0,El(se|0,k|0,Q|0,O|0,M|0,0,j|0,Qe|0,pR(l)|0,hR(l)|0);break r}case 1:{B=Cw(f)|0,k=yw(f)|0,Q=(Ew(f)|0)+1|0,O=zv(f)|0,M=X9(l)|0,j=Rr(f)|0,Qe=Wv(l)|0,El(se|0,B|0,k|0,Q|0,O|0,M|0,j|0,Qe|0,pR(l)|0,hR(l)|0);break r}case 5:{O=Cw(f)|0,M=yw(f)|0,j=(Ew(f)|0)+1|0,Qe=zv(f)|0,El(se|0,O|0,M|0,j|0,Qe|0,PUe(f)|0,Rr(f)|0,0,0,0);break r}default:break r}while(!1);c=n[c>>2]|0}while(c|0)}if(s=n[s>>2]|0,!s)break e}Tt()}while(!1);Ie(),C=Oe}function EUe(){return 11703}function CUe(s){s=s|0,o[s+40>>0]=0}function wUe(s){return s=s|0,(o[s+40>>0]|0)!=0|0}function IUe(s,l){return s=s|0,l=l|0,l=SUe(l)|0,s=n[l>>2]|0,n[l>>2]=n[s>>2],gt(s),n[l>>2]|0}function BUe(s){s=s|0,o[s+40>>0]=1}function J9(s){return s=s|0,n[s+20>>2]|0}function vUe(s){return s=s|0,n[s+8>>2]|0}function DUe(s){return s=s|0,n[s+32>>2]|0}function zv(s){return s=s|0,n[s+4>>2]|0}function X9(s){return s=s|0,n[s+4>>2]|0}function pR(s){return s=s|0,n[s+8>>2]|0}function hR(s){return s=s|0,n[s+16>>2]|0}function PUe(s){return s=s|0,n[s+20>>2]|0}function SUe(s){return s=s|0,n[s>>2]|0}function Jv(s){s=s|0;var l=0,c=0,f=0,d=0,m=0,B=0,k=0,Q=0,O=0,M=0,j=0,se=0,je=0,Oe=0,Qe=0,$e=0,Je=0,lt=0,_e=0,qe=0,Lt=0;Lt=C,C=C+16|0,se=Lt;do if(s>>>0<245){if(O=s>>>0<11?16:s+11&-8,s=O>>>3,j=n[2783]|0,c=j>>>s,c&3|0)return l=(c&1^1)+s|0,s=11172+(l<<1<<2)|0,c=s+8|0,f=n[c>>2]|0,d=f+8|0,m=n[d>>2]|0,(s|0)==(m|0)?n[2783]=j&~(1<>2]=s,n[c>>2]=m),qe=l<<3,n[f+4>>2]=qe|3,qe=f+qe+4|0,n[qe>>2]=n[qe>>2]|1,qe=d,C=Lt,qe|0;if(M=n[2785]|0,O>>>0>M>>>0){if(c|0)return l=2<>>12&16,l=l>>>B,c=l>>>5&8,l=l>>>c,d=l>>>2&4,l=l>>>d,s=l>>>1&2,l=l>>>s,f=l>>>1&1,f=(c|B|d|s|f)+(l>>>f)|0,l=11172+(f<<1<<2)|0,s=l+8|0,d=n[s>>2]|0,B=d+8|0,c=n[B>>2]|0,(l|0)==(c|0)?(s=j&~(1<>2]=l,n[s>>2]=c,s=j),m=(f<<3)-O|0,n[d+4>>2]=O|3,f=d+O|0,n[f+4>>2]=m|1,n[f+m>>2]=m,M|0&&(d=n[2788]|0,l=M>>>3,c=11172+(l<<1<<2)|0,l=1<>2]|0):(n[2783]=s|l,l=c,s=c+8|0),n[s>>2]=d,n[l+12>>2]=d,n[d+8>>2]=l,n[d+12>>2]=c),n[2785]=m,n[2788]=f,qe=B,C=Lt,qe|0;if(k=n[2784]|0,k){if(c=(k&0-k)+-1|0,B=c>>>12&16,c=c>>>B,m=c>>>5&8,c=c>>>m,Q=c>>>2&4,c=c>>>Q,f=c>>>1&2,c=c>>>f,s=c>>>1&1,s=n[11436+((m|B|Q|f|s)+(c>>>s)<<2)>>2]|0,c=(n[s+4>>2]&-8)-O|0,f=n[s+16+(((n[s+16>>2]|0)==0&1)<<2)>>2]|0,!f)Q=s,m=c;else{do B=(n[f+4>>2]&-8)-O|0,Q=B>>>0>>0,c=Q?B:c,s=Q?f:s,f=n[f+16+(((n[f+16>>2]|0)==0&1)<<2)>>2]|0;while(f|0);Q=s,m=c}if(B=Q+O|0,Q>>>0>>0){d=n[Q+24>>2]|0,l=n[Q+12>>2]|0;do if((l|0)==(Q|0)){if(s=Q+20|0,l=n[s>>2]|0,!l&&(s=Q+16|0,l=n[s>>2]|0,!l)){c=0;break}for(;;){if(c=l+20|0,f=n[c>>2]|0,f|0){l=f,s=c;continue}if(c=l+16|0,f=n[c>>2]|0,f)l=f,s=c;else break}n[s>>2]=0,c=l}else 
c=n[Q+8>>2]|0,n[c+12>>2]=l,n[l+8>>2]=c,c=l;while(!1);do if(d|0){if(l=n[Q+28>>2]|0,s=11436+(l<<2)|0,(Q|0)==(n[s>>2]|0)){if(n[s>>2]=c,!c){n[2784]=k&~(1<>2]|0)!=(Q|0)&1)<<2)>>2]=c,!c)break;n[c+24>>2]=d,l=n[Q+16>>2]|0,l|0&&(n[c+16>>2]=l,n[l+24>>2]=c),l=n[Q+20>>2]|0,l|0&&(n[c+20>>2]=l,n[l+24>>2]=c)}while(!1);return m>>>0<16?(qe=m+O|0,n[Q+4>>2]=qe|3,qe=Q+qe+4|0,n[qe>>2]=n[qe>>2]|1):(n[Q+4>>2]=O|3,n[B+4>>2]=m|1,n[B+m>>2]=m,M|0&&(f=n[2788]|0,l=M>>>3,c=11172+(l<<1<<2)|0,l=1<>2]|0):(n[2783]=j|l,l=c,s=c+8|0),n[s>>2]=f,n[l+12>>2]=f,n[f+8>>2]=l,n[f+12>>2]=c),n[2785]=m,n[2788]=B),qe=Q+8|0,C=Lt,qe|0}else j=O}else j=O}else j=O}else if(s>>>0<=4294967231)if(s=s+11|0,O=s&-8,Q=n[2784]|0,Q){f=0-O|0,s=s>>>8,s?O>>>0>16777215?k=31:(j=(s+1048320|0)>>>16&8,_e=s<>>16&4,_e=_e<>>16&2,k=14-(M|j|k)+(_e<>>15)|0,k=O>>>(k+7|0)&1|k<<1):k=0,c=n[11436+(k<<2)>>2]|0;e:do if(!c)c=0,s=0,_e=57;else for(s=0,B=O<<((k|0)==31?0:25-(k>>>1)|0),m=0;;){if(d=(n[c+4>>2]&-8)-O|0,d>>>0>>0)if(d)s=c,f=d;else{s=c,f=0,d=c,_e=61;break e}if(d=n[c+20>>2]|0,c=n[c+16+(B>>>31<<2)>>2]|0,m=(d|0)==0|(d|0)==(c|0)?m:d,d=(c|0)==0,d){c=m,_e=57;break}else B=B<<((d^1)&1)}while(!1);if((_e|0)==57){if((c|0)==0&(s|0)==0){if(s=2<>>12&16,j=j>>>B,m=j>>>5&8,j=j>>>m,k=j>>>2&4,j=j>>>k,M=j>>>1&2,j=j>>>M,c=j>>>1&1,s=0,c=n[11436+((m|B|k|M|c)+(j>>>c)<<2)>>2]|0}c?(d=c,_e=61):(k=s,B=f)}if((_e|0)==61)for(;;)if(_e=0,c=(n[d+4>>2]&-8)-O|0,j=c>>>0>>0,c=j?c:f,s=j?d:s,d=n[d+16+(((n[d+16>>2]|0)==0&1)<<2)>>2]|0,d)f=c,_e=61;else{k=s,B=c;break}if(k|0&&B>>>0<((n[2785]|0)-O|0)>>>0){if(m=k+O|0,k>>>0>=m>>>0)return qe=0,C=Lt,qe|0;d=n[k+24>>2]|0,l=n[k+12>>2]|0;do if((l|0)==(k|0)){if(s=k+20|0,l=n[s>>2]|0,!l&&(s=k+16|0,l=n[s>>2]|0,!l)){l=0;break}for(;;){if(c=l+20|0,f=n[c>>2]|0,f|0){l=f,s=c;continue}if(c=l+16|0,f=n[c>>2]|0,f)l=f,s=c;else break}n[s>>2]=0}else qe=n[k+8>>2]|0,n[qe+12>>2]=l,n[l+8>>2]=qe;while(!1);do if(d){if(s=n[k+28>>2]|0,c=11436+(s<<2)|0,(k|0)==(n[c>>2]|0)){if(n[c>>2]=l,!l){f=Q&~(1<>2]|0)!=(k|0)&1)<<2)>>2]=l,!l){f=Q;break}n[l+24>>2]=d,s=n[k+16>>2]|0,s|0&&(n[l+16>>2]=s,n[s+24>>2]=l),s=n[k+20>>2]|0,s&&(n[l+20>>2]=s,n[s+24>>2]=l),f=Q}else f=Q;while(!1);do if(B>>>0>=16){if(n[k+4>>2]=O|3,n[m+4>>2]=B|1,n[m+B>>2]=B,l=B>>>3,B>>>0<256){c=11172+(l<<1<<2)|0,s=n[2783]|0,l=1<>2]|0):(n[2783]=s|l,l=c,s=c+8|0),n[s>>2]=m,n[l+12>>2]=m,n[m+8>>2]=l,n[m+12>>2]=c;break}if(l=B>>>8,l?B>>>0>16777215?l=31:(_e=(l+1048320|0)>>>16&8,qe=l<<_e,lt=(qe+520192|0)>>>16&4,qe=qe<>>16&2,l=14-(lt|_e|l)+(qe<>>15)|0,l=B>>>(l+7|0)&1|l<<1):l=0,c=11436+(l<<2)|0,n[m+28>>2]=l,s=m+16|0,n[s+4>>2]=0,n[s>>2]=0,s=1<>2]=m,n[m+24>>2]=c,n[m+12>>2]=m,n[m+8>>2]=m;break}for(s=B<<((l|0)==31?0:25-(l>>>1)|0),c=n[c>>2]|0;;){if((n[c+4>>2]&-8|0)==(B|0)){_e=97;break}if(f=c+16+(s>>>31<<2)|0,l=n[f>>2]|0,l)s=s<<1,c=l;else{_e=96;break}}if((_e|0)==96){n[f>>2]=m,n[m+24>>2]=c,n[m+12>>2]=m,n[m+8>>2]=m;break}else if((_e|0)==97){_e=c+8|0,qe=n[_e>>2]|0,n[qe+12>>2]=m,n[_e>>2]=m,n[m+8>>2]=qe,n[m+12>>2]=c,n[m+24>>2]=0;break}}else qe=B+O|0,n[k+4>>2]=qe|3,qe=k+qe+4|0,n[qe>>2]=n[qe>>2]|1;while(!1);return qe=k+8|0,C=Lt,qe|0}else j=O}else j=O;else j=-1;while(!1);if(c=n[2785]|0,c>>>0>=j>>>0)return l=c-j|0,s=n[2788]|0,l>>>0>15?(qe=s+j|0,n[2788]=qe,n[2785]=l,n[qe+4>>2]=l|1,n[qe+l>>2]=l,n[s+4>>2]=j|3):(n[2785]=0,n[2788]=0,n[s+4>>2]=c|3,qe=s+c+4|0,n[qe>>2]=n[qe>>2]|1),qe=s+8|0,C=Lt,qe|0;if(B=n[2786]|0,B>>>0>j>>>0)return 
lt=B-j|0,n[2786]=lt,qe=n[2789]|0,_e=qe+j|0,n[2789]=_e,n[_e+4>>2]=lt|1,n[qe+4>>2]=j|3,qe=qe+8|0,C=Lt,qe|0;if(n[2901]|0?s=n[2903]|0:(n[2903]=4096,n[2902]=4096,n[2904]=-1,n[2905]=-1,n[2906]=0,n[2894]=0,s=se&-16^1431655768,n[se>>2]=s,n[2901]=s,s=4096),k=j+48|0,Q=j+47|0,m=s+Q|0,d=0-s|0,O=m&d,O>>>0<=j>>>0||(s=n[2893]|0,s|0&&(M=n[2891]|0,se=M+O|0,se>>>0<=M>>>0|se>>>0>s>>>0)))return qe=0,C=Lt,qe|0;e:do if(n[2894]&4)l=0,_e=133;else{c=n[2789]|0;t:do if(c){for(f=11580;s=n[f>>2]|0,!(s>>>0<=c>>>0&&(Qe=f+4|0,(s+(n[Qe>>2]|0)|0)>>>0>c>>>0));)if(s=n[f+8>>2]|0,s)f=s;else{_e=118;break t}if(l=m-B&d,l>>>0<2147483647)if(s=bp(l|0)|0,(s|0)==((n[f>>2]|0)+(n[Qe>>2]|0)|0)){if((s|0)!=-1){B=l,m=s,_e=135;break e}}else f=s,_e=126;else l=0}else _e=118;while(!1);do if((_e|0)==118)if(c=bp(0)|0,(c|0)!=-1&&(l=c,je=n[2902]|0,Oe=je+-1|0,l=(Oe&l|0?(Oe+l&0-je)-l|0:0)+O|0,je=n[2891]|0,Oe=l+je|0,l>>>0>j>>>0&l>>>0<2147483647)){if(Qe=n[2893]|0,Qe|0&&Oe>>>0<=je>>>0|Oe>>>0>Qe>>>0){l=0;break}if(s=bp(l|0)|0,(s|0)==(c|0)){B=l,m=c,_e=135;break e}else f=s,_e=126}else l=0;while(!1);do if((_e|0)==126){if(c=0-l|0,!(k>>>0>l>>>0&(l>>>0<2147483647&(f|0)!=-1)))if((f|0)==-1){l=0;break}else{B=l,m=f,_e=135;break e}if(s=n[2903]|0,s=Q-l+s&0-s,s>>>0>=2147483647){B=l,m=f,_e=135;break e}if((bp(s|0)|0)==-1){bp(c|0)|0,l=0;break}else{B=s+l|0,m=f,_e=135;break e}}while(!1);n[2894]=n[2894]|4,_e=133}while(!1);if((_e|0)==133&&O>>>0<2147483647&&(lt=bp(O|0)|0,Qe=bp(0)|0,$e=Qe-lt|0,Je=$e>>>0>(j+40|0)>>>0,!((lt|0)==-1|Je^1|lt>>>0>>0&((lt|0)!=-1&(Qe|0)!=-1)^1))&&(B=Je?$e:l,m=lt,_e=135),(_e|0)==135){l=(n[2891]|0)+B|0,n[2891]=l,l>>>0>(n[2892]|0)>>>0&&(n[2892]=l),Q=n[2789]|0;do if(Q){for(l=11580;;){if(s=n[l>>2]|0,c=l+4|0,f=n[c>>2]|0,(m|0)==(s+f|0)){_e=145;break}if(d=n[l+8>>2]|0,d)l=d;else break}if((_e|0)==145&&!(n[l+12>>2]&8|0)&&Q>>>0>>0&Q>>>0>=s>>>0){n[c>>2]=f+B,qe=Q+8|0,qe=qe&7|0?0-qe&7:0,_e=Q+qe|0,qe=(n[2786]|0)+(B-qe)|0,n[2789]=_e,n[2786]=qe,n[_e+4>>2]=qe|1,n[_e+qe+4>>2]=40,n[2790]=n[2905];break}for(m>>>0<(n[2787]|0)>>>0&&(n[2787]=m),c=m+B|0,l=11580;;){if((n[l>>2]|0)==(c|0)){_e=153;break}if(s=n[l+8>>2]|0,s)l=s;else break}if((_e|0)==153&&!(n[l+12>>2]&8|0)){n[l>>2]=m,M=l+4|0,n[M>>2]=(n[M>>2]|0)+B,M=m+8|0,M=m+(M&7|0?0-M&7:0)|0,l=c+8|0,l=c+(l&7|0?0-l&7:0)|0,O=M+j|0,k=l-M-j|0,n[M+4>>2]=j|3;do if((l|0)!=(Q|0)){if((l|0)==(n[2788]|0)){qe=(n[2785]|0)+k|0,n[2785]=qe,n[2788]=O,n[O+4>>2]=qe|1,n[O+qe>>2]=qe;break}if(s=n[l+4>>2]|0,(s&3|0)==1){B=s&-8,f=s>>>3;e:do if(s>>>0<256)if(s=n[l+8>>2]|0,c=n[l+12>>2]|0,(c|0)==(s|0)){n[2783]=n[2783]&~(1<>2]=c,n[c+8>>2]=s;break}else{m=n[l+24>>2]|0,s=n[l+12>>2]|0;do if((s|0)==(l|0)){if(f=l+16|0,c=f+4|0,s=n[c>>2]|0,!s)if(s=n[f>>2]|0,s)c=f;else{s=0;break}for(;;){if(f=s+20|0,d=n[f>>2]|0,d|0){s=d,c=f;continue}if(f=s+16|0,d=n[f>>2]|0,d)s=d,c=f;else break}n[c>>2]=0}else qe=n[l+8>>2]|0,n[qe+12>>2]=s,n[s+8>>2]=qe;while(!1);if(!m)break;c=n[l+28>>2]|0,f=11436+(c<<2)|0;do if((l|0)!=(n[f>>2]|0)){if(n[m+16+(((n[m+16>>2]|0)!=(l|0)&1)<<2)>>2]=s,!s)break e}else{if(n[f>>2]=s,s|0)break;n[2784]=n[2784]&~(1<>2]=m,c=l+16|0,f=n[c>>2]|0,f|0&&(n[s+16>>2]=f,n[f+24>>2]=s),c=n[c+4>>2]|0,!c)break;n[s+20>>2]=c,n[c+24>>2]=s}while(!1);l=l+B|0,d=B+k|0}else d=k;if(l=l+4|0,n[l>>2]=n[l>>2]&-2,n[O+4>>2]=d|1,n[O+d>>2]=d,l=d>>>3,d>>>0<256){c=11172+(l<<1<<2)|0,s=n[2783]|0,l=1<>2]|0):(n[2783]=s|l,l=c,s=c+8|0),n[s>>2]=O,n[l+12>>2]=O,n[O+8>>2]=l,n[O+12>>2]=c;break}l=d>>>8;do 
if(!l)l=0;else{if(d>>>0>16777215){l=31;break}_e=(l+1048320|0)>>>16&8,qe=l<<_e,lt=(qe+520192|0)>>>16&4,qe=qe<>>16&2,l=14-(lt|_e|l)+(qe<>>15)|0,l=d>>>(l+7|0)&1|l<<1}while(!1);if(f=11436+(l<<2)|0,n[O+28>>2]=l,s=O+16|0,n[s+4>>2]=0,n[s>>2]=0,s=n[2784]|0,c=1<>2]=O,n[O+24>>2]=f,n[O+12>>2]=O,n[O+8>>2]=O;break}for(s=d<<((l|0)==31?0:25-(l>>>1)|0),c=n[f>>2]|0;;){if((n[c+4>>2]&-8|0)==(d|0)){_e=194;break}if(f=c+16+(s>>>31<<2)|0,l=n[f>>2]|0,l)s=s<<1,c=l;else{_e=193;break}}if((_e|0)==193){n[f>>2]=O,n[O+24>>2]=c,n[O+12>>2]=O,n[O+8>>2]=O;break}else if((_e|0)==194){_e=c+8|0,qe=n[_e>>2]|0,n[qe+12>>2]=O,n[_e>>2]=O,n[O+8>>2]=qe,n[O+12>>2]=c,n[O+24>>2]=0;break}}else qe=(n[2786]|0)+k|0,n[2786]=qe,n[2789]=O,n[O+4>>2]=qe|1;while(!1);return qe=M+8|0,C=Lt,qe|0}for(l=11580;s=n[l>>2]|0,!(s>>>0<=Q>>>0&&(qe=s+(n[l+4>>2]|0)|0,qe>>>0>Q>>>0));)l=n[l+8>>2]|0;d=qe+-47|0,s=d+8|0,s=d+(s&7|0?0-s&7:0)|0,d=Q+16|0,s=s>>>0>>0?Q:s,l=s+8|0,c=m+8|0,c=c&7|0?0-c&7:0,_e=m+c|0,c=B+-40-c|0,n[2789]=_e,n[2786]=c,n[_e+4>>2]=c|1,n[_e+c+4>>2]=40,n[2790]=n[2905],c=s+4|0,n[c>>2]=27,n[l>>2]=n[2895],n[l+4>>2]=n[2896],n[l+8>>2]=n[2897],n[l+12>>2]=n[2898],n[2895]=m,n[2896]=B,n[2898]=0,n[2897]=l,l=s+24|0;do _e=l,l=l+4|0,n[l>>2]=7;while((_e+8|0)>>>0>>0);if((s|0)!=(Q|0)){if(m=s-Q|0,n[c>>2]=n[c>>2]&-2,n[Q+4>>2]=m|1,n[s>>2]=m,l=m>>>3,m>>>0<256){c=11172+(l<<1<<2)|0,s=n[2783]|0,l=1<>2]|0):(n[2783]=s|l,l=c,s=c+8|0),n[s>>2]=Q,n[l+12>>2]=Q,n[Q+8>>2]=l,n[Q+12>>2]=c;break}if(l=m>>>8,l?m>>>0>16777215?c=31:(_e=(l+1048320|0)>>>16&8,qe=l<<_e,lt=(qe+520192|0)>>>16&4,qe=qe<>>16&2,c=14-(lt|_e|c)+(qe<>>15)|0,c=m>>>(c+7|0)&1|c<<1):c=0,f=11436+(c<<2)|0,n[Q+28>>2]=c,n[Q+20>>2]=0,n[d>>2]=0,l=n[2784]|0,s=1<>2]=Q,n[Q+24>>2]=f,n[Q+12>>2]=Q,n[Q+8>>2]=Q;break}for(s=m<<((c|0)==31?0:25-(c>>>1)|0),c=n[f>>2]|0;;){if((n[c+4>>2]&-8|0)==(m|0)){_e=216;break}if(f=c+16+(s>>>31<<2)|0,l=n[f>>2]|0,l)s=s<<1,c=l;else{_e=215;break}}if((_e|0)==215){n[f>>2]=Q,n[Q+24>>2]=c,n[Q+12>>2]=Q,n[Q+8>>2]=Q;break}else if((_e|0)==216){_e=c+8|0,qe=n[_e>>2]|0,n[qe+12>>2]=Q,n[_e>>2]=Q,n[Q+8>>2]=qe,n[Q+12>>2]=c,n[Q+24>>2]=0;break}}}else{qe=n[2787]|0,(qe|0)==0|m>>>0>>0&&(n[2787]=m),n[2895]=m,n[2896]=B,n[2898]=0,n[2792]=n[2901],n[2791]=-1,l=0;do qe=11172+(l<<1<<2)|0,n[qe+12>>2]=qe,n[qe+8>>2]=qe,l=l+1|0;while((l|0)!=32);qe=m+8|0,qe=qe&7|0?0-qe&7:0,_e=m+qe|0,qe=B+-40-qe|0,n[2789]=_e,n[2786]=qe,n[_e+4>>2]=qe|1,n[_e+qe+4>>2]=40,n[2790]=n[2905]}while(!1);if(l=n[2786]|0,l>>>0>j>>>0)return lt=l-j|0,n[2786]=lt,qe=n[2789]|0,_e=qe+j|0,n[2789]=_e,n[_e+4>>2]=lt|1,n[qe+4>>2]=j|3,qe=qe+8|0,C=Lt,qe|0}return n[(Nd()|0)>>2]=12,qe=0,C=Lt,qe|0}function Xv(s){s=s|0;var l=0,c=0,f=0,d=0,m=0,B=0,k=0,Q=0;if(s){c=s+-8|0,d=n[2787]|0,s=n[s+-4>>2]|0,l=s&-8,Q=c+l|0;do if(s&1)k=c,B=c;else{if(f=n[c>>2]|0,!(s&3)||(B=c+(0-f)|0,m=f+l|0,B>>>0>>0))return;if((B|0)==(n[2788]|0)){if(s=Q+4|0,l=n[s>>2]|0,(l&3|0)!=3){k=B,l=m;break}n[2785]=m,n[s>>2]=l&-2,n[B+4>>2]=m|1,n[B+m>>2]=m;return}if(c=f>>>3,f>>>0<256)if(s=n[B+8>>2]|0,l=n[B+12>>2]|0,(l|0)==(s|0)){n[2783]=n[2783]&~(1<>2]=l,n[l+8>>2]=s,k=B,l=m;break}d=n[B+24>>2]|0,s=n[B+12>>2]|0;do if((s|0)==(B|0)){if(c=B+16|0,l=c+4|0,s=n[l>>2]|0,!s)if(s=n[c>>2]|0,s)l=c;else{s=0;break}for(;;){if(c=s+20|0,f=n[c>>2]|0,f|0){s=f,l=c;continue}if(c=s+16|0,f=n[c>>2]|0,f)s=f,l=c;else break}n[l>>2]=0}else k=n[B+8>>2]|0,n[k+12>>2]=s,n[s+8>>2]=k;while(!1);if(d){if(l=n[B+28>>2]|0,c=11436+(l<<2)|0,(B|0)==(n[c>>2]|0)){if(n[c>>2]=s,!s){n[2784]=n[2784]&~(1<>2]|0)!=(B|0)&1)<<2)>>2]=s,!s){k=B,l=m;break}n[s+24>>2]=d,l=B+16|0,c=n[l>>2]|0,c|0&&(n[s+16>>2]=c,n[c+24>>2]=s),l=n[l+4>>2]|0,l?(n[s+20>>2]=l,n[l+24>>2]=s,k=B,l=m):(k=B,l=m)}else 
k=B,l=m}while(!1);if(!(B>>>0>=Q>>>0)&&(s=Q+4|0,f=n[s>>2]|0,!!(f&1))){if(f&2)n[s>>2]=f&-2,n[k+4>>2]=l|1,n[B+l>>2]=l,d=l;else{if(s=n[2788]|0,(Q|0)==(n[2789]|0)){if(Q=(n[2786]|0)+l|0,n[2786]=Q,n[2789]=k,n[k+4>>2]=Q|1,(k|0)!=(s|0))return;n[2788]=0,n[2785]=0;return}if((Q|0)==(s|0)){Q=(n[2785]|0)+l|0,n[2785]=Q,n[2788]=B,n[k+4>>2]=Q|1,n[B+Q>>2]=Q;return}d=(f&-8)+l|0,c=f>>>3;do if(f>>>0<256)if(l=n[Q+8>>2]|0,s=n[Q+12>>2]|0,(s|0)==(l|0)){n[2783]=n[2783]&~(1<>2]=s,n[s+8>>2]=l;break}else{m=n[Q+24>>2]|0,s=n[Q+12>>2]|0;do if((s|0)==(Q|0)){if(c=Q+16|0,l=c+4|0,s=n[l>>2]|0,!s)if(s=n[c>>2]|0,s)l=c;else{c=0;break}for(;;){if(c=s+20|0,f=n[c>>2]|0,f|0){s=f,l=c;continue}if(c=s+16|0,f=n[c>>2]|0,f)s=f,l=c;else break}n[l>>2]=0,c=s}else c=n[Q+8>>2]|0,n[c+12>>2]=s,n[s+8>>2]=c,c=s;while(!1);if(m|0){if(s=n[Q+28>>2]|0,l=11436+(s<<2)|0,(Q|0)==(n[l>>2]|0)){if(n[l>>2]=c,!c){n[2784]=n[2784]&~(1<>2]|0)!=(Q|0)&1)<<2)>>2]=c,!c)break;n[c+24>>2]=m,s=Q+16|0,l=n[s>>2]|0,l|0&&(n[c+16>>2]=l,n[l+24>>2]=c),s=n[s+4>>2]|0,s|0&&(n[c+20>>2]=s,n[s+24>>2]=c)}}while(!1);if(n[k+4>>2]=d|1,n[B+d>>2]=d,(k|0)==(n[2788]|0)){n[2785]=d;return}}if(s=d>>>3,d>>>0<256){c=11172+(s<<1<<2)|0,l=n[2783]|0,s=1<>2]|0):(n[2783]=l|s,s=c,l=c+8|0),n[l>>2]=k,n[s+12>>2]=k,n[k+8>>2]=s,n[k+12>>2]=c;return}s=d>>>8,s?d>>>0>16777215?s=31:(B=(s+1048320|0)>>>16&8,Q=s<>>16&4,Q=Q<>>16&2,s=14-(m|B|s)+(Q<>>15)|0,s=d>>>(s+7|0)&1|s<<1):s=0,f=11436+(s<<2)|0,n[k+28>>2]=s,n[k+20>>2]=0,n[k+16>>2]=0,l=n[2784]|0,c=1<>>1)|0),c=n[f>>2]|0;;){if((n[c+4>>2]&-8|0)==(d|0)){s=73;break}if(f=c+16+(l>>>31<<2)|0,s=n[f>>2]|0,s)l=l<<1,c=s;else{s=72;break}}if((s|0)==72){n[f>>2]=k,n[k+24>>2]=c,n[k+12>>2]=k,n[k+8>>2]=k;break}else if((s|0)==73){B=c+8|0,Q=n[B>>2]|0,n[Q+12>>2]=k,n[B>>2]=k,n[k+8>>2]=Q,n[k+12>>2]=c,n[k+24>>2]=0;break}}else n[2784]=l|c,n[f>>2]=k,n[k+24>>2]=f,n[k+12>>2]=k,n[k+8>>2]=k;while(!1);if(Q=(n[2791]|0)+-1|0,n[2791]=Q,!Q)s=11588;else return;for(;s=n[s>>2]|0,s;)s=s+8|0;n[2791]=-1}}}function bUe(){return 11628}function xUe(s){s=s|0;var l=0,c=0;return l=C,C=C+16|0,c=l,n[c>>2]=FUe(n[s+60>>2]|0)|0,s=Zv(gc(6,c|0)|0)|0,C=l,s|0}function Z9(s,l,c){s=s|0,l=l|0,c=c|0;var f=0,d=0,m=0,B=0,k=0,Q=0,O=0,M=0,j=0,se=0,je=0;j=C,C=C+48|0,O=j+16|0,m=j,d=j+32|0,k=s+28|0,f=n[k>>2]|0,n[d>>2]=f,Q=s+20|0,f=(n[Q>>2]|0)-f|0,n[d+4>>2]=f,n[d+8>>2]=l,n[d+12>>2]=c,f=f+c|0,B=s+60|0,n[m>>2]=n[B>>2],n[m+4>>2]=d,n[m+8>>2]=2,m=Zv(Ni(146,m|0)|0)|0;e:do if((f|0)!=(m|0)){for(l=2;!((m|0)<0);)if(f=f-m|0,je=n[d+4>>2]|0,se=m>>>0>je>>>0,d=se?d+8|0:d,l=(se<<31>>31)+l|0,je=m-(se?je:0)|0,n[d>>2]=(n[d>>2]|0)+je,se=d+4|0,n[se>>2]=(n[se>>2]|0)-je,n[O>>2]=n[B>>2],n[O+4>>2]=d,n[O+8>>2]=l,m=Zv(Ni(146,O|0)|0)|0,(f|0)==(m|0)){M=3;break e}n[s+16>>2]=0,n[k>>2]=0,n[Q>>2]=0,n[s>>2]=n[s>>2]|32,(l|0)==2?c=0:c=c-(n[d+4>>2]|0)|0}else M=3;while(!1);return(M|0)==3&&(je=n[s+44>>2]|0,n[s+16>>2]=je+(n[s+48>>2]|0),n[k>>2]=je,n[Q>>2]=je),C=j,c|0}function kUe(s,l,c){s=s|0,l=l|0,c=c|0;var f=0,d=0,m=0;return d=C,C=C+32|0,m=d,f=d+20|0,n[m>>2]=n[s+60>>2],n[m+4>>2]=0,n[m+8>>2]=l,n[m+12>>2]=f,n[m+16>>2]=c,(Zv(aa(140,m|0)|0)|0)<0?(n[f>>2]=-1,s=-1):s=n[f>>2]|0,C=d,s|0}function Zv(s){return s=s|0,s>>>0>4294963200&&(n[(Nd()|0)>>2]=0-s,s=-1),s|0}function Nd(){return(QUe()|0)+64|0}function QUe(){return gR()|0}function gR(){return 2084}function FUe(s){return s=s|0,s|0}function RUe(s,l,c){s=s|0,l=l|0,c=c|0;var f=0,d=0;return d=C,C=C+32|0,f=d,n[s+36>>2]=1,!(n[s>>2]&64|0)&&(n[f>>2]=n[s+60>>2],n[f+4>>2]=21523,n[f+8>>2]=d+16,hu(54,f|0)|0)&&(o[s+75>>0]=-1),f=Z9(s,l,c)|0,C=d,f|0}function $9(s,l){s=s|0,l=l|0;var 
c=0,f=0;if(c=o[s>>0]|0,f=o[l>>0]|0,!(c<<24>>24)||c<<24>>24!=f<<24>>24)s=f;else{do s=s+1|0,l=l+1|0,c=o[s>>0]|0,f=o[l>>0]|0;while(!(!(c<<24>>24)||c<<24>>24!=f<<24>>24));s=f}return(c&255)-(s&255)|0}function TUe(s,l,c){s=s|0,l=l|0,c=c|0;var f=0,d=0;e:do if(!c)s=0;else{for(;f=o[s>>0]|0,d=o[l>>0]|0,f<<24>>24==d<<24>>24;)if(c=c+-1|0,c)s=s+1|0,l=l+1|0;else{s=0;break e}s=(f&255)-(d&255)|0}while(!1);return s|0}function e7(s,l,c){s=s|0,l=l|0,c=c|0;var f=0,d=0,m=0,B=0,k=0,Q=0,O=0,M=0,j=0,se=0,je=0,Oe=0,Qe=0;Qe=C,C=C+224|0,M=Qe+120|0,j=Qe+80|0,je=Qe,Oe=Qe+136|0,f=j,d=f+40|0;do n[f>>2]=0,f=f+4|0;while((f|0)<(d|0));return n[M>>2]=n[c>>2],(dR(0,l,M,je,j)|0)<0?c=-1:((n[s+76>>2]|0)>-1?se=NUe(s)|0:se=0,c=n[s>>2]|0,O=c&32,(o[s+74>>0]|0)<1&&(n[s>>2]=c&-33),f=s+48|0,n[f>>2]|0?c=dR(s,l,M,je,j)|0:(d=s+44|0,m=n[d>>2]|0,n[d>>2]=Oe,B=s+28|0,n[B>>2]=Oe,k=s+20|0,n[k>>2]=Oe,n[f>>2]=80,Q=s+16|0,n[Q>>2]=Oe+80,c=dR(s,l,M,je,j)|0,m&&(rD[n[s+36>>2]&7](s,0,0)|0,c=n[k>>2]|0?c:-1,n[d>>2]=m,n[f>>2]=0,n[Q>>2]=0,n[B>>2]=0,n[k>>2]=0)),f=n[s>>2]|0,n[s>>2]=f|O,se|0&&LUe(s),c=f&32|0?-1:c),C=Qe,c|0}function dR(s,l,c,f,d){s=s|0,l=l|0,c=c|0,f=f|0,d=d|0;var m=0,B=0,k=0,Q=0,O=0,M=0,j=0,se=0,je=0,Oe=0,Qe=0,$e=0,Je=0,lt=0,_e=0,qe=0,Lt=0,Or=0,cr=0,Xt=0,Pr=0,Tr=0,ar=0;ar=C,C=C+64|0,cr=ar+16|0,Xt=ar,Lt=ar+24|0,Pr=ar+8|0,Tr=ar+20|0,n[cr>>2]=l,lt=(s|0)!=0,_e=Lt+40|0,qe=_e,Lt=Lt+39|0,Or=Pr+4|0,B=0,m=0,M=0;e:for(;;){do if((m|0)>-1)if((B|0)>(2147483647-m|0)){n[(Nd()|0)>>2]=75,m=-1;break}else{m=B+m|0;break}while(!1);if(B=o[l>>0]|0,B<<24>>24)k=l;else{Je=87;break}t:for(;;){switch(B<<24>>24){case 37:{B=k,Je=9;break t}case 0:{B=k;break t}default:}$e=k+1|0,n[cr>>2]=$e,B=o[$e>>0]|0,k=$e}t:do if((Je|0)==9)for(;;){if(Je=0,(o[k+1>>0]|0)!=37)break t;if(B=B+1|0,k=k+2|0,n[cr>>2]=k,(o[k>>0]|0)==37)Je=9;else break}while(!1);if(B=B-l|0,lt&&as(s,l,B),B|0){l=k;continue}Q=k+1|0,B=(o[Q>>0]|0)+-48|0,B>>>0<10?($e=(o[k+2>>0]|0)==36,Qe=$e?B:-1,M=$e?1:M,Q=$e?k+3|0:Q):Qe=-1,n[cr>>2]=Q,B=o[Q>>0]|0,k=(B<<24>>24)+-32|0;t:do if(k>>>0<32)for(O=0,j=B;;){if(B=1<>2]=Q,B=o[Q>>0]|0,k=(B<<24>>24)+-32|0,k>>>0>=32)break;j=B}else O=0;while(!1);if(B<<24>>24==42){if(k=Q+1|0,B=(o[k>>0]|0)+-48|0,B>>>0<10&&(o[Q+2>>0]|0)==36)n[d+(B<<2)>>2]=10,B=n[f+((o[k>>0]|0)+-48<<3)>>2]|0,M=1,Q=Q+3|0;else{if(M|0){m=-1;break}lt?(M=(n[c>>2]|0)+3&-4,B=n[M>>2]|0,n[c>>2]=M+4,M=0,Q=k):(B=0,M=0,Q=k)}n[cr>>2]=Q,$e=(B|0)<0,B=$e?0-B|0:B,O=$e?O|8192:O}else{if(B=t7(cr)|0,(B|0)<0){m=-1;break}Q=n[cr>>2]|0}do if((o[Q>>0]|0)==46){if((o[Q+1>>0]|0)!=42){n[cr>>2]=Q+1,k=t7(cr)|0,Q=n[cr>>2]|0;break}if(j=Q+2|0,k=(o[j>>0]|0)+-48|0,k>>>0<10&&(o[Q+3>>0]|0)==36){n[d+(k<<2)>>2]=10,k=n[f+((o[j>>0]|0)+-48<<3)>>2]|0,Q=Q+4|0,n[cr>>2]=Q;break}if(M|0){m=-1;break e}lt?($e=(n[c>>2]|0)+3&-4,k=n[$e>>2]|0,n[c>>2]=$e+4):k=0,n[cr>>2]=j,Q=j}else k=-1;while(!1);for(Oe=0;;){if(((o[Q>>0]|0)+-65|0)>>>0>57){m=-1;break e}if($e=Q+1|0,n[cr>>2]=$e,j=o[(o[Q>>0]|0)+-65+(5178+(Oe*58|0))>>0]|0,se=j&255,(se+-1|0)>>>0<8)Oe=se,Q=$e;else break}if(!(j<<24>>24)){m=-1;break}je=(Qe|0)>-1;do if(j<<24>>24==19)if(je){m=-1;break e}else Je=49;else{if(je){n[d+(Qe<<2)>>2]=se,je=f+(Qe<<3)|0,Qe=n[je+4>>2]|0,Je=Xt,n[Je>>2]=n[je>>2],n[Je+4>>2]=Qe,Je=49;break}if(!lt){m=0;break e}r7(Xt,se,c)}while(!1);if((Je|0)==49&&(Je=0,!lt)){B=0,l=$e;continue}Q=o[Q>>0]|0,Q=(Oe|0)!=0&(Q&15|0)==3?Q&-33:Q,je=O&-65537,Qe=O&8192|0?je:O;t:do switch(Q|0){case 110:switch((Oe&255)<<24>>24){case 0:{n[n[Xt>>2]>>2]=m,B=0,l=$e;continue e}case 1:{n[n[Xt>>2]>>2]=m,B=0,l=$e;continue e}case 2:{B=n[Xt>>2]|0,n[B>>2]=m,n[B+4>>2]=((m|0)<0)<<31>>31,B=0,l=$e;continue e}case 3:{a[n[Xt>>2]>>1]=m,B=0,l=$e;continue 
e}case 4:{o[n[Xt>>2]>>0]=m,B=0,l=$e;continue e}case 6:{n[n[Xt>>2]>>2]=m,B=0,l=$e;continue e}case 7:{B=n[Xt>>2]|0,n[B>>2]=m,n[B+4>>2]=((m|0)<0)<<31>>31,B=0,l=$e;continue e}default:{B=0,l=$e;continue e}}case 112:{Q=120,k=k>>>0>8?k:8,l=Qe|8,Je=61;break}case 88:case 120:{l=Qe,Je=61;break}case 111:{Q=Xt,l=n[Q>>2]|0,Q=n[Q+4>>2]|0,se=OUe(l,Q,_e)|0,je=qe-se|0,O=0,j=5642,k=(Qe&8|0)==0|(k|0)>(je|0)?k:je+1|0,je=Qe,Je=67;break}case 105:case 100:if(Q=Xt,l=n[Q>>2]|0,Q=n[Q+4>>2]|0,(Q|0)<0){l=$v(0,0,l|0,Q|0)|0,Q=we,O=Xt,n[O>>2]=l,n[O+4>>2]=Q,O=1,j=5642,Je=66;break t}else{O=(Qe&2049|0)!=0&1,j=Qe&2048|0?5643:Qe&1|0?5644:5642,Je=66;break t}case 117:{Q=Xt,O=0,j=5642,l=n[Q>>2]|0,Q=n[Q+4>>2]|0,Je=66;break}case 99:{o[Lt>>0]=n[Xt>>2],l=Lt,O=0,j=5642,se=_e,Q=1,k=je;break}case 109:{Q=UUe(n[(Nd()|0)>>2]|0)|0,Je=71;break}case 115:{Q=n[Xt>>2]|0,Q=Q|0?Q:5652,Je=71;break}case 67:{n[Pr>>2]=n[Xt>>2],n[Or>>2]=0,n[Xt>>2]=Pr,se=-1,Q=Pr,Je=75;break}case 83:{l=n[Xt>>2]|0,k?(se=k,Q=l,Je=75):(Ds(s,32,B,0,Qe),l=0,Je=84);break}case 65:case 71:case 70:case 69:case 97:case 103:case 102:case 101:{B=HUe(s,+E[Xt>>3],B,k,Qe,Q)|0,l=$e;continue e}default:O=0,j=5642,se=_e,Q=k,k=Qe}while(!1);t:do if((Je|0)==61)Qe=Xt,Oe=n[Qe>>2]|0,Qe=n[Qe+4>>2]|0,se=MUe(Oe,Qe,_e,Q&32)|0,j=(l&8|0)==0|(Oe|0)==0&(Qe|0)==0,O=j?0:2,j=j?5642:5642+(Q>>4)|0,je=l,l=Oe,Q=Qe,Je=67;else if((Je|0)==66)se=Ld(l,Q,_e)|0,je=Qe,Je=67;else if((Je|0)==71)Je=0,Qe=_Ue(Q,0,k)|0,Oe=(Qe|0)==0,l=Q,O=0,j=5642,se=Oe?Q+k|0:Qe,Q=Oe?k:Qe-Q|0,k=je;else if((Je|0)==75){for(Je=0,j=Q,l=0,k=0;O=n[j>>2]|0,!(!O||(k=n7(Tr,O)|0,(k|0)<0|k>>>0>(se-l|0)>>>0));)if(l=k+l|0,se>>>0>l>>>0)j=j+4|0;else break;if((k|0)<0){m=-1;break e}if(Ds(s,32,B,l,Qe),!l)l=0,Je=84;else for(O=0;;){if(k=n[Q>>2]|0,!k){Je=84;break t}if(k=n7(Tr,k)|0,O=k+O|0,(O|0)>(l|0)){Je=84;break t}if(as(s,Tr,k),O>>>0>=l>>>0){Je=84;break}else Q=Q+4|0}}while(!1);if((Je|0)==67)Je=0,Q=(l|0)!=0|(Q|0)!=0,Qe=(k|0)!=0|Q,Q=((Q^1)&1)+(qe-se)|0,l=Qe?se:_e,se=_e,Q=Qe?(k|0)>(Q|0)?k:Q:k,k=(k|0)>-1?je&-65537:je;else if((Je|0)==84){Je=0,Ds(s,32,B,l,Qe^8192),B=(B|0)>(l|0)?B:l,l=$e;continue}Oe=se-l|0,je=(Q|0)<(Oe|0)?Oe:Q,Qe=je+O|0,B=(B|0)<(Qe|0)?Qe:B,Ds(s,32,B,Qe,k),as(s,j,O),Ds(s,48,B,Qe,k^65536),Ds(s,48,je,Oe,0),as(s,l,Oe),Ds(s,32,B,Qe,k^8192),l=$e}e:do if((Je|0)==87&&!s)if(!M)m=0;else{for(m=1;l=n[d+(m<<2)>>2]|0,!!l;)if(r7(f+(m<<3)|0,l,c),m=m+1|0,(m|0)>=10){m=1;break e}for(;;){if(n[d+(m<<2)>>2]|0){m=-1;break e}if(m=m+1|0,(m|0)>=10){m=1;break}}}while(!1);return C=ar,m|0}function NUe(s){return s=s|0,0}function LUe(s){s=s|0}function as(s,l,c){s=s|0,l=l|0,c=c|0,n[s>>2]&32||JUe(l,c,s)|0}function t7(s){s=s|0;var l=0,c=0,f=0;if(c=n[s>>2]|0,f=(o[c>>0]|0)+-48|0,f>>>0<10){l=0;do l=f+(l*10|0)|0,c=c+1|0,n[s>>2]=c,f=(o[c>>0]|0)+-48|0;while(f>>>0<10)}else l=0;return l|0}function r7(s,l,c){s=s|0,l=l|0,c=c|0;var f=0,d=0,m=0;e:do if(l>>>0<=20)do switch(l|0){case 9:{f=(n[c>>2]|0)+3&-4,l=n[f>>2]|0,n[c>>2]=f+4,n[s>>2]=l;break e}case 10:{f=(n[c>>2]|0)+3&-4,l=n[f>>2]|0,n[c>>2]=f+4,f=s,n[f>>2]=l,n[f+4>>2]=((l|0)<0)<<31>>31;break e}case 11:{f=(n[c>>2]|0)+3&-4,l=n[f>>2]|0,n[c>>2]=f+4,f=s,n[f>>2]=l,n[f+4>>2]=0;break e}case 12:{f=(n[c>>2]|0)+7&-8,l=f,d=n[l>>2]|0,l=n[l+4>>2]|0,n[c>>2]=f+8,f=s,n[f>>2]=d,n[f+4>>2]=l;break e}case 13:{d=(n[c>>2]|0)+3&-4,f=n[d>>2]|0,n[c>>2]=d+4,f=(f&65535)<<16>>16,d=s,n[d>>2]=f,n[d+4>>2]=((f|0)<0)<<31>>31;break e}case 14:{d=(n[c>>2]|0)+3&-4,f=n[d>>2]|0,n[c>>2]=d+4,d=s,n[d>>2]=f&65535,n[d+4>>2]=0;break e}case 15:{d=(n[c>>2]|0)+3&-4,f=n[d>>2]|0,n[c>>2]=d+4,f=(f&255)<<24>>24,d=s,n[d>>2]=f,n[d+4>>2]=((f|0)<0)<<31>>31;break e}case 
16:{d=(n[c>>2]|0)+3&-4,f=n[d>>2]|0,n[c>>2]=d+4,d=s,n[d>>2]=f&255,n[d+4>>2]=0;break e}case 17:{d=(n[c>>2]|0)+7&-8,m=+E[d>>3],n[c>>2]=d+8,E[s>>3]=m;break e}case 18:{d=(n[c>>2]|0)+7&-8,m=+E[d>>3],n[c>>2]=d+8,E[s>>3]=m;break e}default:break e}while(!1);while(!1)}function MUe(s,l,c,f){if(s=s|0,l=l|0,c=c|0,f=f|0,!((s|0)==0&(l|0)==0))do c=c+-1|0,o[c>>0]=u[5694+(s&15)>>0]|0|f,s=eD(s|0,l|0,4)|0,l=we;while(!((s|0)==0&(l|0)==0));return c|0}function OUe(s,l,c){if(s=s|0,l=l|0,c=c|0,!((s|0)==0&(l|0)==0))do c=c+-1|0,o[c>>0]=s&7|48,s=eD(s|0,l|0,3)|0,l=we;while(!((s|0)==0&(l|0)==0));return c|0}function Ld(s,l,c){s=s|0,l=l|0,c=c|0;var f=0;if(l>>>0>0|(l|0)==0&s>>>0>4294967295){for(;f=CR(s|0,l|0,10,0)|0,c=c+-1|0,o[c>>0]=f&255|48,f=s,s=ER(s|0,l|0,10,0)|0,l>>>0>9|(l|0)==9&f>>>0>4294967295;)l=we;l=s}else l=s;if(l)for(;c=c+-1|0,o[c>>0]=(l>>>0)%10|0|48,!(l>>>0<10);)l=(l>>>0)/10|0;return c|0}function UUe(s){return s=s|0,WUe(s,n[(YUe()|0)+188>>2]|0)|0}function _Ue(s,l,c){s=s|0,l=l|0,c=c|0;var f=0,d=0,m=0,B=0;m=l&255,f=(c|0)!=0;e:do if(f&(s&3|0)!=0)for(d=l&255;;){if((o[s>>0]|0)==d<<24>>24){B=6;break e}if(s=s+1|0,c=c+-1|0,f=(c|0)!=0,!(f&(s&3|0)!=0)){B=5;break}}else B=5;while(!1);(B|0)==5&&(f?B=6:c=0);e:do if((B|0)==6&&(d=l&255,(o[s>>0]|0)!=d<<24>>24)){f=Ue(m,16843009)|0;t:do if(c>>>0>3){for(;m=n[s>>2]^f,!((m&-2139062144^-2139062144)&m+-16843009|0);)if(s=s+4|0,c=c+-4|0,c>>>0<=3){B=11;break t}}else B=11;while(!1);if((B|0)==11&&!c){c=0;break}for(;;){if((o[s>>0]|0)==d<<24>>24)break e;if(s=s+1|0,c=c+-1|0,!c){c=0;break}}}while(!1);return(c|0?s:0)|0}function Ds(s,l,c,f,d){s=s|0,l=l|0,c=c|0,f=f|0,d=d|0;var m=0,B=0;if(B=C,C=C+256|0,m=B,(c|0)>(f|0)&(d&73728|0)==0){if(d=c-f|0,Od(m|0,l|0,(d>>>0<256?d:256)|0)|0,d>>>0>255){l=c-f|0;do as(s,m,256),d=d+-256|0;while(d>>>0>255);d=l&255}as(s,m,d)}C=B}function n7(s,l){return s=s|0,l=l|0,s?s=jUe(s,l,0)|0:s=0,s|0}function HUe(s,l,c,f,d,m){s=s|0,l=+l,c=c|0,f=f|0,d=d|0,m=m|0;var B=0,k=0,Q=0,O=0,M=0,j=0,se=0,je=0,Oe=0,Qe=0,$e=0,Je=0,lt=0,_e=0,qe=0,Lt=0,Or=0,cr=0,Xt=0,Pr=0,Tr=0,ar=0,xn=0;xn=C,C=C+560|0,Q=xn+8|0,$e=xn,ar=xn+524|0,Tr=ar,O=xn+512|0,n[$e>>2]=0,Pr=O+12|0,i7(l)|0,(we|0)<0?(l=-l,cr=1,Or=5659):(cr=(d&2049|0)!=0&1,Or=d&2048|0?5662:d&1|0?5665:5660),i7(l)|0,Xt=we&2146435072;do if(Xt>>>0<2146435072|(Xt|0)==2146435072&!1){if(je=+qUe(l,$e)*2,B=je!=0,B&&(n[$e>>2]=(n[$e>>2]|0)+-1),lt=m|32,(lt|0)==97){Oe=m&32,se=Oe|0?Or+9|0:Or,j=cr|2,B=12-f|0;do if(f>>>0>11|(B|0)==0)l=je;else{l=8;do B=B+-1|0,l=l*16;while(B|0);if((o[se>>0]|0)==45){l=-(l+(-je-l));break}else{l=je+l-l;break}}while(!1);k=n[$e>>2]|0,B=(k|0)<0?0-k|0:k,B=Ld(B,((B|0)<0)<<31>>31,Pr)|0,(B|0)==(Pr|0)&&(B=O+11|0,o[B>>0]=48),o[B+-1>>0]=(k>>31&2)+43,M=B+-2|0,o[M>>0]=m+15,O=(f|0)<1,Q=(d&8|0)==0,B=ar;do Xt=~~l,k=B+1|0,o[B>>0]=u[5694+Xt>>0]|Oe,l=(l-+(Xt|0))*16,(k-Tr|0)==1&&!(Q&(O&l==0))?(o[k>>0]=46,B=B+2|0):B=k;while(l!=0);Xt=B-Tr|0,Tr=Pr-M|0,Pr=(f|0)!=0&(Xt+-2|0)<(f|0)?f+2|0:Xt,B=Tr+j+Pr|0,Ds(s,32,c,B,d),as(s,se,j),Ds(s,48,c,B,d^65536),as(s,ar,Xt),Ds(s,48,Pr-Xt|0,0,0),as(s,M,Tr),Ds(s,32,c,B,d^8192);break}k=(f|0)<0?6:f,B?(B=(n[$e>>2]|0)+-28|0,n[$e>>2]=B,l=je*268435456):(l=je,B=n[$e>>2]|0),Xt=(B|0)<0?Q:Q+288|0,Q=Xt;do qe=~~l>>>0,n[Q>>2]=qe,Q=Q+4|0,l=(l-+(qe>>>0))*1e9;while(l!=0);if((B|0)>0)for(O=Xt,j=Q;;){if(M=(B|0)<29?B:29,B=j+-4|0,B>>>0>=O>>>0){Q=0;do _e=u7(n[B>>2]|0,0,M|0)|0,_e=yR(_e|0,we|0,Q|0,0)|0,qe=we,Je=CR(_e|0,qe|0,1e9,0)|0,n[B>>2]=Je,Q=ER(_e|0,qe|0,1e9,0)|0,B=B+-4|0;while(B>>>0>=O>>>0);Q&&(O=O+-4|0,n[O>>2]=Q)}for(Q=j;!(Q>>>0<=O>>>0);)if(B=Q+-4|0,!(n[B>>2]|0))Q=B;else break;if(B=(n[$e>>2]|0)-M|0,n[$e>>2]=B,(B|0)>0)j=Q;else break}else 
O=Xt;if((B|0)<0){f=((k+25|0)/9|0)+1|0,Qe=(lt|0)==102;do{if(Oe=0-B|0,Oe=(Oe|0)<9?Oe:9,O>>>0>>0){M=(1<>>Oe,se=0,B=O;do qe=n[B>>2]|0,n[B>>2]=(qe>>>Oe)+se,se=Ue(qe&M,j)|0,B=B+4|0;while(B>>>0>>0);B=n[O>>2]|0?O:O+4|0,se?(n[Q>>2]=se,O=B,B=Q+4|0):(O=B,B=Q)}else O=n[O>>2]|0?O:O+4|0,B=Q;Q=Qe?Xt:O,Q=(B-Q>>2|0)>(f|0)?Q+(f<<2)|0:B,B=(n[$e>>2]|0)+Oe|0,n[$e>>2]=B}while((B|0)<0);B=O,f=Q}else B=O,f=Q;if(qe=Xt,B>>>0>>0){if(Q=(qe-B>>2)*9|0,M=n[B>>2]|0,M>>>0>=10){O=10;do O=O*10|0,Q=Q+1|0;while(M>>>0>=O>>>0)}}else Q=0;if(Qe=(lt|0)==103,Je=(k|0)!=0,O=k-((lt|0)!=102?Q:0)+((Je&Qe)<<31>>31)|0,(O|0)<(((f-qe>>2)*9|0)+-9|0)){if(O=O+9216|0,Oe=Xt+4+(((O|0)/9|0)+-1024<<2)|0,O=((O|0)%9|0)+1|0,(O|0)<9){M=10;do M=M*10|0,O=O+1|0;while((O|0)!=9)}else M=10;if(j=n[Oe>>2]|0,se=(j>>>0)%(M>>>0)|0,O=(Oe+4|0)==(f|0),O&(se|0)==0)O=Oe;else if(je=((j>>>0)/(M>>>0)|0)&1|0?9007199254740994:9007199254740992,_e=(M|0)/2|0,l=se>>>0<_e>>>0?.5:O&(se|0)==(_e|0)?1:1.5,cr&&(_e=(o[Or>>0]|0)==45,l=_e?-l:l,je=_e?-je:je),O=j-se|0,n[Oe>>2]=O,je+l!=je){if(_e=O+M|0,n[Oe>>2]=_e,_e>>>0>999999999)for(Q=Oe;O=Q+-4|0,n[Q>>2]=0,O>>>0>>0&&(B=B+-4|0,n[B>>2]=0),_e=(n[O>>2]|0)+1|0,n[O>>2]=_e,_e>>>0>999999999;)Q=O;else O=Oe;if(Q=(qe-B>>2)*9|0,j=n[B>>2]|0,j>>>0>=10){M=10;do M=M*10|0,Q=Q+1|0;while(j>>>0>=M>>>0)}}else O=Oe;O=O+4|0,O=f>>>0>O>>>0?O:f,_e=B}else O=f,_e=B;for(lt=O;;){if(lt>>>0<=_e>>>0){$e=0;break}if(B=lt+-4|0,!(n[B>>2]|0))lt=B;else{$e=1;break}}f=0-Q|0;do if(Qe)if(B=((Je^1)&1)+k|0,(B|0)>(Q|0)&(Q|0)>-5?(M=m+-1|0,k=B+-1-Q|0):(M=m+-2|0,k=B+-1|0),B=d&8,B)Oe=B;else{if($e&&(Lt=n[lt+-4>>2]|0,(Lt|0)!=0))if((Lt>>>0)%10|0)O=0;else{O=0,B=10;do B=B*10|0,O=O+1|0;while(!((Lt>>>0)%(B>>>0)|0|0))}else O=9;if(B=((lt-qe>>2)*9|0)+-9|0,(M|32|0)==102){Oe=B-O|0,Oe=(Oe|0)>0?Oe:0,k=(k|0)<(Oe|0)?k:Oe,Oe=0;break}else{Oe=B+Q-O|0,Oe=(Oe|0)>0?Oe:0,k=(k|0)<(Oe|0)?k:Oe,Oe=0;break}}else M=m,Oe=d&8;while(!1);if(Qe=k|Oe,j=(Qe|0)!=0&1,se=(M|32|0)==102,se)Je=0,B=(Q|0)>0?Q:0;else{if(B=(Q|0)<0?f:Q,B=Ld(B,((B|0)<0)<<31>>31,Pr)|0,O=Pr,(O-B|0)<2)do B=B+-1|0,o[B>>0]=48;while((O-B|0)<2);o[B+-1>>0]=(Q>>31&2)+43,B=B+-2|0,o[B>>0]=M,Je=B,B=O-B|0}if(B=cr+1+k+j+B|0,Ds(s,32,c,B,d),as(s,Or,cr),Ds(s,48,c,B,d^65536),se){M=_e>>>0>Xt>>>0?Xt:_e,Oe=ar+9|0,j=Oe,se=ar+8|0,O=M;do{if(Q=Ld(n[O>>2]|0,0,Oe)|0,(O|0)==(M|0))(Q|0)==(Oe|0)&&(o[se>>0]=48,Q=se);else if(Q>>>0>ar>>>0){Od(ar|0,48,Q-Tr|0)|0;do Q=Q+-1|0;while(Q>>>0>ar>>>0)}as(s,Q,j-Q|0),O=O+4|0}while(O>>>0<=Xt>>>0);if(Qe|0&&as(s,5710,1),O>>>0>>0&(k|0)>0)for(;;){if(Q=Ld(n[O>>2]|0,0,Oe)|0,Q>>>0>ar>>>0){Od(ar|0,48,Q-Tr|0)|0;do Q=Q+-1|0;while(Q>>>0>ar>>>0)}if(as(s,Q,(k|0)<9?k:9),O=O+4|0,Q=k+-9|0,O>>>0>>0&(k|0)>9)k=Q;else{k=Q;break}}Ds(s,48,k+9|0,9,0)}else{if(Qe=$e?lt:_e+4|0,(k|0)>-1){$e=ar+9|0,Oe=(Oe|0)==0,f=$e,j=0-Tr|0,se=ar+8|0,M=_e;do{Q=Ld(n[M>>2]|0,0,$e)|0,(Q|0)==($e|0)&&(o[se>>0]=48,Q=se);do if((M|0)==(_e|0)){if(O=Q+1|0,as(s,Q,1),Oe&(k|0)<1){Q=O;break}as(s,5710,1),Q=O}else{if(Q>>>0<=ar>>>0)break;Od(ar|0,48,Q+j|0)|0;do Q=Q+-1|0;while(Q>>>0>ar>>>0)}while(!1);Tr=f-Q|0,as(s,Q,(k|0)>(Tr|0)?Tr:k),k=k-Tr|0,M=M+4|0}while(M>>>0>>0&(k|0)>-1)}Ds(s,48,k+18|0,18,0),as(s,Je,Pr-Je|0)}Ds(s,32,c,B,d^8192)}else ar=(m&32|0)!=0,B=cr+3|0,Ds(s,32,c,B,d&-65537),as(s,Or,cr),as(s,l!=l|!1?ar?5686:5690:ar?5678:5682,3),Ds(s,32,c,B,d^8192);while(!1);return C=xn,((B|0)<(c|0)?c:B)|0}function i7(s){s=+s;var l=0;return E[v>>3]=s,l=n[v>>2]|0,we=n[v+4>>2]|0,l|0}function qUe(s,l){return s=+s,l=l|0,+ +s7(s,l)}function s7(s,l){s=+s,l=l|0;var c=0,f=0,d=0;switch(E[v>>3]=s,c=n[v>>2]|0,f=n[v+4>>2]|0,d=eD(c|0,f|0,52)|0,d&2047){case 
0:{s!=0?(s=+s7(s*18446744073709552e3,l),c=(n[l>>2]|0)+-64|0):c=0,n[l>>2]=c;break}case 2047:break;default:n[l>>2]=(d&2047)+-1022,n[v>>2]=c,n[v+4>>2]=f&-2146435073|1071644672,s=+E[v>>3]}return+s}function jUe(s,l,c){s=s|0,l=l|0,c=c|0;do if(s){if(l>>>0<128){o[s>>0]=l,s=1;break}if(!(n[n[(GUe()|0)+188>>2]>>2]|0))if((l&-128|0)==57216){o[s>>0]=l,s=1;break}else{n[(Nd()|0)>>2]=84,s=-1;break}if(l>>>0<2048){o[s>>0]=l>>>6|192,o[s+1>>0]=l&63|128,s=2;break}if(l>>>0<55296|(l&-8192|0)==57344){o[s>>0]=l>>>12|224,o[s+1>>0]=l>>>6&63|128,o[s+2>>0]=l&63|128,s=3;break}if((l+-65536|0)>>>0<1048576){o[s>>0]=l>>>18|240,o[s+1>>0]=l>>>12&63|128,o[s+2>>0]=l>>>6&63|128,o[s+3>>0]=l&63|128,s=4;break}else{n[(Nd()|0)>>2]=84,s=-1;break}}else s=1;while(!1);return s|0}function GUe(){return gR()|0}function YUe(){return gR()|0}function WUe(s,l){s=s|0,l=l|0;var c=0,f=0;for(f=0;;){if((u[5712+f>>0]|0)==(s|0)){s=2;break}if(c=f+1|0,(c|0)==87){c=5800,f=87,s=5;break}else f=c}if((s|0)==2&&(f?(c=5800,s=5):c=5800),(s|0)==5)for(;;){do s=c,c=c+1|0;while(o[s>>0]|0);if(f=f+-1|0,f)s=5;else break}return KUe(c,n[l+20>>2]|0)|0}function KUe(s,l){return s=s|0,l=l|0,VUe(s,l)|0}function VUe(s,l){return s=s|0,l=l|0,l?l=zUe(n[l>>2]|0,n[l+4>>2]|0,s)|0:l=0,(l|0?l:s)|0}function zUe(s,l,c){s=s|0,l=l|0,c=c|0;var f=0,d=0,m=0,B=0,k=0,Q=0,O=0,M=0,j=0,se=0;se=(n[s>>2]|0)+1794895138|0,m=x0(n[s+8>>2]|0,se)|0,f=x0(n[s+12>>2]|0,se)|0,d=x0(n[s+16>>2]|0,se)|0;e:do if(m>>>0>>2>>>0&&(j=l-(m<<2)|0,f>>>0>>0&d>>>0>>0)&&!((d|f)&3|0)){for(j=f>>>2,M=d>>>2,O=0;;){if(k=m>>>1,Q=O+k|0,B=Q<<1,d=B+j|0,f=x0(n[s+(d<<2)>>2]|0,se)|0,d=x0(n[s+(d+1<<2)>>2]|0,se)|0,!(d>>>0>>0&f>>>0<(l-d|0)>>>0)){f=0;break e}if(o[s+(d+f)>>0]|0){f=0;break e}if(f=$9(c,s+d|0)|0,!f)break;if(f=(f|0)<0,(m|0)==1){f=0;break e}else O=f?O:Q,m=f?k:m-k|0}f=B+M|0,d=x0(n[s+(f<<2)>>2]|0,se)|0,f=x0(n[s+(f+1<<2)>>2]|0,se)|0,f>>>0>>0&d>>>0<(l-f|0)>>>0?f=o[s+(f+d)>>0]|0?0:s+f|0:f=0}else f=0;while(!1);return f|0}function x0(s,l){s=s|0,l=l|0;var c=0;return c=p7(s|0)|0,(l|0?c:s)|0}function JUe(s,l,c){s=s|0,l=l|0,c=c|0;var f=0,d=0,m=0,B=0,k=0;f=c+16|0,d=n[f>>2]|0,d?m=5:XUe(c)|0?f=0:(d=n[f>>2]|0,m=5);e:do if((m|0)==5){if(k=c+20|0,B=n[k>>2]|0,f=B,(d-B|0)>>>0>>0){f=rD[n[c+36>>2]&7](c,s,l)|0;break}t:do if((o[c+75>>0]|0)>-1){for(B=l;;){if(!B){m=0,d=s;break t}if(d=B+-1|0,(o[s+d>>0]|0)==10)break;B=d}if(f=rD[n[c+36>>2]&7](c,s,B)|0,f>>>0>>0)break e;m=B,d=s+B|0,l=l-B|0,f=n[k>>2]|0}else m=0,d=s;while(!1);Dr(f|0,d|0,l|0)|0,n[k>>2]=(n[k>>2]|0)+l,f=m+l|0}while(!1);return f|0}function XUe(s){s=s|0;var l=0,c=0;return l=s+74|0,c=o[l>>0]|0,o[l>>0]=c+255|c,l=n[s>>2]|0,l&8?(n[s>>2]=l|32,s=-1):(n[s+8>>2]=0,n[s+4>>2]=0,c=n[s+44>>2]|0,n[s+28>>2]=c,n[s+20>>2]=c,n[s+16>>2]=c+(n[s+48>>2]|0),s=0),s|0}function _n(s,l){s=y(s),l=y(l);var c=0,f=0;c=o7(s)|0;do if((c&2147483647)>>>0<=2139095040){if(f=o7(l)|0,(f&2147483647)>>>0<=2139095040)if((f^c|0)<0){s=(c|0)<0?l:s;break}else{s=s>2]=s,n[v>>2]|0|0}function k0(s,l){s=y(s),l=y(l);var c=0,f=0;c=a7(s)|0;do if((c&2147483647)>>>0<=2139095040){if(f=a7(l)|0,(f&2147483647)>>>0<=2139095040)if((f^c|0)<0){s=(c|0)<0?s:l;break}else{s=s>2]=s,n[v>>2]|0|0}function mR(s,l){s=y(s),l=y(l);var c=0,f=0,d=0,m=0,B=0,k=0,Q=0,O=0;m=(h[v>>2]=s,n[v>>2]|0),k=(h[v>>2]=l,n[v>>2]|0),c=m>>>23&255,B=k>>>23&255,Q=m&-2147483648,d=k<<1;e:do if(d|0&&!((c|0)==255|((ZUe(l)|0)&2147483647)>>>0>2139095040)){if(f=m<<1,f>>>0<=d>>>0)return l=y(s*y(0)),y((f|0)==(d|0)?l:s);if(c)f=m&8388607|8388608;else{if(c=m<<9,(c|0)>-1){f=c,c=0;do c=c+-1|0,f=f<<1;while((f|0)>-1)}else c=0;f=m<<1-c}if(B)k=k&8388607|8388608;else{if(m=k<<9,(m|0)>-1){d=0;do 
d=d+-1|0,m=m<<1;while((m|0)>-1)}else d=0;B=d,k=k<<1-d}d=f-k|0,m=(d|0)>-1;t:do if((c|0)>(B|0)){for(;;){if(m)if(d)f=d;else break;if(f=f<<1,c=c+-1|0,d=f-k|0,m=(d|0)>-1,(c|0)<=(B|0))break t}l=y(s*y(0));break e}while(!1);if(m)if(d)f=d;else{l=y(s*y(0));break}if(f>>>0<8388608)do f=f<<1,c=c+-1|0;while(f>>>0<8388608);(c|0)>0?c=f+-8388608|c<<23:c=f>>>(1-c|0),l=(n[v>>2]=c|Q,y(h[v>>2]))}else O=3;while(!1);return(O|0)==3&&(l=y(s*l),l=y(l/l)),y(l)}function ZUe(s){return s=y(s),h[v>>2]=s,n[v>>2]|0|0}function $Ue(s,l){return s=s|0,l=l|0,e7(n[582]|0,s,l)|0}function Jr(s){s=s|0,Tt()}function Md(s){s=s|0}function e3e(s,l){return s=s|0,l=l|0,0}function t3e(s){return s=s|0,(l7(s+4|0)|0)==-1?(ef[n[(n[s>>2]|0)+8>>2]&127](s),s=1):s=0,s|0}function l7(s){s=s|0;var l=0;return l=n[s>>2]|0,n[s>>2]=l+-1,l+-1|0}function Sp(s){s=s|0,t3e(s)|0&&r3e(s)}function r3e(s){s=s|0;var l=0;l=s+8|0,n[l>>2]|0&&(l7(l)|0)!=-1||ef[n[(n[s>>2]|0)+16>>2]&127](s)}function Kt(s){s=s|0;var l=0;for(l=s|0?s:1;s=Jv(l)|0,!(s|0);){if(s=i3e()|0,!s){s=0;break}B7[s&0]()}return s|0}function c7(s){return s=s|0,Kt(s)|0}function gt(s){s=s|0,Xv(s)}function n3e(s){s=s|0,(o[s+11>>0]|0)<0&>(n[s>>2]|0)}function i3e(){var s=0;return s=n[2923]|0,n[2923]=s+0,s|0}function s3e(){}function $v(s,l,c,f){return s=s|0,l=l|0,c=c|0,f=f|0,f=l-f-(c>>>0>s>>>0|0)>>>0,we=f,s-c>>>0|0|0}function yR(s,l,c,f){return s=s|0,l=l|0,c=c|0,f=f|0,c=s+c>>>0,we=l+f+(c>>>0>>0|0)>>>0,c|0|0}function Od(s,l,c){s=s|0,l=l|0,c=c|0;var f=0,d=0,m=0,B=0;if(m=s+c|0,l=l&255,(c|0)>=67){for(;s&3;)o[s>>0]=l,s=s+1|0;for(f=m&-4|0,d=f-64|0,B=l|l<<8|l<<16|l<<24;(s|0)<=(d|0);)n[s>>2]=B,n[s+4>>2]=B,n[s+8>>2]=B,n[s+12>>2]=B,n[s+16>>2]=B,n[s+20>>2]=B,n[s+24>>2]=B,n[s+28>>2]=B,n[s+32>>2]=B,n[s+36>>2]=B,n[s+40>>2]=B,n[s+44>>2]=B,n[s+48>>2]=B,n[s+52>>2]=B,n[s+56>>2]=B,n[s+60>>2]=B,s=s+64|0;for(;(s|0)<(f|0);)n[s>>2]=B,s=s+4|0}for(;(s|0)<(m|0);)o[s>>0]=l,s=s+1|0;return m-c|0}function u7(s,l,c){return s=s|0,l=l|0,c=c|0,(c|0)<32?(we=l<>>32-c,s<>>c,s>>>c|(l&(1<>>c-32|0)}function Dr(s,l,c){s=s|0,l=l|0,c=c|0;var f=0,d=0,m=0;if((c|0)>=8192)return fc(s|0,l|0,c|0)|0;if(m=s|0,d=s+c|0,(s&3)==(l&3)){for(;s&3;){if(!c)return m|0;o[s>>0]=o[l>>0]|0,s=s+1|0,l=l+1|0,c=c-1|0}for(c=d&-4|0,f=c-64|0;(s|0)<=(f|0);)n[s>>2]=n[l>>2],n[s+4>>2]=n[l+4>>2],n[s+8>>2]=n[l+8>>2],n[s+12>>2]=n[l+12>>2],n[s+16>>2]=n[l+16>>2],n[s+20>>2]=n[l+20>>2],n[s+24>>2]=n[l+24>>2],n[s+28>>2]=n[l+28>>2],n[s+32>>2]=n[l+32>>2],n[s+36>>2]=n[l+36>>2],n[s+40>>2]=n[l+40>>2],n[s+44>>2]=n[l+44>>2],n[s+48>>2]=n[l+48>>2],n[s+52>>2]=n[l+52>>2],n[s+56>>2]=n[l+56>>2],n[s+60>>2]=n[l+60>>2],s=s+64|0,l=l+64|0;for(;(s|0)<(c|0);)n[s>>2]=n[l>>2],s=s+4|0,l=l+4|0}else for(c=d-4|0;(s|0)<(c|0);)o[s>>0]=o[l>>0]|0,o[s+1>>0]=o[l+1>>0]|0,o[s+2>>0]=o[l+2>>0]|0,o[s+3>>0]=o[l+3>>0]|0,s=s+4|0,l=l+4|0;for(;(s|0)<(d|0);)o[s>>0]=o[l>>0]|0,s=s+1|0,l=l+1|0;return m|0}function A7(s){s=s|0;var l=0;return l=o[L+(s&255)>>0]|0,(l|0)<8?l|0:(l=o[L+(s>>8&255)>>0]|0,(l|0)<8?l+8|0:(l=o[L+(s>>16&255)>>0]|0,(l|0)<8?l+16|0:(o[L+(s>>>24)>>0]|0)+24|0))}function f7(s,l,c,f,d){s=s|0,l=l|0,c=c|0,f=f|0,d=d|0;var m=0,B=0,k=0,Q=0,O=0,M=0,j=0,se=0,je=0,Oe=0;if(M=s,Q=l,O=Q,B=c,se=f,k=se,!O)return m=(d|0)!=0,k?m?(n[d>>2]=s|0,n[d+4>>2]=l&0,se=0,d=0,we=se,d|0):(se=0,d=0,we=se,d|0):(m&&(n[d>>2]=(M>>>0)%(B>>>0),n[d+4>>2]=0),se=0,d=(M>>>0)/(B>>>0)>>>0,we=se,d|0);m=(k|0)==0;do 
if(B){if(!m){if(m=(S(k|0)|0)-(S(O|0)|0)|0,m>>>0<=31){j=m+1|0,k=31-m|0,l=m-31>>31,B=j,s=M>>>(j>>>0)&l|O<>>(j>>>0)&l,m=0,k=M<>2]=s|0,n[d+4>>2]=Q|l&0,se=0,d=0,we=se,d|0):(se=0,d=0,we=se,d|0)}if(m=B-1|0,m&B|0){k=(S(B|0)|0)+33-(S(O|0)|0)|0,Oe=64-k|0,j=32-k|0,Q=j>>31,je=k-32|0,l=je>>31,B=k,s=j-1>>31&O>>>(je>>>0)|(O<>>(k>>>0))&l,l=l&O>>>(k>>>0),m=M<>>(je>>>0))&Q|M<>31;break}return d|0&&(n[d>>2]=m&M,n[d+4>>2]=0),(B|0)==1?(je=Q|l&0,Oe=s|0|0,we=je,Oe|0):(Oe=A7(B|0)|0,je=O>>>(Oe>>>0)|0,Oe=O<<32-Oe|M>>>(Oe>>>0)|0,we=je,Oe|0)}else{if(m)return d|0&&(n[d>>2]=(O>>>0)%(B>>>0),n[d+4>>2]=0),je=0,Oe=(O>>>0)/(B>>>0)>>>0,we=je,Oe|0;if(!M)return d|0&&(n[d>>2]=0,n[d+4>>2]=(O>>>0)%(k>>>0)),je=0,Oe=(O>>>0)/(k>>>0)>>>0,we=je,Oe|0;if(m=k-1|0,!(m&k))return d|0&&(n[d>>2]=s|0,n[d+4>>2]=m&O|l&0),je=0,Oe=O>>>((A7(k|0)|0)>>>0),we=je,Oe|0;if(m=(S(k|0)|0)-(S(O|0)|0)|0,m>>>0<=30){l=m+1|0,k=31-m|0,B=l,s=O<>>(l>>>0),l=O>>>(l>>>0),m=0,k=M<>2]=s|0,n[d+4>>2]=Q|l&0,je=0,Oe=0,we=je,Oe|0):(je=0,Oe=0,we=je,Oe|0)}while(!1);if(!B)O=k,Q=0,k=0;else{j=c|0|0,M=se|f&0,O=yR(j|0,M|0,-1,-1)|0,c=we,Q=k,k=0;do f=Q,Q=m>>>31|Q<<1,m=k|m<<1,f=s<<1|f>>>31|0,se=s>>>31|l<<1|0,$v(O|0,c|0,f|0,se|0)|0,Oe=we,je=Oe>>31|((Oe|0)<0?-1:0)<<1,k=je&1,s=$v(f|0,se|0,je&j|0,(((Oe|0)<0?-1:0)>>31|((Oe|0)<0?-1:0)<<1)&M|0)|0,l=we,B=B-1|0;while(B|0);O=Q,Q=0}return B=0,d|0&&(n[d>>2]=s,n[d+4>>2]=l),je=(m|0)>>>31|(O|B)<<1|(B<<1|m>>>31)&0|Q,Oe=(m<<1|0)&-2|k,we=je,Oe|0}function ER(s,l,c,f){return s=s|0,l=l|0,c=c|0,f=f|0,f7(s,l,c,f,0)|0}function bp(s){s=s|0;var l=0,c=0;return c=s+15&-16|0,l=n[I>>2]|0,s=l+c|0,(c|0)>0&(s|0)<(l|0)|(s|0)<0?(ie()|0,vA(12),-1):(n[I>>2]=s,(s|0)>($()|0)&&!(X()|0)?(n[I>>2]=l,vA(12),-1):l|0)}function ww(s,l,c){s=s|0,l=l|0,c=c|0;var f=0;if((l|0)<(s|0)&(s|0)<(l+c|0)){for(f=s,l=l+c|0,s=s+c|0;(c|0)>0;)s=s-1|0,l=l-1|0,c=c-1|0,o[s>>0]=o[l>>0]|0;s=f}else Dr(s,l,c)|0;return s|0}function CR(s,l,c,f){s=s|0,l=l|0,c=c|0,f=f|0;var d=0,m=0;return m=C,C=C+16|0,d=m|0,f7(s,l,c,f,d)|0,C=m,we=n[d+4>>2]|0,n[d>>2]|0|0}function p7(s){return s=s|0,(s&255)<<24|(s>>8&255)<<16|(s>>16&255)<<8|s>>>24|0}function o3e(s,l,c,f,d,m){s=s|0,l=l|0,c=c|0,f=f|0,d=d|0,m=m|0,h7[s&1](l|0,c|0,f|0,d|0,m|0)}function a3e(s,l,c){s=s|0,l=l|0,c=y(c),g7[s&1](l|0,y(c))}function l3e(s,l,c){s=s|0,l=l|0,c=+c,d7[s&31](l|0,+c)}function c3e(s,l,c,f){return s=s|0,l=l|0,c=y(c),f=y(f),y(m7[s&0](l|0,y(c),y(f)))}function u3e(s,l){s=s|0,l=l|0,ef[s&127](l|0)}function A3e(s,l,c){s=s|0,l=l|0,c=c|0,tf[s&31](l|0,c|0)}function f3e(s,l){return s=s|0,l=l|0,F0[s&31](l|0)|0}function p3e(s,l,c,f,d){s=s|0,l=l|0,c=+c,f=+f,d=d|0,y7[s&1](l|0,+c,+f,d|0)}function h3e(s,l,c,f){s=s|0,l=l|0,c=+c,f=+f,V3e[s&1](l|0,+c,+f)}function g3e(s,l,c,f){return s=s|0,l=l|0,c=c|0,f=f|0,rD[s&7](l|0,c|0,f|0)|0}function d3e(s,l,c,f){return s=s|0,l=l|0,c=c|0,f=f|0,+z3e[s&1](l|0,c|0,f|0)}function m3e(s,l){return s=s|0,l=l|0,+E7[s&15](l|0)}function y3e(s,l,c){return s=s|0,l=l|0,c=+c,J3e[s&1](l|0,+c)|0}function E3e(s,l,c){return s=s|0,l=l|0,c=c|0,IR[s&15](l|0,c|0)|0}function C3e(s,l,c,f,d,m){s=s|0,l=l|0,c=c|0,f=+f,d=+d,m=m|0,X3e[s&1](l|0,c|0,+f,+d,m|0)}function w3e(s,l,c,f,d,m,B){s=s|0,l=l|0,c=c|0,f=f|0,d=d|0,m=m|0,B=B|0,Z3e[s&1](l|0,c|0,f|0,d|0,m|0,B|0)}function I3e(s,l,c){return s=s|0,l=l|0,c=c|0,+C7[s&7](l|0,c|0)}function B3e(s){return s=s|0,nD[s&7]()|0}function v3e(s,l,c,f,d,m){return s=s|0,l=l|0,c=c|0,f=f|0,d=d|0,m=m|0,w7[s&1](l|0,c|0,f|0,d|0,m|0)|0}function D3e(s,l,c,f,d){s=s|0,l=l|0,c=c|0,f=f|0,d=+d,$3e[s&1](l|0,c|0,f|0,+d)}function P3e(s,l,c,f,d,m,B){s=s|0,l=l|0,c=c|0,f=y(f),d=d|0,m=y(m),B=B|0,I7[s&1](l|0,c|0,y(f),d|0,y(m),B|0)}function 
S3e(s,l,c,f){s=s|0,l=l|0,c=c|0,f=f|0,vw[s&15](l|0,c|0,f|0)}function b3e(s){s=s|0,B7[s&0]()}function x3e(s,l,c,f){s=s|0,l=l|0,c=c|0,f=+f,v7[s&15](l|0,c|0,+f)}function k3e(s,l,c){return s=s|0,l=+l,c=+c,e_e[s&1](+l,+c)|0}function Q3e(s,l,c,f,d){s=s|0,l=l|0,c=c|0,f=f|0,d=d|0,BR[s&15](l|0,c|0,f|0,d|0)}function F3e(s,l,c,f,d){s=s|0,l=l|0,c=c|0,f=f|0,d=d|0,F(0)}function R3e(s,l){s=s|0,l=y(l),F(1)}function Ea(s,l){s=s|0,l=+l,F(2)}function T3e(s,l,c){return s=s|0,l=y(l),c=y(c),F(3),Xe}function Er(s){s=s|0,F(4)}function Iw(s,l){s=s|0,l=l|0,F(5)}function Xa(s){return s=s|0,F(6),0}function N3e(s,l,c,f){s=s|0,l=+l,c=+c,f=f|0,F(7)}function L3e(s,l,c){s=s|0,l=+l,c=+c,F(8)}function M3e(s,l,c){return s=s|0,l=l|0,c=c|0,F(9),0}function O3e(s,l,c){return s=s|0,l=l|0,c=c|0,F(10),0}function Q0(s){return s=s|0,F(11),0}function U3e(s,l){return s=s|0,l=+l,F(12),0}function Bw(s,l){return s=s|0,l=l|0,F(13),0}function _3e(s,l,c,f,d){s=s|0,l=l|0,c=+c,f=+f,d=d|0,F(14)}function H3e(s,l,c,f,d,m){s=s|0,l=l|0,c=c|0,f=f|0,d=d|0,m=m|0,F(15)}function wR(s,l){return s=s|0,l=l|0,F(16),0}function q3e(){return F(17),0}function j3e(s,l,c,f,d){return s=s|0,l=l|0,c=c|0,f=f|0,d=d|0,F(18),0}function G3e(s,l,c,f){s=s|0,l=l|0,c=c|0,f=+f,F(19)}function Y3e(s,l,c,f,d,m){s=s|0,l=l|0,c=y(c),f=f|0,d=y(d),m=m|0,F(20)}function tD(s,l,c){s=s|0,l=l|0,c=c|0,F(21)}function W3e(){F(22)}function Ud(s,l,c){s=s|0,l=l|0,c=+c,F(23)}function K3e(s,l){return s=+s,l=+l,F(24),0}function _d(s,l,c,f){s=s|0,l=l|0,c=c|0,f=f|0,F(25)}var h7=[F3e,jLe],g7=[R3e,fo],d7=[Ea,fw,pw,lF,cF,Dl,hw,uF,xd,ku,dw,AF,Lv,WA,Mv,kd,Ov,Uv,Qd,Ea,Ea,Ea,Ea,Ea,Ea,Ea,Ea,Ea,Ea,Ea,Ea,Ea],m7=[T3e],ef=[Er,Md,BDe,vDe,DDe,exe,txe,rxe,yNe,ENe,CNe,kLe,QLe,FLe,Z4e,$4e,eUe,ds,Qv,bd,YA,gw,Eve,Cve,pDe,RDe,YDe,cPe,DPe,qPe,sSe,CSe,NSe,XSe,pbe,xbe,Ybe,Exe,Nxe,Xxe,pke,xke,Yke,uQe,DQe,UQe,tFe,bc,FFe,VFe,pRe,QRe,WRe,pTe,BTe,PTe,jTe,WTe,cNe,INe,DNe,qNe,oLe,eG,HMe,yOe,ROe,VOe,d4e,Q4e,q4e,Y4e,Er,Er,Er,Er,Er,Er,Er,Er,Er,Er,Er,Er,Er,Er,Er,Er,Er,Er,Er,Er,Er,Er,Er,Er,Er,Er,Er,Er,Er,Er,Er,Er,Er,Er,Er,Er,Er,Er,Er,Er,Er,Er,Er,Er,Er,Er,Er,Er,Er,Er,Er,Er,Er,Er,Er,Er],tf=[Iw,tF,rF,Aw,xu,nF,iF,Cp,sF,oF,aF,Nv,KA,Ve,At,Wt,vr,Sn,Qr,pF,ove,xve,hQe,bQe,NRe,GMe,hLe,O9,Iw,Iw,Iw,Iw],F0=[Xa,xUe,eF,D,fe,De,vt,wt,bt,_r,di,po,nve,ive,wve,iFe,JRe,YNe,VMe,Va,Xa,Xa,Xa,Xa,Xa,Xa,Xa,Xa,Xa,Xa,Xa,Xa],y7=[N3e,Ive],V3e=[L3e,fNe],rD=[M3e,Z9,kUe,RUe,WPe,Bxe,LFe,ZOe],z3e=[O3e,mbe],E7=[Q0,Yo,nt,bn,Bve,vve,Dve,Pve,Sve,bve,Q0,Q0,Q0,Q0,Q0,Q0],J3e=[U3e,CTe],IR=[Bw,e3e,sve,mDe,pPe,lSe,BSe,zbe,Uxe,jQe,xv,MOe,Bw,Bw,Bw,Bw],X3e=[_3e,zDe],Z3e=[H3e,C4e],C7=[wR,ai,kve,Qve,Fve,Rbe,wR,wR],nD=[q3e,Rve,cw,ma,kTe,JTe,xNe,z4e],w7=[j3e,nw],$3e=[G3e,mke],I7=[Y3e,ave],vw=[tD,T,os,tn,ho,xPe,OSe,Rke,zke,Sd,fMe,IOe,N4e,tD,tD,tD],B7=[W3e],v7=[Ud,Fv,Rv,Tv,GA,_v,fF,P,tke,ZFe,dTe,Ud,Ud,Ud,Ud,Ud],e_e=[K3e,dNe],BR=[_d,tbe,fFe,mRe,sTe,LTe,rNe,LNe,ALe,eOe,oUe,_d,_d,_d,_d,_d];return{_llvm_bswap_i32:p7,dynCall_idd:k3e,dynCall_i:B3e,_i64Subtract:$v,___udivdi3:ER,dynCall_vif:a3e,setThrew:du,dynCall_viii:S3e,_bitshift64Lshr:eD,_bitshift64Shl:u7,dynCall_vi:u3e,dynCall_viiddi:C3e,dynCall_diii:d3e,dynCall_iii:E3e,_memset:Od,_sbrk:bp,_memcpy:Dr,__GLOBAL__sub_I_Yoga_cpp:Pd,dynCall_vii:A3e,___uremdi3:CR,dynCall_vid:l3e,stackAlloc:lo,_nbind_init:mUe,getTempRet0:qa,dynCall_di:m3e,dynCall_iid:y3e,setTempRet0:bA,_i64Add:yR,dynCall_fiff:c3e,dynCall_iiii:g3e,_emscripten_get_global_libc:bUe,dynCall_viid:x3e,dynCall_viiid:D3e,dynCall_viififi:P3e,dynCall_ii:f3e,__GLOBAL__sub_I_Binding_cc:RMe,dynCall_viiii:Q3e,dynCall_iiiiii:v3e,stackSave:dc,dynCall_viiiii:o3e,__GLOBAL__sub_I_nbind_cc:Tve,dynCall_
vidd:h3e,_free:Xv,runPostSets:s3e,dynCall_viiiiii:w3e,establishStackSpace:qi,_memmove:ww,stackRestore:gu,_malloc:Jv,__GLOBAL__sub_I_common_cc:$Ne,dynCall_viddi:p3e,dynCall_dii:I3e,dynCall_v:b3e}}(Module.asmGlobalArg,Module.asmLibraryArg,buffer),_llvm_bswap_i32=Module._llvm_bswap_i32=asm._llvm_bswap_i32,getTempRet0=Module.getTempRet0=asm.getTempRet0,___udivdi3=Module.___udivdi3=asm.___udivdi3,setThrew=Module.setThrew=asm.setThrew,_bitshift64Lshr=Module._bitshift64Lshr=asm._bitshift64Lshr,_bitshift64Shl=Module._bitshift64Shl=asm._bitshift64Shl,_memset=Module._memset=asm._memset,_sbrk=Module._sbrk=asm._sbrk,_memcpy=Module._memcpy=asm._memcpy,stackAlloc=Module.stackAlloc=asm.stackAlloc,___uremdi3=Module.___uremdi3=asm.___uremdi3,_nbind_init=Module._nbind_init=asm._nbind_init,_i64Subtract=Module._i64Subtract=asm._i64Subtract,setTempRet0=Module.setTempRet0=asm.setTempRet0,_i64Add=Module._i64Add=asm._i64Add,_emscripten_get_global_libc=Module._emscripten_get_global_libc=asm._emscripten_get_global_libc,__GLOBAL__sub_I_Yoga_cpp=Module.__GLOBAL__sub_I_Yoga_cpp=asm.__GLOBAL__sub_I_Yoga_cpp,__GLOBAL__sub_I_Binding_cc=Module.__GLOBAL__sub_I_Binding_cc=asm.__GLOBAL__sub_I_Binding_cc,stackSave=Module.stackSave=asm.stackSave,__GLOBAL__sub_I_nbind_cc=Module.__GLOBAL__sub_I_nbind_cc=asm.__GLOBAL__sub_I_nbind_cc,_free=Module._free=asm._free,runPostSets=Module.runPostSets=asm.runPostSets,establishStackSpace=Module.establishStackSpace=asm.establishStackSpace,_memmove=Module._memmove=asm._memmove,stackRestore=Module.stackRestore=asm.stackRestore,_malloc=Module._malloc=asm._malloc,__GLOBAL__sub_I_common_cc=Module.__GLOBAL__sub_I_common_cc=asm.__GLOBAL__sub_I_common_cc,dynCall_viiiii=Module.dynCall_viiiii=asm.dynCall_viiiii,dynCall_vif=Module.dynCall_vif=asm.dynCall_vif,dynCall_vid=Module.dynCall_vid=asm.dynCall_vid,dynCall_fiff=Module.dynCall_fiff=asm.dynCall_fiff,dynCall_vi=Module.dynCall_vi=asm.dynCall_vi,dynCall_vii=Module.dynCall_vii=asm.dynCall_vii,dynCall_ii=Module.dynCall_ii=asm.dynCall_ii,dynCall_viddi=Module.dynCall_viddi=asm.dynCall_viddi,dynCall_vidd=Module.dynCall_vidd=asm.dynCall_vidd,dynCall_iiii=Module.dynCall_iiii=asm.dynCall_iiii,dynCall_diii=Module.dynCall_diii=asm.dynCall_diii,dynCall_di=Module.dynCall_di=asm.dynCall_di,dynCall_iid=Module.dynCall_iid=asm.dynCall_iid,dynCall_iii=Module.dynCall_iii=asm.dynCall_iii,dynCall_viiddi=Module.dynCall_viiddi=asm.dynCall_viiddi,dynCall_viiiiii=Module.dynCall_viiiiii=asm.dynCall_viiiiii,dynCall_dii=Module.dynCall_dii=asm.dynCall_dii,dynCall_i=Module.dynCall_i=asm.dynCall_i,dynCall_iiiiii=Module.dynCall_iiiiii=asm.dynCall_iiiiii,dynCall_viiid=Module.dynCall_viiid=asm.dynCall_viiid,dynCall_viififi=Module.dynCall_viififi=asm.dynCall_viififi,dynCall_viii=Module.dynCall_viii=asm.dynCall_viii,dynCall_v=Module.dynCall_v=asm.dynCall_v,dynCall_viid=Module.dynCall_viid=asm.dynCall_viid,dynCall_idd=Module.dynCall_idd=asm.dynCall_idd,dynCall_viiii=Module.dynCall_viiii=asm.dynCall_viiii;Runtime.stackAlloc=Module.stackAlloc,Runtime.stackSave=Module.stackSave,Runtime.stackRestore=Module.stackRestore,Runtime.establishStackSpace=Module.establishStackSpace,Runtime.setTempRet0=Module.setTempRet0,Runtime.getTempRet0=Module.getTempRet0,Module.asm=asm;function ExitStatus(t){this.name="ExitStatus",this.message="Program terminated with exit("+t+")",this.status=t}ExitStatus.prototype=new Error,ExitStatus.prototype.constructor=ExitStatus;var initialStackTop,preloadStartTime=null,calledMain=!1;dependenciesFulfilled=function 
t(){Module.calledRun||run(),Module.calledRun||(dependenciesFulfilled=t)},Module.callMain=Module.callMain=function t(e){e=e||[],ensureInitRuntime();var r=e.length+1;function o(){for(var p=0;p<3;p++)a.push(0)}var a=[allocate(intArrayFromString(Module.thisProgram),"i8",ALLOC_NORMAL)];o();for(var n=0;n0||(preRun(),runDependencies>0)||Module.calledRun)return;function e(){Module.calledRun||(Module.calledRun=!0,!ABORT&&(ensureInitRuntime(),preMain(),Module.onRuntimeInitialized&&Module.onRuntimeInitialized(),Module._main&&shouldRunNow&&Module.callMain(t),postRun()))}Module.setStatus?(Module.setStatus("Running..."),setTimeout(function(){setTimeout(function(){Module.setStatus("")},1),e()},1)):e()}Module.run=Module.run=run;function exit(t,e){e&&Module.noExitRuntime||(Module.noExitRuntime||(ABORT=!0,EXITSTATUS=t,STACKTOP=initialStackTop,exitRuntime(),Module.onExit&&Module.onExit(t)),ENVIRONMENT_IS_NODE&&process.exit(t),Module.quit(t,new ExitStatus(t)))}Module.exit=Module.exit=exit;var abortDecorators=[];function abort(t){Module.onAbort&&Module.onAbort(t),t!==void 0?(Module.print(t),Module.printErr(t),t=JSON.stringify(t)):t="",ABORT=!0,EXITSTATUS=1;var e=` +If this abort() is unexpected, build with -s ASSERTIONS=1 which can give more information.`,r="abort("+t+") at "+stackTrace()+e;throw abortDecorators&&abortDecorators.forEach(function(o){r=o(r,t)}),r}if(Module.abort=Module.abort=abort,Module.preInit)for(typeof Module.preInit=="function"&&(Module.preInit=[Module.preInit]);Module.preInit.length>0;)Module.preInit.pop()();var shouldRunNow=!0;Module.noInitialRun&&(shouldRunNow=!1),run()})});var Jg=_((SKt,_Ee)=>{"use strict";var Jyt=OEe(),Xyt=UEe(),v6=!1,D6=null;Xyt({},function(t,e){if(!v6){if(v6=!0,t)throw t;D6=e}});if(!v6)throw new Error("Failed to load the yoga module - it needed to be loaded synchronously, but didn't");_Ee.exports=Jyt(D6.bind,D6.lib)});var S6=_((bKt,P6)=>{"use strict";var HEe=t=>Number.isNaN(t)?!1:t>=4352&&(t<=4447||t===9001||t===9002||11904<=t&&t<=12871&&t!==12351||12880<=t&&t<=19903||19968<=t&&t<=42182||43360<=t&&t<=43388||44032<=t&&t<=55203||63744<=t&&t<=64255||65040<=t&&t<=65049||65072<=t&&t<=65131||65281<=t&&t<=65376||65504<=t&&t<=65510||110592<=t&&t<=110593||127488<=t&&t<=127569||131072<=t&&t<=262141);P6.exports=HEe;P6.exports.default=HEe});var jEe=_((xKt,qEe)=>{"use 
strict";qEe.exports=function(){return/\uD83C\uDFF4\uDB40\uDC67\uDB40\uDC62(?:\uDB40\uDC65\uDB40\uDC6E\uDB40\uDC67|\uDB40\uDC73\uDB40\uDC63\uDB40\uDC74|\uDB40\uDC77\uDB40\uDC6C\uDB40\uDC73)\uDB40\uDC7F|\uD83D\uDC68(?:\uD83C\uDFFC\u200D(?:\uD83E\uDD1D\u200D\uD83D\uDC68\uD83C\uDFFB|\uD83C[\uDF3E\uDF73\uDF93\uDFA4\uDFA8\uDFEB\uDFED]|\uD83D[\uDCBB\uDCBC\uDD27\uDD2C\uDE80\uDE92]|\uD83E[\uDDAF-\uDDB3\uDDBC\uDDBD])|\uD83C\uDFFF\u200D(?:\uD83E\uDD1D\u200D\uD83D\uDC68(?:\uD83C[\uDFFB-\uDFFE])|\uD83C[\uDF3E\uDF73\uDF93\uDFA4\uDFA8\uDFEB\uDFED]|\uD83D[\uDCBB\uDCBC\uDD27\uDD2C\uDE80\uDE92]|\uD83E[\uDDAF-\uDDB3\uDDBC\uDDBD])|\uD83C\uDFFE\u200D(?:\uD83E\uDD1D\u200D\uD83D\uDC68(?:\uD83C[\uDFFB-\uDFFD])|\uD83C[\uDF3E\uDF73\uDF93\uDFA4\uDFA8\uDFEB\uDFED]|\uD83D[\uDCBB\uDCBC\uDD27\uDD2C\uDE80\uDE92]|\uD83E[\uDDAF-\uDDB3\uDDBC\uDDBD])|\uD83C\uDFFD\u200D(?:\uD83E\uDD1D\u200D\uD83D\uDC68(?:\uD83C[\uDFFB\uDFFC])|\uD83C[\uDF3E\uDF73\uDF93\uDFA4\uDFA8\uDFEB\uDFED]|\uD83D[\uDCBB\uDCBC\uDD27\uDD2C\uDE80\uDE92]|\uD83E[\uDDAF-\uDDB3\uDDBC\uDDBD])|\u200D(?:\u2764\uFE0F\u200D(?:\uD83D\uDC8B\u200D)?\uD83D\uDC68|(?:\uD83D[\uDC68\uDC69])\u200D(?:\uD83D\uDC66\u200D\uD83D\uDC66|\uD83D\uDC67\u200D(?:\uD83D[\uDC66\uDC67]))|\uD83D\uDC66\u200D\uD83D\uDC66|\uD83D\uDC67\u200D(?:\uD83D[\uDC66\uDC67])|(?:\uD83D[\uDC68\uDC69])\u200D(?:\uD83D[\uDC66\uDC67])|[\u2695\u2696\u2708]\uFE0F|\uD83D[\uDC66\uDC67]|\uD83C[\uDF3E\uDF73\uDF93\uDFA4\uDFA8\uDFEB\uDFED]|\uD83D[\uDCBB\uDCBC\uDD27\uDD2C\uDE80\uDE92]|\uD83E[\uDDAF-\uDDB3\uDDBC\uDDBD])|(?:\uD83C\uDFFB\u200D[\u2695\u2696\u2708]|\uD83C\uDFFF\u200D[\u2695\u2696\u2708]|\uD83C\uDFFE\u200D[\u2695\u2696\u2708]|\uD83C\uDFFD\u200D[\u2695\u2696\u2708]|\uD83C\uDFFC\u200D[\u2695\u2696\u2708])\uFE0F|\uD83C\uDFFB\u200D(?:\uD83C[\uDF3E\uDF73\uDF93\uDFA4\uDFA8\uDFEB\uDFED]|\uD83D[\uDCBB\uDCBC\uDD27\uDD2C\uDE80\uDE92]|\uD83E[\uDDAF-\uDDB3\uDDBC\uDDBD])|\uD83C[\uDFFB-\uDFFF])|(?:\uD83E\uDDD1\uD83C\uDFFB\u200D\uD83E\uDD1D\u200D\uD83E\uDDD1|\uD83D\uDC69\uD83C\uDFFC\u200D\uD83E\uDD1D\u200D\uD83D\uDC69)\uD83C\uDFFB|\uD83E\uDDD1(?:\uD83C\uDFFF\u200D\uD83E\uDD1D\u200D\uD83E\uDDD1(?:\uD83C[\uDFFB-\uDFFF])|\u200D\uD83E\uDD1D\u200D\uD83E\uDDD1)|(?:\uD83E\uDDD1\uD83C\uDFFE\u200D\uD83E\uDD1D\u200D\uD83E\uDDD1|\uD83D\uDC69\uD83C\uDFFF\u200D\uD83E\uDD1D\u200D(?:\uD83D[\uDC68\uDC69]))(?:\uD83C[\uDFFB-\uDFFE])|(?:\uD83E\uDDD1\uD83C\uDFFC\u200D\uD83E\uDD1D\u200D\uD83E\uDDD1|\uD83D\uDC69\uD83C\uDFFD\u200D\uD83E\uDD1D\u200D\uD83D\uDC69)(?:\uD83C[\uDFFB\uDFFC])|\uD83D\uDC69(?:\uD83C\uDFFE\u200D(?:\uD83E\uDD1D\u200D\uD83D\uDC68(?:\uD83C[\uDFFB-\uDFFD\uDFFF])|\uD83C[\uDF3E\uDF73\uDF93\uDFA4\uDFA8\uDFEB\uDFED]|\uD83D[\uDCBB\uDCBC\uDD27\uDD2C\uDE80\uDE92]|\uD83E[\uDDAF-\uDDB3\uDDBC\uDDBD])|\uD83C\uDFFC\u200D(?:\uD83E\uDD1D\u200D\uD83D\uDC68(?:\uD83C[\uDFFB\uDFFD-\uDFFF])|\uD83C[\uDF3E\uDF73\uDF93\uDFA4\uDFA8\uDFEB\uDFED]|\uD83D[\uDCBB\uDCBC\uDD27\uDD2C\uDE80\uDE92]|\uD83E[\uDDAF-\uDDB3\uDDBC\uDDBD])|\uD83C\uDFFB\u200D(?:\uD83E\uDD1D\u200D\uD83D\uDC68(?:\uD83C[\uDFFC-\uDFFF])|\uD83C[\uDF3E\uDF73\uDF93\uDFA4\uDFA8\uDFEB\uDFED]|\uD83D[\uDCBB\uDCBC\uDD27\uDD2C\uDE80\uDE92]|\uD83E[\uDDAF-\uDDB3\uDDBC\uDDBD])|\uD83C\uDFFD\u200D(?:\uD83E\uDD1D\u200D\uD83D\uDC68(?:\uD83C[\uDFFB\uDFFC\uDFFE\uDFFF])|\uD83C[\uDF3E\uDF73\uDF93\uDFA4\uDFA8\uDFEB\uDFED]|\uD83D[\uDCBB\uDCBC\uDD27\uDD2C\uDE80\uDE92]|\uD83E[\uDDAF-\uDDB3\uDDBC\uDDBD])|\u200D(?:\u2764\uFE0F\u200D(?:\uD83D\uDC8B\u200D(?:\uD83D[\uDC68\uDC69])|\uD83D[\uDC68\uDC69])|\uD83C[\uDF3E\uDF73\uDF93\uDFA4\uDFA8\uDFEB\uDFED]|\uD83D[\uDCBB\uDCBC\uDD27\uDD2C\uDE80\uDE92]|\uD83E[\uDDAF-\uDDB3\uD
DBC\uDDBD])|\uD83C\uDFFF\u200D(?:\uD83C[\uDF3E\uDF73\uDF93\uDFA4\uDFA8\uDFEB\uDFED]|\uD83D[\uDCBB\uDCBC\uDD27\uDD2C\uDE80\uDE92]|\uD83E[\uDDAF-\uDDB3\uDDBC\uDDBD]))|\uD83D\uDC69\u200D\uD83D\uDC69\u200D(?:\uD83D\uDC66\u200D\uD83D\uDC66|\uD83D\uDC67\u200D(?:\uD83D[\uDC66\uDC67]))|(?:\uD83E\uDDD1\uD83C\uDFFD\u200D\uD83E\uDD1D\u200D\uD83E\uDDD1|\uD83D\uDC69\uD83C\uDFFE\u200D\uD83E\uDD1D\u200D\uD83D\uDC69)(?:\uD83C[\uDFFB-\uDFFD])|\uD83D\uDC69\u200D\uD83D\uDC66\u200D\uD83D\uDC66|\uD83D\uDC69\u200D\uD83D\uDC69\u200D(?:\uD83D[\uDC66\uDC67])|(?:\uD83D\uDC41\uFE0F\u200D\uD83D\uDDE8|\uD83D\uDC69(?:\uD83C\uDFFF\u200D[\u2695\u2696\u2708]|\uD83C\uDFFE\u200D[\u2695\u2696\u2708]|\uD83C\uDFFC\u200D[\u2695\u2696\u2708]|\uD83C\uDFFB\u200D[\u2695\u2696\u2708]|\uD83C\uDFFD\u200D[\u2695\u2696\u2708]|\u200D[\u2695\u2696\u2708])|(?:(?:\u26F9|\uD83C[\uDFCB\uDFCC]|\uD83D\uDD75)\uFE0F|\uD83D\uDC6F|\uD83E[\uDD3C\uDDDE\uDDDF])\u200D[\u2640\u2642]|(?:\u26F9|\uD83C[\uDFCB\uDFCC]|\uD83D\uDD75)(?:\uD83C[\uDFFB-\uDFFF])\u200D[\u2640\u2642]|(?:\uD83C[\uDFC3\uDFC4\uDFCA]|\uD83D[\uDC6E\uDC71\uDC73\uDC77\uDC81\uDC82\uDC86\uDC87\uDE45-\uDE47\uDE4B\uDE4D\uDE4E\uDEA3\uDEB4-\uDEB6]|\uD83E[\uDD26\uDD37-\uDD39\uDD3D\uDD3E\uDDB8\uDDB9\uDDCD-\uDDCF\uDDD6-\uDDDD])(?:(?:\uD83C[\uDFFB-\uDFFF])\u200D[\u2640\u2642]|\u200D[\u2640\u2642])|\uD83C\uDFF4\u200D\u2620)\uFE0F|\uD83D\uDC69\u200D\uD83D\uDC67\u200D(?:\uD83D[\uDC66\uDC67])|\uD83C\uDFF3\uFE0F\u200D\uD83C\uDF08|\uD83D\uDC15\u200D\uD83E\uDDBA|\uD83D\uDC69\u200D\uD83D\uDC66|\uD83D\uDC69\u200D\uD83D\uDC67|\uD83C\uDDFD\uD83C\uDDF0|\uD83C\uDDF4\uD83C\uDDF2|\uD83C\uDDF6\uD83C\uDDE6|[#\*0-9]\uFE0F\u20E3|\uD83C\uDDE7(?:\uD83C[\uDDE6\uDDE7\uDDE9-\uDDEF\uDDF1-\uDDF4\uDDF6-\uDDF9\uDDFB\uDDFC\uDDFE\uDDFF])|\uD83C\uDDF9(?:\uD83C[\uDDE6\uDDE8\uDDE9\uDDEB-\uDDED\uDDEF-\uDDF4\uDDF7\uDDF9\uDDFB\uDDFC\uDDFF])|\uD83C\uDDEA(?:\uD83C[\uDDE6\uDDE8\uDDEA\uDDEC\uDDED\uDDF7-\uDDFA])|\uD83E\uDDD1(?:\uD83C[\uDFFB-\uDFFF])|\uD83C\uDDF7(?:\uD83C[\uDDEA\uDDF4\uDDF8\uDDFA\uDDFC])|\uD83D\uDC69(?:\uD83C[\uDFFB-\uDFFF])|\uD83C\uDDF2(?:\uD83C[\uDDE6\uDDE8-\uDDED\uDDF0-\uDDFF])|\uD83C\uDDE6(?:\uD83C[\uDDE8-\uDDEC\uDDEE\uDDF1\uDDF2\uDDF4\uDDF6-\uDDFA\uDDFC\uDDFD\uDDFF])|\uD83C\uDDF0(?:\uD83C[\uDDEA\uDDEC-\uDDEE\uDDF2\uDDF3\uDDF5\uDDF7\uDDFC\uDDFE\uDDFF])|\uD83C\uDDED(?:\uD83C[\uDDF0\uDDF2\uDDF3\uDDF7\uDDF9\uDDFA])|\uD83C\uDDE9(?:\uD83C[\uDDEA\uDDEC\uDDEF\uDDF0\uDDF2\uDDF4\uDDFF])|\uD83C\uDDFE(?:\uD83C[\uDDEA\uDDF9])|\uD83C\uDDEC(?:\uD83C[\uDDE6\uDDE7\uDDE9-\uDDEE\uDDF1-\uDDF3\uDDF5-\uDDFA\uDDFC\uDDFE])|\uD83C\uDDF8(?:\uD83C[\uDDE6-\uDDEA\uDDEC-\uDDF4\uDDF7-\uDDF9\uDDFB\uDDFD-\uDDFF])|\uD83C\uDDEB(?:\uD83C[\uDDEE-\uDDF0\uDDF2\uDDF4\uDDF7])|\uD83C\uDDF5(?:\uD83C[\uDDE6\uDDEA-\uDDED\uDDF0-\uDDF3\uDDF7-\uDDF9\uDDFC\uDDFE])|\uD83C\uDDFB(?:\uD83C[\uDDE6\uDDE8\uDDEA\uDDEC\uDDEE\uDDF3\uDDFA])|\uD83C\uDDF3(?:\uD83C[\uDDE6\uDDE8\uDDEA-\uDDEC\uDDEE\uDDF1\uDDF4\uDDF5\uDDF7\uDDFA\uDDFF])|\uD83C\uDDE8(?:\uD83C[\uDDE6\uDDE8\uDDE9\uDDEB-\uDDEE\uDDF0-\uDDF5\uDDF7\uDDFA-\uDDFF])|\uD83C\uDDF1(?:\uD83C[\uDDE6-\uDDE8\uDDEE\uDDF0\uDDF7-\uDDFB\uDDFE])|\uD83C\uDDFF(?:\uD83C[\uDDE6\uDDF2\uDDFC])|\uD83C\uDDFC(?:\uD83C[\uDDEB\uDDF8])|\uD83C\uDDFA(?:\uD83C[\uDDE6\uDDEC\uDDF2\uDDF3\uDDF8\uDDFE\uDDFF])|\uD83C\uDDEE(?:\uD83C[\uDDE8-\uDDEA\uDDF1-\uDDF4\uDDF6-\uDDF9])|\uD83C\uDDEF(?:\uD83C[\uDDEA\uDDF2\uDDF4\uDDF5])|(?:\uD83C[\uDFC3\uDFC4\uDFCA]|\uD83D[\uDC6E\uDC71\uDC73\uDC77\uDC81\uDC82\uDC86\uDC87\uDE45-\uDE47\uDE4B\uDE4D\uDE4E\uDEA3\uDEB4-\uDEB6]|\uD83E[\uDD26\uDD37-\uDD39\uDD3D\uDD3E\uDDB8\uDDB9\uDDCD-\uDDCF\uDDD6-\uDDDD])(?:\uD83C[\uDFFB-\uDFFF])|(?
:\u26F9|\uD83C[\uDFCB\uDFCC]|\uD83D\uDD75)(?:\uD83C[\uDFFB-\uDFFF])|(?:[\u261D\u270A-\u270D]|\uD83C[\uDF85\uDFC2\uDFC7]|\uD83D[\uDC42\uDC43\uDC46-\uDC50\uDC66\uDC67\uDC6B-\uDC6D\uDC70\uDC72\uDC74-\uDC76\uDC78\uDC7C\uDC83\uDC85\uDCAA\uDD74\uDD7A\uDD90\uDD95\uDD96\uDE4C\uDE4F\uDEC0\uDECC]|\uD83E[\uDD0F\uDD18-\uDD1C\uDD1E\uDD1F\uDD30-\uDD36\uDDB5\uDDB6\uDDBB\uDDD2-\uDDD5])(?:\uD83C[\uDFFB-\uDFFF])|(?:[\u231A\u231B\u23E9-\u23EC\u23F0\u23F3\u25FD\u25FE\u2614\u2615\u2648-\u2653\u267F\u2693\u26A1\u26AA\u26AB\u26BD\u26BE\u26C4\u26C5\u26CE\u26D4\u26EA\u26F2\u26F3\u26F5\u26FA\u26FD\u2705\u270A\u270B\u2728\u274C\u274E\u2753-\u2755\u2757\u2795-\u2797\u27B0\u27BF\u2B1B\u2B1C\u2B50\u2B55]|\uD83C[\uDC04\uDCCF\uDD8E\uDD91-\uDD9A\uDDE6-\uDDFF\uDE01\uDE1A\uDE2F\uDE32-\uDE36\uDE38-\uDE3A\uDE50\uDE51\uDF00-\uDF20\uDF2D-\uDF35\uDF37-\uDF7C\uDF7E-\uDF93\uDFA0-\uDFCA\uDFCF-\uDFD3\uDFE0-\uDFF0\uDFF4\uDFF8-\uDFFF]|\uD83D[\uDC00-\uDC3E\uDC40\uDC42-\uDCFC\uDCFF-\uDD3D\uDD4B-\uDD4E\uDD50-\uDD67\uDD7A\uDD95\uDD96\uDDA4\uDDFB-\uDE4F\uDE80-\uDEC5\uDECC\uDED0-\uDED2\uDED5\uDEEB\uDEEC\uDEF4-\uDEFA\uDFE0-\uDFEB]|\uD83E[\uDD0D-\uDD3A\uDD3C-\uDD45\uDD47-\uDD71\uDD73-\uDD76\uDD7A-\uDDA2\uDDA5-\uDDAA\uDDAE-\uDDCA\uDDCD-\uDDFF\uDE70-\uDE73\uDE78-\uDE7A\uDE80-\uDE82\uDE90-\uDE95])|(?:[#\*0-9\xA9\xAE\u203C\u2049\u2122\u2139\u2194-\u2199\u21A9\u21AA\u231A\u231B\u2328\u23CF\u23E9-\u23F3\u23F8-\u23FA\u24C2\u25AA\u25AB\u25B6\u25C0\u25FB-\u25FE\u2600-\u2604\u260E\u2611\u2614\u2615\u2618\u261D\u2620\u2622\u2623\u2626\u262A\u262E\u262F\u2638-\u263A\u2640\u2642\u2648-\u2653\u265F\u2660\u2663\u2665\u2666\u2668\u267B\u267E\u267F\u2692-\u2697\u2699\u269B\u269C\u26A0\u26A1\u26AA\u26AB\u26B0\u26B1\u26BD\u26BE\u26C4\u26C5\u26C8\u26CE\u26CF\u26D1\u26D3\u26D4\u26E9\u26EA\u26F0-\u26F5\u26F7-\u26FA\u26FD\u2702\u2705\u2708-\u270D\u270F\u2712\u2714\u2716\u271D\u2721\u2728\u2733\u2734\u2744\u2747\u274C\u274E\u2753-\u2755\u2757\u2763\u2764\u2795-\u2797\u27A1\u27B0\u27BF\u2934\u2935\u2B05-\u2B07\u2B1B\u2B1C\u2B50\u2B55\u3030\u303D\u3297\u3299]|\uD83C[\uDC04\uDCCF\uDD70\uDD71\uDD7E\uDD7F\uDD8E\uDD91-\uDD9A\uDDE6-\uDDFF\uDE01\uDE02\uDE1A\uDE2F\uDE32-\uDE3A\uDE50\uDE51\uDF00-\uDF21\uDF24-\uDF93\uDF96\uDF97\uDF99-\uDF9B\uDF9E-\uDFF0\uDFF3-\uDFF5\uDFF7-\uDFFF]|\uD83D[\uDC00-\uDCFD\uDCFF-\uDD3D\uDD49-\uDD4E\uDD50-\uDD67\uDD6F\uDD70\uDD73-\uDD7A\uDD87\uDD8A-\uDD8D\uDD90\uDD95\uDD96\uDDA4\uDDA5\uDDA8\uDDB1\uDDB2\uDDBC\uDDC2-\uDDC4\uDDD1-\uDDD3\uDDDC-\uDDDE\uDDE1\uDDE3\uDDE8\uDDEF\uDDF3\uDDFA-\uDE4F\uDE80-\uDEC5\uDECB-\uDED2\uDED5\uDEE0-\uDEE5\uDEE9\uDEEB\uDEEC\uDEF0\uDEF3-\uDEFA\uDFE0-\uDFEB]|\uD83E[\uDD0D-\uDD3A\uDD3C-\uDD45\uDD47-\uDD71\uDD73-\uDD76\uDD7A-\uDDA2\uDDA5-\uDDAA\uDDAE-\uDDCA\uDDCD-\uDDFF\uDE70-\uDE73\uDE78-\uDE7A\uDE80-\uDE82\uDE90-\uDE95])\uFE0F|(?:[\u261D\u26F9\u270A-\u270D]|\uD83C[\uDF85\uDFC2-\uDFC4\uDFC7\uDFCA-\uDFCC]|\uD83D[\uDC42\uDC43\uDC46-\uDC50\uDC66-\uDC78\uDC7C\uDC81-\uDC83\uDC85-\uDC87\uDC8F\uDC91\uDCAA\uDD74\uDD75\uDD7A\uDD90\uDD95\uDD96\uDE45-\uDE47\uDE4B-\uDE4F\uDEA3\uDEB4-\uDEB6\uDEC0\uDECC]|\uD83E[\uDD0F\uDD18-\uDD1F\uDD26\uDD30-\uDD39\uDD3C-\uDD3E\uDDB5\uDDB6\uDDB8\uDDB9\uDDBB\uDDCD-\uDDCF\uDDD1-\uDDDD])/g}});var Lk=_((kKt,b6)=>{"use strict";var Zyt=BP(),$yt=S6(),eEt=jEe(),GEe=t=>{if(typeof t!="string"||t.length===0||(t=Zyt(t),t.length===0))return 0;t=t.replace(eEt()," ");let e=0;for(let r=0;r=127&&o<=159||o>=768&&o<=879||(o>65535&&r++,e+=$yt(o)?2:1)}return e};b6.exports=GEe;b6.exports.default=GEe});var k6=_((QKt,x6)=>{"use strict";var tEt=Lk(),YEe=t=>{let e=0;for(let r of t.split(` +`))e=Math.max(e,tEt(r));return 
e};x6.exports=YEe;x6.exports.default=YEe});var WEe=_(W2=>{"use strict";var rEt=W2&&W2.__importDefault||function(t){return t&&t.__esModule?t:{default:t}};Object.defineProperty(W2,"__esModule",{value:!0});var nEt=rEt(k6()),Q6={};W2.default=t=>{if(t.length===0)return{width:0,height:0};if(Q6[t])return Q6[t];let e=nEt.default(t),r=t.split(` +`).length;return Q6[t]={width:e,height:r},{width:e,height:r}}});var KEe=_(K2=>{"use strict";var iEt=K2&&K2.__importDefault||function(t){return t&&t.__esModule?t:{default:t}};Object.defineProperty(K2,"__esModule",{value:!0});var dn=iEt(Jg()),sEt=(t,e)=>{"position"in e&&t.setPositionType(e.position==="absolute"?dn.default.POSITION_TYPE_ABSOLUTE:dn.default.POSITION_TYPE_RELATIVE)},oEt=(t,e)=>{"marginLeft"in e&&t.setMargin(dn.default.EDGE_START,e.marginLeft||0),"marginRight"in e&&t.setMargin(dn.default.EDGE_END,e.marginRight||0),"marginTop"in e&&t.setMargin(dn.default.EDGE_TOP,e.marginTop||0),"marginBottom"in e&&t.setMargin(dn.default.EDGE_BOTTOM,e.marginBottom||0)},aEt=(t,e)=>{"paddingLeft"in e&&t.setPadding(dn.default.EDGE_LEFT,e.paddingLeft||0),"paddingRight"in e&&t.setPadding(dn.default.EDGE_RIGHT,e.paddingRight||0),"paddingTop"in e&&t.setPadding(dn.default.EDGE_TOP,e.paddingTop||0),"paddingBottom"in e&&t.setPadding(dn.default.EDGE_BOTTOM,e.paddingBottom||0)},lEt=(t,e)=>{var r;"flexGrow"in e&&t.setFlexGrow((r=e.flexGrow)!==null&&r!==void 0?r:0),"flexShrink"in e&&t.setFlexShrink(typeof e.flexShrink=="number"?e.flexShrink:1),"flexDirection"in e&&(e.flexDirection==="row"&&t.setFlexDirection(dn.default.FLEX_DIRECTION_ROW),e.flexDirection==="row-reverse"&&t.setFlexDirection(dn.default.FLEX_DIRECTION_ROW_REVERSE),e.flexDirection==="column"&&t.setFlexDirection(dn.default.FLEX_DIRECTION_COLUMN),e.flexDirection==="column-reverse"&&t.setFlexDirection(dn.default.FLEX_DIRECTION_COLUMN_REVERSE)),"flexBasis"in e&&(typeof e.flexBasis=="number"?t.setFlexBasis(e.flexBasis):typeof e.flexBasis=="string"?t.setFlexBasisPercent(Number.parseInt(e.flexBasis,10)):t.setFlexBasis(NaN)),"alignItems"in e&&((e.alignItems==="stretch"||!e.alignItems)&&t.setAlignItems(dn.default.ALIGN_STRETCH),e.alignItems==="flex-start"&&t.setAlignItems(dn.default.ALIGN_FLEX_START),e.alignItems==="center"&&t.setAlignItems(dn.default.ALIGN_CENTER),e.alignItems==="flex-end"&&t.setAlignItems(dn.default.ALIGN_FLEX_END)),"alignSelf"in e&&((e.alignSelf==="auto"||!e.alignSelf)&&t.setAlignSelf(dn.default.ALIGN_AUTO),e.alignSelf==="flex-start"&&t.setAlignSelf(dn.default.ALIGN_FLEX_START),e.alignSelf==="center"&&t.setAlignSelf(dn.default.ALIGN_CENTER),e.alignSelf==="flex-end"&&t.setAlignSelf(dn.default.ALIGN_FLEX_END)),"justifyContent"in e&&((e.justifyContent==="flex-start"||!e.justifyContent)&&t.setJustifyContent(dn.default.JUSTIFY_FLEX_START),e.justifyContent==="center"&&t.setJustifyContent(dn.default.JUSTIFY_CENTER),e.justifyContent==="flex-end"&&t.setJustifyContent(dn.default.JUSTIFY_FLEX_END),e.justifyContent==="space-between"&&t.setJustifyContent(dn.default.JUSTIFY_SPACE_BETWEEN),e.justifyContent==="space-around"&&t.setJustifyContent(dn.default.JUSTIFY_SPACE_AROUND))},cEt=(t,e)=>{var r,o;"width"in e&&(typeof e.width=="number"?t.setWidth(e.width):typeof e.width=="string"?t.setWidthPercent(Number.parseInt(e.width,10)):t.setWidthAuto()),"height"in e&&(typeof e.height=="number"?t.setHeight(e.height):typeof e.height=="string"?t.setHeightPercent(Number.parseInt(e.height,10)):t.setHeightAuto()),"minWidth"in e&&(typeof 
e.minWidth=="string"?t.setMinWidthPercent(Number.parseInt(e.minWidth,10)):t.setMinWidth((r=e.minWidth)!==null&&r!==void 0?r:0)),"minHeight"in e&&(typeof e.minHeight=="string"?t.setMinHeightPercent(Number.parseInt(e.minHeight,10)):t.setMinHeight((o=e.minHeight)!==null&&o!==void 0?o:0))},uEt=(t,e)=>{"display"in e&&t.setDisplay(e.display==="flex"?dn.default.DISPLAY_FLEX:dn.default.DISPLAY_NONE)},AEt=(t,e)=>{if("borderStyle"in e){let r=typeof e.borderStyle=="string"?1:0;t.setBorder(dn.default.EDGE_TOP,r),t.setBorder(dn.default.EDGE_BOTTOM,r),t.setBorder(dn.default.EDGE_LEFT,r),t.setBorder(dn.default.EDGE_RIGHT,r)}};K2.default=(t,e={})=>{sEt(t,e),oEt(t,e),aEt(t,e),lEt(t,e),cEt(t,e),uEt(t,e),AEt(t,e)}});var JEe=_((TKt,zEe)=>{"use strict";var V2=Lk(),fEt=BP(),pEt=aI(),R6=new Set(["\x1B","\x9B"]),hEt=39,VEe=t=>`${R6.values().next().value}[${t}m`,gEt=t=>t.split(" ").map(e=>V2(e)),F6=(t,e,r)=>{let o=[...e],a=!1,n=V2(fEt(t[t.length-1]));for(let[u,A]of o.entries()){let p=V2(A);if(n+p<=r?t[t.length-1]+=A:(t.push(A),n=0),R6.has(A))a=!0;else if(a&&A==="m"){a=!1;continue}a||(n+=p,n===r&&u0&&t.length>1&&(t[t.length-2]+=t.pop())},dEt=t=>{let e=t.split(" "),r=e.length;for(;r>0&&!(V2(e[r-1])>0);)r--;return r===e.length?t:e.slice(0,r).join(" ")+e.slice(r).join("")},mEt=(t,e,r={})=>{if(r.trim!==!1&&t.trim()==="")return"";let o="",a="",n,u=gEt(t),A=[""];for(let[p,h]of t.split(" ").entries()){r.trim!==!1&&(A[A.length-1]=A[A.length-1].trimLeft());let E=V2(A[A.length-1]);if(p!==0&&(E>=e&&(r.wordWrap===!1||r.trim===!1)&&(A.push(""),E=0),(E>0||r.trim===!1)&&(A[A.length-1]+=" ",E++)),r.hard&&u[p]>e){let I=e-E,v=1+Math.floor((u[p]-I-1)/e);Math.floor((u[p]-1)/e)e&&E>0&&u[p]>0){if(r.wordWrap===!1&&Ee&&r.wordWrap===!1){F6(A,h,e);continue}A[A.length-1]+=h}r.trim!==!1&&(A=A.map(dEt)),o=A.join(` +`);for(let[p,h]of[...o].entries()){if(a+=h,R6.has(h)){let I=parseFloat(/\d[^m]*/.exec(o.slice(p,p+4)));n=I===hEt?null:I}let E=pEt.codes.get(Number(n));n&&E&&(o[p+1]===` +`?a+=VEe(E):h===` +`&&(a+=VEe(n)))}return a};zEe.exports=(t,e,r)=>String(t).normalize().replace(/\r\n/g,` +`).split(` +`).map(o=>mEt(o,e,r)).join(` +`)});var $Ee=_((NKt,ZEe)=>{"use strict";var XEe="[\uD800-\uDBFF][\uDC00-\uDFFF]",yEt=t=>t&&t.exact?new RegExp(`^${XEe}$`):new RegExp(XEe,"g");ZEe.exports=yEt});var T6=_((LKt,nCe)=>{"use strict";var EEt=S6(),CEt=$Ee(),eCe=aI(),rCe=["\x1B","\x9B"],Mk=t=>`${rCe[0]}[${t}m`,tCe=(t,e,r)=>{let o=[];t=[...t];for(let a of t){let n=a;a.match(";")&&(a=a.split(";")[0][0]+"0");let u=eCe.codes.get(parseInt(a,10));if(u){let A=t.indexOf(u.toString());A>=0?t.splice(A,1):o.push(Mk(e?u:n))}else if(e){o.push(Mk(0));break}else o.push(Mk(n))}if(e&&(o=o.filter((a,n)=>o.indexOf(a)===n),r!==void 0)){let a=Mk(eCe.codes.get(parseInt(r,10)));o=o.reduce((n,u)=>u===a?[u,...n]:[...n,u],[])}return o.join("")};nCe.exports=(t,e,r)=>{let o=[...t.normalize()],a=[];r=typeof r=="number"?r:o.length;let n=!1,u,A=0,p="";for(let[h,E]of o.entries()){let I=!1;if(rCe.includes(E)){let v=/\d[^m]*/.exec(t.slice(h,h+18));u=v&&v.length>0?v[0]:void 0,Ae&&A<=r)p+=E;else if(A===e&&!n&&u!==void 0)p=tCe(a);else if(A>=r){p+=tCe(a,!0,u);break}}return p}});var sCe=_((MKt,iCe)=>{"use strict";var Nh=T6(),wEt=Lk();function Ok(t,e,r){if(t.charAt(e)===" ")return e;for(let o=1;o<=3;o++)if(r){if(t.charAt(e+o)===" ")return e+o}else if(t.charAt(e-o)===" ")return e-o;return e}iCe.exports=(t,e,r)=>{r={position:"end",preferTruncationOnSpace:!1,...r};let{position:o,space:a,preferTruncationOnSpace:n}=r,u="\u2026",A=1;if(typeof t!="string")throw new TypeError(`Expected \`input\` to be a 
string, got ${typeof t}`);if(typeof e!="number")throw new TypeError(`Expected \`columns\` to be a number, got ${typeof e}`);if(e<1)return"";if(e===1)return u;let p=wEt(t);if(p<=e)return t;if(o==="start"){if(n){let h=Ok(t,p-e+1,!0);return u+Nh(t,h,p).trim()}return a===!0&&(u+=" ",A=2),u+Nh(t,p-e+A,p)}if(o==="middle"){a===!0&&(u=" "+u+" ",A=3);let h=Math.floor(e/2);if(n){let E=Ok(t,h),I=Ok(t,p-(e-h)+1,!0);return Nh(t,0,E)+u+Nh(t,I,p).trim()}return Nh(t,0,h)+u+Nh(t,p-(e-h)+A,p)}if(o==="end"){if(n){let h=Ok(t,e-1);return Nh(t,0,h)+u}return a===!0&&(u=" "+u,A=2),Nh(t,0,e-A)+u}throw new Error(`Expected \`options.position\` to be either \`start\`, \`middle\` or \`end\`, got ${o}`)}});var L6=_(z2=>{"use strict";var oCe=z2&&z2.__importDefault||function(t){return t&&t.__esModule?t:{default:t}};Object.defineProperty(z2,"__esModule",{value:!0});var IEt=oCe(JEe()),BEt=oCe(sCe()),N6={};z2.default=(t,e,r)=>{let o=t+String(e)+String(r);if(N6[o])return N6[o];let a=t;if(r==="wrap"&&(a=IEt.default(t,e,{trim:!1,hard:!0})),r.startsWith("truncate")){let n="end";r==="truncate-middle"&&(n="middle"),r==="truncate-start"&&(n="start"),a=BEt.default(t,e,{position:n})}return N6[o]=a,a}});var O6=_(M6=>{"use strict";Object.defineProperty(M6,"__esModule",{value:!0});var aCe=t=>{let e="";if(t.childNodes.length>0)for(let r of t.childNodes){let o="";r.nodeName==="#text"?o=r.nodeValue:((r.nodeName==="ink-text"||r.nodeName==="ink-virtual-text")&&(o=aCe(r)),o.length>0&&typeof r.internal_transform=="function"&&(o=r.internal_transform(o))),e+=o}return e};M6.default=aCe});var U6=_(pi=>{"use strict";var J2=pi&&pi.__importDefault||function(t){return t&&t.__esModule?t:{default:t}};Object.defineProperty(pi,"__esModule",{value:!0});pi.setTextNodeValue=pi.createTextNode=pi.setStyle=pi.setAttribute=pi.removeChildNode=pi.insertBeforeNode=pi.appendChildNode=pi.createNode=pi.TEXT_NAME=void 0;var vEt=J2(Jg()),lCe=J2(WEe()),DEt=J2(KEe()),PEt=J2(L6()),SEt=J2(O6());pi.TEXT_NAME="#text";pi.createNode=t=>{var e;let r={nodeName:t,style:{},attributes:{},childNodes:[],parentNode:null,yogaNode:t==="ink-virtual-text"?void 0:vEt.default.Node.create()};return t==="ink-text"&&((e=r.yogaNode)===null||e===void 0||e.setMeasureFunc(bEt.bind(null,r))),r};pi.appendChildNode=(t,e)=>{var r;e.parentNode&&pi.removeChildNode(e.parentNode,e),e.parentNode=t,t.childNodes.push(e),e.yogaNode&&((r=t.yogaNode)===null||r===void 0||r.insertChild(e.yogaNode,t.yogaNode.getChildCount())),(t.nodeName==="ink-text"||t.nodeName==="ink-virtual-text")&&Uk(t)};pi.insertBeforeNode=(t,e,r)=>{var o,a;e.parentNode&&pi.removeChildNode(e.parentNode,e),e.parentNode=t;let n=t.childNodes.indexOf(r);if(n>=0){t.childNodes.splice(n,0,e),e.yogaNode&&((o=t.yogaNode)===null||o===void 0||o.insertChild(e.yogaNode,n));return}t.childNodes.push(e),e.yogaNode&&((a=t.yogaNode)===null||a===void 0||a.insertChild(e.yogaNode,t.yogaNode.getChildCount())),(t.nodeName==="ink-text"||t.nodeName==="ink-virtual-text")&&Uk(t)};pi.removeChildNode=(t,e)=>{var r,o;e.yogaNode&&((o=(r=e.parentNode)===null||r===void 0?void 0:r.yogaNode)===null||o===void 0||o.removeChild(e.yogaNode)),e.parentNode=null;let a=t.childNodes.indexOf(e);a>=0&&t.childNodes.splice(a,1),(t.nodeName==="ink-text"||t.nodeName==="ink-virtual-text")&&Uk(t)};pi.setAttribute=(t,e,r)=>{t.attributes[e]=r};pi.setStyle=(t,e)=>{t.style=e,t.yogaNode&&DEt.default(t.yogaNode,e)};pi.createTextNode=t=>{let e={nodeName:"#text",nodeValue:t,yogaNode:void 0,parentNode:null,style:{}};return pi.setTextNodeValue(e,t),e};var bEt=function(t,e){var r,o;let 
a=t.nodeName==="#text"?t.nodeValue:SEt.default(t),n=lCe.default(a);if(n.width<=e||n.width>=1&&e>0&&e<1)return n;let u=(o=(r=t.style)===null||r===void 0?void 0:r.textWrap)!==null&&o!==void 0?o:"wrap",A=PEt.default(a,e,u);return lCe.default(A)},cCe=t=>{var e;if(!(!t||!t.parentNode))return(e=t.yogaNode)!==null&&e!==void 0?e:cCe(t.parentNode)},Uk=t=>{let e=cCe(t);e?.markDirty()};pi.setTextNodeValue=(t,e)=>{typeof e!="string"&&(e=String(e)),t.nodeValue=e,Uk(t)}});var hCe=_(X2=>{"use strict";var pCe=X2&&X2.__importDefault||function(t){return t&&t.__esModule?t:{default:t}};Object.defineProperty(X2,"__esModule",{value:!0});var uCe=w6(),xEt=pCe(FEe()),ACe=pCe(Jg()),Mo=U6(),fCe=t=>{t?.unsetMeasureFunc(),t?.freeRecursive()};X2.default=xEt.default({schedulePassiveEffects:uCe.unstable_scheduleCallback,cancelPassiveEffects:uCe.unstable_cancelCallback,now:Date.now,getRootHostContext:()=>({isInsideText:!1}),prepareForCommit:()=>{},resetAfterCommit:t=>{if(t.isStaticDirty){t.isStaticDirty=!1,typeof t.onImmediateRender=="function"&&t.onImmediateRender();return}typeof t.onRender=="function"&&t.onRender()},getChildHostContext:(t,e)=>{let r=t.isInsideText,o=e==="ink-text"||e==="ink-virtual-text";return r===o?t:{isInsideText:o}},shouldSetTextContent:()=>!1,createInstance:(t,e,r,o)=>{if(o.isInsideText&&t==="ink-box")throw new Error(" can\u2019t be nested inside component");let a=t==="ink-text"&&o.isInsideText?"ink-virtual-text":t,n=Mo.createNode(a);for(let[u,A]of Object.entries(e))u!=="children"&&(u==="style"?Mo.setStyle(n,A):u==="internal_transform"?n.internal_transform=A:u==="internal_static"?n.internal_static=!0:Mo.setAttribute(n,u,A));return n},createTextInstance:(t,e,r)=>{if(!r.isInsideText)throw new Error(`Text string "${t}" must be rendered inside component`);return Mo.createTextNode(t)},resetTextContent:()=>{},hideTextInstance:t=>{Mo.setTextNodeValue(t,"")},unhideTextInstance:(t,e)=>{Mo.setTextNodeValue(t,e)},getPublicInstance:t=>t,hideInstance:t=>{var e;(e=t.yogaNode)===null||e===void 0||e.setDisplay(ACe.default.DISPLAY_NONE)},unhideInstance:t=>{var e;(e=t.yogaNode)===null||e===void 0||e.setDisplay(ACe.default.DISPLAY_FLEX)},appendInitialChild:Mo.appendChildNode,appendChild:Mo.appendChildNode,insertBefore:Mo.insertBeforeNode,finalizeInitialChildren:(t,e,r,o)=>(t.internal_static&&(o.isStaticDirty=!0,o.staticNode=t),!1),supportsMutation:!0,appendChildToContainer:Mo.appendChildNode,insertInContainerBefore:Mo.insertBeforeNode,removeChildFromContainer:(t,e)=>{Mo.removeChildNode(t,e),fCe(e.yogaNode)},prepareUpdate:(t,e,r,o,a)=>{t.internal_static&&(a.isStaticDirty=!0);let n={},u=Object.keys(o);for(let A of u)if(o[A]!==r[A]){if(A==="style"&&typeof o.style=="object"&&typeof r.style=="object"){let h=o.style,E=r.style,I=Object.keys(h);for(let v of I){if(v==="borderStyle"||v==="borderColor"){if(typeof n.style!="object"){let x={};n.style=x}n.style.borderStyle=h.borderStyle,n.style.borderColor=h.borderColor}if(h[v]!==E[v]){if(typeof n.style!="object"){let x={};n.style=x}n.style[v]=h[v]}}continue}n[A]=o[A]}return n},commitUpdate:(t,e)=>{for(let[r,o]of Object.entries(e))r!=="children"&&(r==="style"?Mo.setStyle(t,o):r==="internal_transform"?t.internal_transform=o:r==="internal_static"?t.internal_static=!0:Mo.setAttribute(t,r,o))},commitTextUpdate:(t,e,r)=>{Mo.setTextNodeValue(t,r)},removeChild:(t,e)=>{Mo.removeChildNode(t,e),fCe(e.yogaNode)}})});var dCe=_((qKt,gCe)=>{"use strict";gCe.exports=(t,e=1,r)=>{if(r={indent:" ",includeEmptyLines:!1,...r},typeof t!="string")throw new TypeError(`Expected \`input\` to be a 
\`string\`, got \`${typeof t}\``);if(typeof e!="number")throw new TypeError(`Expected \`count\` to be a \`number\`, got \`${typeof e}\``);if(typeof r.indent!="string")throw new TypeError(`Expected \`options.indent\` to be a \`string\`, got \`${typeof r.indent}\``);if(e===0)return t;let o=r.includeEmptyLines?/^/gm:/^(?!\s*$)/gm;return t.replace(o,r.indent.repeat(e))}});var mCe=_(Z2=>{"use strict";var kEt=Z2&&Z2.__importDefault||function(t){return t&&t.__esModule?t:{default:t}};Object.defineProperty(Z2,"__esModule",{value:!0});var _k=kEt(Jg());Z2.default=t=>t.getComputedWidth()-t.getComputedPadding(_k.default.EDGE_LEFT)-t.getComputedPadding(_k.default.EDGE_RIGHT)-t.getComputedBorder(_k.default.EDGE_LEFT)-t.getComputedBorder(_k.default.EDGE_RIGHT)});var yCe=_((GKt,QEt)=>{QEt.exports={single:{topLeft:"\u250C",topRight:"\u2510",bottomRight:"\u2518",bottomLeft:"\u2514",vertical:"\u2502",horizontal:"\u2500"},double:{topLeft:"\u2554",topRight:"\u2557",bottomRight:"\u255D",bottomLeft:"\u255A",vertical:"\u2551",horizontal:"\u2550"},round:{topLeft:"\u256D",topRight:"\u256E",bottomRight:"\u256F",bottomLeft:"\u2570",vertical:"\u2502",horizontal:"\u2500"},bold:{topLeft:"\u250F",topRight:"\u2513",bottomRight:"\u251B",bottomLeft:"\u2517",vertical:"\u2503",horizontal:"\u2501"},singleDouble:{topLeft:"\u2553",topRight:"\u2556",bottomRight:"\u255C",bottomLeft:"\u2559",vertical:"\u2551",horizontal:"\u2500"},doubleSingle:{topLeft:"\u2552",topRight:"\u2555",bottomRight:"\u255B",bottomLeft:"\u2558",vertical:"\u2502",horizontal:"\u2550"},classic:{topLeft:"+",topRight:"+",bottomRight:"+",bottomLeft:"+",vertical:"|",horizontal:"-"}}});var CCe=_((YKt,_6)=>{"use strict";var ECe=yCe();_6.exports=ECe;_6.exports.default=ECe});var ICe=_((WKt,wCe)=>{"use strict";var FEt=(t,e,r)=>{let o=t.indexOf(e);if(o===-1)return t;let a=e.length,n=0,u="";do u+=t.substr(n,o-n)+e+r,n=o+a,o=t.indexOf(e,n);while(o!==-1);return u+=t.substr(n),u},REt=(t,e,r,o)=>{let a=0,n="";do{let u=t[o-1]==="\r";n+=t.substr(a,(u?o-1:o)-a)+e+(u?`\r +`:` +`)+r,a=o+1,o=t.indexOf(` +`,a)}while(o!==-1);return n+=t.substr(a),n};wCe.exports={stringReplaceAll:FEt,stringEncaseCRLFWithFirstIndex:REt}});var SCe=_((KKt,PCe)=>{"use strict";var TEt=/(?:\\(u(?:[a-f\d]{4}|\{[a-f\d]{1,6}\})|x[a-f\d]{2}|.))|(?:\{(~)?(\w+(?:\([^)]*\))?(?:\.\w+(?:\([^)]*\))?)*)(?:[ \t]|(?=\r?\n)))|(\})|((?:.|[\r\n\f])+?)/gi,BCe=/(?:^|\.)(\w+)(?:\(([^)]*)\))?/g,NEt=/^(['"])((?:\\.|(?!\1)[^\\])*)\1$/,LEt=/\\(u(?:[a-f\d]{4}|{[a-f\d]{1,6}})|x[a-f\d]{2}|.)|([^\\])/gi,MEt=new Map([["n",` +`],["r","\r"],["t"," "],["b","\b"],["f","\f"],["v","\v"],["0","\0"],["\\","\\"],["e","\x1B"],["a","\x07"]]);function DCe(t){let e=t[0]==="u",r=t[1]==="{";return e&&!r&&t.length===5||t[0]==="x"&&t.length===3?String.fromCharCode(parseInt(t.slice(1),16)):e&&r?String.fromCodePoint(parseInt(t.slice(2,-1),16)):MEt.get(t)||t}function OEt(t,e){let r=[],o=e.trim().split(/\s*,\s*/g),a;for(let n of o){let u=Number(n);if(!Number.isNaN(u))r.push(u);else if(a=n.match(NEt))r.push(a[2].replace(LEt,(A,p,h)=>p?DCe(p):h));else throw new Error(`Invalid Chalk template style argument: ${n} (in style '${t}')`)}return r}function UEt(t){BCe.lastIndex=0;let e=[],r;for(;(r=BCe.exec(t))!==null;){let o=r[1];if(r[2]){let a=OEt(o,r[2]);e.push([o].concat(a))}else e.push([o])}return e}function vCe(t,e){let r={};for(let a of e)for(let n of a.styles)r[n[0]]=a.inverse?null:n.slice(1);let o=t;for(let[a,n]of Object.entries(r))if(Array.isArray(n)){if(!(a in o))throw new Error(`Unknown Chalk style: ${a}`);o=n.length>0?o[a](...n):o[a]}return 
o}PCe.exports=(t,e)=>{let r=[],o=[],a=[];if(e.replace(TEt,(n,u,A,p,h,E)=>{if(u)a.push(DCe(u));else if(p){let I=a.join("");a=[],o.push(r.length===0?I:vCe(t,r)(I)),r.push({inverse:A,styles:UEt(p)})}else if(h){if(r.length===0)throw new Error("Found extraneous } in Chalk template literal");o.push(vCe(t,r)(a.join(""))),a=[],r.pop()}else a.push(E)}),o.push(a.join("")),r.length>0){let n=`Chalk template literal is missing ${r.length} closing bracket${r.length===1?"":"s"} (\`}\`)`;throw new Error(n)}return o.join("")}});var Yk=_((VKt,RCe)=>{"use strict";var $2=aI(),{stdout:q6,stderr:j6}=aN(),{stringReplaceAll:_Et,stringEncaseCRLFWithFirstIndex:HEt}=ICe(),{isArray:Hk}=Array,xCe=["ansi","ansi","ansi256","ansi16m"],nC=Object.create(null),qEt=(t,e={})=>{if(e.level&&!(Number.isInteger(e.level)&&e.level>=0&&e.level<=3))throw new Error("The `level` option should be an integer from 0 to 3");let r=q6?q6.level:0;t.level=e.level===void 0?r:e.level},G6=class{constructor(e){return kCe(e)}},kCe=t=>{let e={};return qEt(e,t),e.template=(...r)=>FCe(e.template,...r),Object.setPrototypeOf(e,qk.prototype),Object.setPrototypeOf(e.template,e),e.template.constructor=()=>{throw new Error("`chalk.constructor()` is deprecated. Use `new chalk.Instance()` instead.")},e.template.Instance=G6,e.template};function qk(t){return kCe(t)}for(let[t,e]of Object.entries($2))nC[t]={get(){let r=jk(this,Y6(e.open,e.close,this._styler),this._isEmpty);return Object.defineProperty(this,t,{value:r}),r}};nC.visible={get(){let t=jk(this,this._styler,!0);return Object.defineProperty(this,"visible",{value:t}),t}};var QCe=["rgb","hex","keyword","hsl","hsv","hwb","ansi","ansi256"];for(let t of QCe)nC[t]={get(){let{level:e}=this;return function(...r){let o=Y6($2.color[xCe[e]][t](...r),$2.color.close,this._styler);return jk(this,o,this._isEmpty)}}};for(let t of QCe){let e="bg"+t[0].toUpperCase()+t.slice(1);nC[e]={get(){let{level:r}=this;return function(...o){let a=Y6($2.bgColor[xCe[r]][t](...o),$2.bgColor.close,this._styler);return jk(this,a,this._isEmpty)}}}}var jEt=Object.defineProperties(()=>{},{...nC,level:{enumerable:!0,get(){return this._generator.level},set(t){this._generator.level=t}}}),Y6=(t,e,r)=>{let o,a;return r===void 0?(o=t,a=e):(o=r.openAll+t,a=e+r.closeAll),{open:t,close:e,openAll:o,closeAll:a,parent:r}},jk=(t,e,r)=>{let o=(...a)=>Hk(a[0])&&Hk(a[0].raw)?bCe(o,FCe(o,...a)):bCe(o,a.length===1?""+a[0]:a.join(" "));return Object.setPrototypeOf(o,jEt),o._generator=t,o._styler=e,o._isEmpty=r,o},bCe=(t,e)=>{if(t.level<=0||!e)return t._isEmpty?"":e;let r=t._styler;if(r===void 0)return e;let{openAll:o,closeAll:a}=r;if(e.indexOf("\x1B")!==-1)for(;r!==void 0;)e=_Et(e,r.close,r.open),r=r.parent;let n=e.indexOf(` +`);return n!==-1&&(e=HEt(e,a,o,n)),o+e+a},H6,FCe=(t,...e)=>{let[r]=e;if(!Hk(r)||!Hk(r.raw))return e.join(" ");let o=e.slice(1),a=[r.raw[0]];for(let n=1;n{"use strict";var GEt=tB&&tB.__importDefault||function(t){return t&&t.__esModule?t:{default:t}};Object.defineProperty(tB,"__esModule",{value:!0});var eB=GEt(Yk()),YEt=/^(rgb|hsl|hsv|hwb)\(\s?(\d+),\s?(\d+),\s?(\d+)\s?\)$/,WEt=/^(ansi|ansi256)\(\s?(\d+)\s?\)$/,Wk=(t,e)=>e==="foreground"?t:"bg"+t[0].toUpperCase()+t.slice(1);tB.default=(t,e,r)=>{if(!e)return t;if(e in eB.default){let a=Wk(e,r);return eB.default[a](t)}if(e.startsWith("#")){let a=Wk("hex",r);return eB.default[a](e)(t)}if(e.startsWith("ansi")){let a=WEt.exec(e);if(!a)return t;let n=Wk(a[1],r),u=Number(a[2]);return eB.default[n](u)(t)}if(e.startsWith("rgb")||e.startsWith("hsl")||e.startsWith("hsv")||e.startsWith("hwb")){let 
a=YEt.exec(e);if(!a)return t;let n=Wk(a[1],r),u=Number(a[2]),A=Number(a[3]),p=Number(a[4]);return eB.default[n](u,A,p)(t)}return t}});var NCe=_(rB=>{"use strict";var TCe=rB&&rB.__importDefault||function(t){return t&&t.__esModule?t:{default:t}};Object.defineProperty(rB,"__esModule",{value:!0});var KEt=TCe(CCe()),K6=TCe(W6());rB.default=(t,e,r,o)=>{if(typeof r.style.borderStyle=="string"){let a=r.yogaNode.getComputedWidth(),n=r.yogaNode.getComputedHeight(),u=r.style.borderColor,A=KEt.default[r.style.borderStyle],p=K6.default(A.topLeft+A.horizontal.repeat(a-2)+A.topRight,u,"foreground"),h=(K6.default(A.vertical,u,"foreground")+` +`).repeat(n-2),E=K6.default(A.bottomLeft+A.horizontal.repeat(a-2)+A.bottomRight,u,"foreground");o.write(t,e,p,{transformers:[]}),o.write(t,e+1,h,{transformers:[]}),o.write(t+a-1,e+1,h,{transformers:[]}),o.write(t,e+n-1,E,{transformers:[]})}}});var MCe=_(nB=>{"use strict";var Xg=nB&&nB.__importDefault||function(t){return t&&t.__esModule?t:{default:t}};Object.defineProperty(nB,"__esModule",{value:!0});var VEt=Xg(Jg()),zEt=Xg(k6()),JEt=Xg(dCe()),XEt=Xg(L6()),ZEt=Xg(mCe()),$Et=Xg(O6()),eCt=Xg(NCe()),tCt=(t,e)=>{var r;let o=(r=t.childNodes[0])===null||r===void 0?void 0:r.yogaNode;if(o){let a=o.getComputedLeft(),n=o.getComputedTop();e=` +`.repeat(n)+JEt.default(e,a)}return e},LCe=(t,e,r)=>{var o;let{offsetX:a=0,offsetY:n=0,transformers:u=[],skipStaticElements:A}=r;if(A&&t.internal_static)return;let{yogaNode:p}=t;if(p){if(p.getDisplay()===VEt.default.DISPLAY_NONE)return;let h=a+p.getComputedLeft(),E=n+p.getComputedTop(),I=u;if(typeof t.internal_transform=="function"&&(I=[t.internal_transform,...u]),t.nodeName==="ink-text"){let v=$Et.default(t);if(v.length>0){let x=zEt.default(v),C=ZEt.default(p);if(x>C){let R=(o=t.style.textWrap)!==null&&o!==void 0?o:"wrap";v=XEt.default(v,C,R)}v=tCt(t,v),e.write(h,E,v,{transformers:I})}return}if(t.nodeName==="ink-box"&&eCt.default(h,E,t,e),t.nodeName==="ink-root"||t.nodeName==="ink-box")for(let v of t.childNodes)LCe(v,e,{offsetX:h,offsetY:E,transformers:I,skipStaticElements:A})}};nB.default=LCe});var UCe=_((ZKt,OCe)=>{"use strict";OCe.exports=t=>{t=Object.assign({onlyFirst:!1},t);let e=["[\\u001B\\u009B][[\\]()#;?]*(?:(?:(?:[a-zA-Z\\d]*(?:;[-a-zA-Z\\d\\/#&.:=?%@~_]*)*)?\\u0007)","(?:(?:\\d{1,4}(?:;\\d{0,4})*)?[\\dA-PR-TZcf-ntqry=><~]))"].join("|");return new RegExp(e,t.onlyFirst?void 0:"g")}});var HCe=_(($Kt,V6)=>{"use strict";var rCt=UCe(),_Ce=t=>typeof t=="string"?t.replace(rCt(),""):t;V6.exports=_Ce;V6.exports.default=_Ce});var GCe=_((eVt,jCe)=>{"use strict";var qCe="[\uD800-\uDBFF][\uDC00-\uDFFF]";jCe.exports=t=>t&&t.exact?new RegExp(`^${qCe}$`):new RegExp(qCe,"g")});var WCe=_((tVt,z6)=>{"use strict";var nCt=HCe(),iCt=GCe(),YCe=t=>nCt(t).replace(iCt()," ").length;z6.exports=YCe;z6.exports.default=YCe});var zCe=_(iB=>{"use strict";var VCe=iB&&iB.__importDefault||function(t){return t&&t.__esModule?t:{default:t}};Object.defineProperty(iB,"__esModule",{value:!0});var KCe=VCe(T6()),sCt=VCe(WCe()),J6=class{constructor(e){this.writes=[];let{width:r,height:o}=e;this.width=r,this.height=o}write(e,r,o,a){let{transformers:n}=a;o&&this.writes.push({x:e,y:r,text:o,transformers:n})}get(){let e=[];for(let o=0;oo.trimRight()).join(` +`),height:e.length}}};iB.default=J6});var ZCe=_(sB=>{"use strict";var X6=sB&&sB.__importDefault||function(t){return t&&t.__esModule?t:{default:t}};Object.defineProperty(sB,"__esModule",{value:!0});var oCt=X6(Jg()),JCe=X6(MCe()),XCe=X6(zCe());sB.default=(t,e)=>{var 
r;if(t.yogaNode.setWidth(e),t.yogaNode){t.yogaNode.calculateLayout(void 0,void 0,oCt.default.DIRECTION_LTR);let o=new XCe.default({width:t.yogaNode.getComputedWidth(),height:t.yogaNode.getComputedHeight()});JCe.default(t,o,{skipStaticElements:!0});let a;!((r=t.staticNode)===null||r===void 0)&&r.yogaNode&&(a=new XCe.default({width:t.staticNode.yogaNode.getComputedWidth(),height:t.staticNode.yogaNode.getComputedHeight()}),JCe.default(t.staticNode,a,{skipStaticElements:!1}));let{output:n,height:u}=o.get();return{output:n,outputHeight:u,staticOutput:a?`${a.get().output} +`:""}}return{output:"",outputHeight:0,staticOutput:""}}});var rwe=_((iVt,twe)=>{"use strict";var $Ce=ve("stream"),ewe=["assert","count","countReset","debug","dir","dirxml","error","group","groupCollapsed","groupEnd","info","log","table","time","timeEnd","timeLog","trace","warn"],Z6={},aCt=t=>{let e=new $Ce.PassThrough,r=new $Ce.PassThrough;e.write=a=>t("stdout",a),r.write=a=>t("stderr",a);let o=new console.Console(e,r);for(let a of ewe)Z6[a]=console[a],console[a]=o[a];return()=>{for(let a of ewe)console[a]=Z6[a];Z6={}}};twe.exports=aCt});var eq=_($6=>{"use strict";Object.defineProperty($6,"__esModule",{value:!0});$6.default=new WeakMap});var rq=_(tq=>{"use strict";Object.defineProperty(tq,"__esModule",{value:!0});var lCt=an(),nwe=lCt.createContext({exit:()=>{}});nwe.displayName="InternalAppContext";tq.default=nwe});var iq=_(nq=>{"use strict";Object.defineProperty(nq,"__esModule",{value:!0});var cCt=an(),iwe=cCt.createContext({stdin:void 0,setRawMode:()=>{},isRawModeSupported:!1,internal_exitOnCtrlC:!0});iwe.displayName="InternalStdinContext";nq.default=iwe});var oq=_(sq=>{"use strict";Object.defineProperty(sq,"__esModule",{value:!0});var uCt=an(),swe=uCt.createContext({stdout:void 0,write:()=>{}});swe.displayName="InternalStdoutContext";sq.default=swe});var lq=_(aq=>{"use strict";Object.defineProperty(aq,"__esModule",{value:!0});var ACt=an(),owe=ACt.createContext({stderr:void 0,write:()=>{}});owe.displayName="InternalStderrContext";aq.default=owe});var Kk=_(cq=>{"use strict";Object.defineProperty(cq,"__esModule",{value:!0});var fCt=an(),awe=fCt.createContext({activeId:void 0,add:()=>{},remove:()=>{},activate:()=>{},deactivate:()=>{},enableFocus:()=>{},disableFocus:()=>{},focusNext:()=>{},focusPrevious:()=>{}});awe.displayName="InternalFocusContext";cq.default=awe});var cwe=_((AVt,lwe)=>{"use strict";var pCt=/[|\\{}()[\]^$+*?.-]/g;lwe.exports=t=>{if(typeof t!="string")throw new TypeError("Expected a string");return t.replace(pCt,"\\$&")}});var pwe=_((fVt,fwe)=>{"use strict";var hCt=cwe(),gCt=typeof process=="object"&&process&&typeof process.cwd=="function"?process.cwd():".",Awe=[].concat(ve("module").builtinModules,"bootstrap_node","node").map(t=>new RegExp(`(?:\\((?:node:)?${t}(?:\\.js)?:\\d+:\\d+\\)$|^\\s*at (?:node:)?${t}(?:\\.js)?:\\d+:\\d+$)`));Awe.push(/\((?:node:)?internal\/[^:]+:\d+:\d+\)$/,/\s*at (?:node:)?internal\/[^:]+:\d+:\d+$/,/\/\.node-spawn-wrap-\w+-\w+\/node:\d+:\d+\)?$/);var uq=class t{constructor(e){e={ignoredPackages:[],...e},"internals"in e||(e.internals=t.nodeInternals()),"cwd"in e||(e.cwd=gCt),this._cwd=e.cwd.replace(/\\/g,"/"),this._internals=[].concat(e.internals,dCt(e.ignoredPackages)),this._wrapCallSite=e.wrapCallSite||!1}static nodeInternals(){return[...Awe]}clean(e,r=0){r=" ".repeat(r),Array.isArray(e)||(e=e.split(` +`)),!/^\s*at /.test(e[0])&&/^\s*at /.test(e[1])&&(e=e.slice(1));let o=!1,a=null,n=[];return e.forEach(u=>{if(u=u.replace(/\\/g,"/"),this._internals.some(p=>p.test(u)))return;let 
A=/^\s*at /.test(u);o?u=u.trimEnd().replace(/^(\s+)at /,"$1"):(u=u.trim(),A&&(u=u.slice(3))),u=u.replace(`${this._cwd}/`,""),u&&(A?(a&&(n.push(a),a=null),n.push(u)):(o=!0,a=u))}),n.map(u=>`${r}${u} +`).join("")}captureString(e,r=this.captureString){typeof e=="function"&&(r=e,e=1/0);let{stackTraceLimit:o}=Error;e&&(Error.stackTraceLimit=e);let a={};Error.captureStackTrace(a,r);let{stack:n}=a;return Error.stackTraceLimit=o,this.clean(n)}capture(e,r=this.capture){typeof e=="function"&&(r=e,e=1/0);let{prepareStackTrace:o,stackTraceLimit:a}=Error;Error.prepareStackTrace=(A,p)=>this._wrapCallSite?p.map(this._wrapCallSite):p,e&&(Error.stackTraceLimit=e);let n={};Error.captureStackTrace(n,r);let{stack:u}=n;return Object.assign(Error,{prepareStackTrace:o,stackTraceLimit:a}),u}at(e=this.at){let[r]=this.capture(1,e);if(!r)return{};let o={line:r.getLineNumber(),column:r.getColumnNumber()};uwe(o,r.getFileName(),this._cwd),r.isConstructor()&&(o.constructor=!0),r.isEval()&&(o.evalOrigin=r.getEvalOrigin()),r.isNative()&&(o.native=!0);let a;try{a=r.getTypeName()}catch{}a&&a!=="Object"&&a!=="[object Object]"&&(o.type=a);let n=r.getFunctionName();n&&(o.function=n);let u=r.getMethodName();return u&&n!==u&&(o.method=u),o}parseLine(e){let r=e&&e.match(mCt);if(!r)return null;let o=r[1]==="new",a=r[2],n=r[3],u=r[4],A=Number(r[5]),p=Number(r[6]),h=r[7],E=r[8],I=r[9],v=r[10]==="native",x=r[11]===")",C,R={};if(E&&(R.line=Number(E)),I&&(R.column=Number(I)),x&&h){let L=0;for(let U=h.length-1;U>0;U--)if(h.charAt(U)===")")L++;else if(h.charAt(U)==="("&&h.charAt(U-1)===" "&&(L--,L===-1&&h.charAt(U-1)===" ")){let z=h.slice(0,U-1);h=h.slice(U+1),a+=` (${z}`;break}}if(a){let L=a.match(yCt);L&&(a=L[1],C=L[2])}return uwe(R,h,this._cwd),o&&(R.constructor=!0),n&&(R.evalOrigin=n,R.evalLine=A,R.evalColumn=p,R.evalFile=u&&u.replace(/\\/g,"/")),v&&(R.native=!0),a&&(R.function=a),C&&a!==C&&(R.method=C),R}};function uwe(t,e,r){e&&(e=e.replace(/\\/g,"/"),e.startsWith(`${r}/`)&&(e=e.slice(r.length+1)),t.file=e)}function dCt(t){if(t.length===0)return[];let e=t.map(r=>hCt(r));return new RegExp(`[/\\\\]node_modules[/\\\\](?:${e.join("|")})[/\\\\][^:]+:\\d+:\\d+`)}var mCt=new RegExp("^(?:\\s*at )?(?:(new) )?(?:(.*?) \\()?(?:eval at ([^ ]+) \\((.+?):(\\d+):(\\d+)\\), )?(?:(.+?):(\\d+):(\\d+)|(native))(\\)?)$"),yCt=/^(.*?) 
\[as (.*?)\]$/;fwe.exports=uq});var gwe=_((pVt,hwe)=>{"use strict";hwe.exports=(t,e)=>t.replace(/^\t+/gm,r=>" ".repeat(r.length*(e||2)))});var mwe=_((hVt,dwe)=>{"use strict";var ECt=gwe(),CCt=(t,e)=>{let r=[],o=t-e,a=t+e;for(let n=o;n<=a;n++)r.push(n);return r};dwe.exports=(t,e,r)=>{if(typeof t!="string")throw new TypeError("Source code is missing.");if(!e||e<1)throw new TypeError("Line number must start from `1`.");if(t=ECt(t).split(/\r?\n/),!(e>t.length))return r={around:3,...r},CCt(e,r.around).filter(o=>t[o-1]!==void 0).map(o=>({line:o,value:t[o-1]}))}});var Vk=_(iu=>{"use strict";var wCt=iu&&iu.__createBinding||(Object.create?function(t,e,r,o){o===void 0&&(o=r),Object.defineProperty(t,o,{enumerable:!0,get:function(){return e[r]}})}:function(t,e,r,o){o===void 0&&(o=r),t[o]=e[r]}),ICt=iu&&iu.__setModuleDefault||(Object.create?function(t,e){Object.defineProperty(t,"default",{enumerable:!0,value:e})}:function(t,e){t.default=e}),BCt=iu&&iu.__importStar||function(t){if(t&&t.__esModule)return t;var e={};if(t!=null)for(var r in t)r!=="default"&&Object.hasOwnProperty.call(t,r)&&wCt(e,t,r);return ICt(e,t),e},vCt=iu&&iu.__rest||function(t,e){var r={};for(var o in t)Object.prototype.hasOwnProperty.call(t,o)&&e.indexOf(o)<0&&(r[o]=t[o]);if(t!=null&&typeof Object.getOwnPropertySymbols=="function")for(var a=0,o=Object.getOwnPropertySymbols(t);a{var{children:r}=t,o=vCt(t,["children"]);let a=Object.assign(Object.assign({},o),{marginLeft:o.marginLeft||o.marginX||o.margin||0,marginRight:o.marginRight||o.marginX||o.margin||0,marginTop:o.marginTop||o.marginY||o.margin||0,marginBottom:o.marginBottom||o.marginY||o.margin||0,paddingLeft:o.paddingLeft||o.paddingX||o.padding||0,paddingRight:o.paddingRight||o.paddingX||o.padding||0,paddingTop:o.paddingTop||o.paddingY||o.padding||0,paddingBottom:o.paddingBottom||o.paddingY||o.padding||0});return ywe.default.createElement("ink-box",{ref:e,style:a},r)});Aq.displayName="Box";Aq.defaultProps={flexDirection:"row",flexGrow:0,flexShrink:1};iu.default=Aq});var hq=_(oB=>{"use strict";var fq=oB&&oB.__importDefault||function(t){return t&&t.__esModule?t:{default:t}};Object.defineProperty(oB,"__esModule",{value:!0});var DCt=fq(an()),iC=fq(Yk()),Ewe=fq(W6()),pq=({color:t,backgroundColor:e,dimColor:r,bold:o,italic:a,underline:n,strikethrough:u,inverse:A,wrap:p,children:h})=>{if(h==null)return null;let E=I=>(r&&(I=iC.default.dim(I)),t&&(I=Ewe.default(I,t,"foreground")),e&&(I=Ewe.default(I,e,"background")),o&&(I=iC.default.bold(I)),a&&(I=iC.default.italic(I)),n&&(I=iC.default.underline(I)),u&&(I=iC.default.strikethrough(I)),A&&(I=iC.default.inverse(I)),I);return DCt.default.createElement("ink-text",{style:{flexGrow:0,flexShrink:1,flexDirection:"row",textWrap:p},internal_transform:E},h)};pq.displayName="Text";pq.defaultProps={dimColor:!1,bold:!1,italic:!1,underline:!1,strikethrough:!1,wrap:"wrap"};oB.default=pq});var Bwe=_(su=>{"use strict";var PCt=su&&su.__createBinding||(Object.create?function(t,e,r,o){o===void 0&&(o=r),Object.defineProperty(t,o,{enumerable:!0,get:function(){return e[r]}})}:function(t,e,r,o){o===void 0&&(o=r),t[o]=e[r]}),SCt=su&&su.__setModuleDefault||(Object.create?function(t,e){Object.defineProperty(t,"default",{enumerable:!0,value:e})}:function(t,e){t.default=e}),bCt=su&&su.__importStar||function(t){if(t&&t.__esModule)return t;var e={};if(t!=null)for(var r in t)r!=="default"&&Object.hasOwnProperty.call(t,r)&&PCt(e,t,r);return SCt(e,t),e},aB=su&&su.__importDefault||function(t){return 
t&&t.__esModule?t:{default:t}};Object.defineProperty(su,"__esModule",{value:!0});var Cwe=bCt(ve("fs")),hs=aB(an()),wwe=aB(pwe()),xCt=aB(mwe()),Vf=aB(Vk()),hA=aB(hq()),Iwe=new wwe.default({cwd:process.cwd(),internals:wwe.default.nodeInternals()}),kCt=({error:t})=>{let e=t.stack?t.stack.split(` +`).slice(1):void 0,r=e?Iwe.parseLine(e[0]):void 0,o,a=0;if(r?.file&&r?.line&&Cwe.existsSync(r.file)){let n=Cwe.readFileSync(r.file,"utf8");if(o=xCt.default(n,r.line),o)for(let{line:u}of o)a=Math.max(a,String(u).length)}return hs.default.createElement(Vf.default,{flexDirection:"column",padding:1},hs.default.createElement(Vf.default,null,hs.default.createElement(hA.default,{backgroundColor:"red",color:"white"}," ","ERROR"," "),hs.default.createElement(hA.default,null," ",t.message)),r&&hs.default.createElement(Vf.default,{marginTop:1},hs.default.createElement(hA.default,{dimColor:!0},r.file,":",r.line,":",r.column)),r&&o&&hs.default.createElement(Vf.default,{marginTop:1,flexDirection:"column"},o.map(({line:n,value:u})=>hs.default.createElement(Vf.default,{key:n},hs.default.createElement(Vf.default,{width:a+1},hs.default.createElement(hA.default,{dimColor:n!==r.line,backgroundColor:n===r.line?"red":void 0,color:n===r.line?"white":void 0},String(n).padStart(a," "),":")),hs.default.createElement(hA.default,{key:n,backgroundColor:n===r.line?"red":void 0,color:n===r.line?"white":void 0}," "+u)))),t.stack&&hs.default.createElement(Vf.default,{marginTop:1,flexDirection:"column"},t.stack.split(` +`).slice(1).map(n=>{let u=Iwe.parseLine(n);return u?hs.default.createElement(Vf.default,{key:n},hs.default.createElement(hA.default,{dimColor:!0},"- "),hs.default.createElement(hA.default,{dimColor:!0,bold:!0},u.function),hs.default.createElement(hA.default,{dimColor:!0,color:"gray"}," ","(",u.file,":",u.line,":",u.column,")")):hs.default.createElement(Vf.default,{key:n},hs.default.createElement(hA.default,{dimColor:!0},"- "),hs.default.createElement(hA.default,{dimColor:!0,bold:!0},n))})))};su.default=kCt});var Dwe=_(ou=>{"use strict";var QCt=ou&&ou.__createBinding||(Object.create?function(t,e,r,o){o===void 0&&(o=r),Object.defineProperty(t,o,{enumerable:!0,get:function(){return e[r]}})}:function(t,e,r,o){o===void 0&&(o=r),t[o]=e[r]}),FCt=ou&&ou.__setModuleDefault||(Object.create?function(t,e){Object.defineProperty(t,"default",{enumerable:!0,value:e})}:function(t,e){t.default=e}),RCt=ou&&ou.__importStar||function(t){if(t&&t.__esModule)return t;var e={};if(t!=null)for(var r in t)r!=="default"&&Object.hasOwnProperty.call(t,r)&&QCt(e,t,r);return FCt(e,t),e},$g=ou&&ou.__importDefault||function(t){return t&&t.__esModule?t:{default:t}};Object.defineProperty(ou,"__esModule",{value:!0});var Zg=RCt(an()),vwe=$g(u6()),TCt=$g(rq()),NCt=$g(iq()),LCt=$g(oq()),MCt=$g(lq()),OCt=$g(Kk()),UCt=$g(Bwe()),_Ct=" ",HCt="\x1B[Z",qCt="\x1B",zk=class extends Zg.PureComponent{constructor(){super(...arguments),this.state={isFocusEnabled:!0,activeFocusId:void 0,focusables:[],error:void 0},this.rawModeEnabledCount=0,this.handleSetRawMode=e=>{let{stdin:r}=this.props;if(!this.isRawModeSupported())throw r===process.stdin?new Error(`Raw mode is not supported on the current process.stdin, which Ink uses as input stream by default. +Read about how to prevent this error on https://github.com/vadimdemedes/ink/#israwmodesupported`):new Error(`Raw mode is not supported on the stdin provided to Ink. 
+Read about how to prevent this error on https://github.com/vadimdemedes/ink/#israwmodesupported`);if(r.setEncoding("utf8"),e){this.rawModeEnabledCount===0&&(r.addListener("data",this.handleInput),r.resume(),r.setRawMode(!0)),this.rawModeEnabledCount++;return}--this.rawModeEnabledCount===0&&(r.setRawMode(!1),r.removeListener("data",this.handleInput),r.pause())},this.handleInput=e=>{e===""&&this.props.exitOnCtrlC&&this.handleExit(),e===qCt&&this.state.activeFocusId&&this.setState({activeFocusId:void 0}),this.state.isFocusEnabled&&this.state.focusables.length>0&&(e===_Ct&&this.focusNext(),e===HCt&&this.focusPrevious())},this.handleExit=e=>{this.isRawModeSupported()&&this.handleSetRawMode(!1),this.props.onExit(e)},this.enableFocus=()=>{this.setState({isFocusEnabled:!0})},this.disableFocus=()=>{this.setState({isFocusEnabled:!1})},this.focusNext=()=>{this.setState(e=>{let r=e.focusables[0].id;return{activeFocusId:this.findNextFocusable(e)||r}})},this.focusPrevious=()=>{this.setState(e=>{let r=e.focusables[e.focusables.length-1].id;return{activeFocusId:this.findPreviousFocusable(e)||r}})},this.addFocusable=(e,{autoFocus:r})=>{this.setState(o=>{let a=o.activeFocusId;return!a&&r&&(a=e),{activeFocusId:a,focusables:[...o.focusables,{id:e,isActive:!0}]}})},this.removeFocusable=e=>{this.setState(r=>({activeFocusId:r.activeFocusId===e?void 0:r.activeFocusId,focusables:r.focusables.filter(o=>o.id!==e)}))},this.activateFocusable=e=>{this.setState(r=>({focusables:r.focusables.map(o=>o.id!==e?o:{id:e,isActive:!0})}))},this.deactivateFocusable=e=>{this.setState(r=>({activeFocusId:r.activeFocusId===e?void 0:r.activeFocusId,focusables:r.focusables.map(o=>o.id!==e?o:{id:e,isActive:!1})}))},this.findNextFocusable=e=>{let r=e.focusables.findIndex(o=>o.id===e.activeFocusId);for(let o=r+1;o{let r=e.focusables.findIndex(o=>o.id===e.activeFocusId);for(let o=r-1;o>=0;o--)if(e.focusables[o].isActive)return e.focusables[o].id}}static getDerivedStateFromError(e){return{error:e}}isRawModeSupported(){return this.props.stdin.isTTY}render(){return Zg.default.createElement(TCt.default.Provider,{value:{exit:this.handleExit}},Zg.default.createElement(NCt.default.Provider,{value:{stdin:this.props.stdin,setRawMode:this.handleSetRawMode,isRawModeSupported:this.isRawModeSupported(),internal_exitOnCtrlC:this.props.exitOnCtrlC}},Zg.default.createElement(LCt.default.Provider,{value:{stdout:this.props.stdout,write:this.props.writeToStdout}},Zg.default.createElement(MCt.default.Provider,{value:{stderr:this.props.stderr,write:this.props.writeToStderr}},Zg.default.createElement(OCt.default.Provider,{value:{activeId:this.state.activeFocusId,add:this.addFocusable,remove:this.removeFocusable,activate:this.activateFocusable,deactivate:this.deactivateFocusable,enableFocus:this.enableFocus,disableFocus:this.disableFocus,focusNext:this.focusNext,focusPrevious:this.focusPrevious}},this.state.error?Zg.default.createElement(UCt.default,{error:this.state.error}):this.props.children)))))}componentDidMount(){vwe.default.hide(this.props.stdout)}componentWillUnmount(){vwe.default.show(this.props.stdout),this.isRawModeSupported()&&this.handleSetRawMode(!1)}componentDidCatch(e){this.handleExit(e)}};ou.default=zk;zk.displayName="InternalApp"});var bwe=_(au=>{"use strict";var jCt=au&&au.__createBinding||(Object.create?function(t,e,r,o){o===void 0&&(o=r),Object.defineProperty(t,o,{enumerable:!0,get:function(){return e[r]}})}:function(t,e,r,o){o===void 
0&&(o=r),t[o]=e[r]}),GCt=au&&au.__setModuleDefault||(Object.create?function(t,e){Object.defineProperty(t,"default",{enumerable:!0,value:e})}:function(t,e){t.default=e}),YCt=au&&au.__importStar||function(t){if(t&&t.__esModule)return t;var e={};if(t!=null)for(var r in t)r!=="default"&&Object.hasOwnProperty.call(t,r)&&jCt(e,t,r);return GCt(e,t),e},lu=au&&au.__importDefault||function(t){return t&&t.__esModule?t:{default:t}};Object.defineProperty(au,"__esModule",{value:!0});var WCt=lu(an()),Pwe=eO(),KCt=lu(pEe()),VCt=lu(s6()),zCt=lu(EEe()),JCt=lu(wEe()),gq=lu(hCe()),XCt=lu(ZCe()),ZCt=lu(c6()),$Ct=lu(rwe()),ewt=YCt(U6()),twt=lu(eq()),rwt=lu(Dwe()),sC=process.env.CI==="false"?!1:zCt.default,Swe=()=>{},dq=class{constructor(e){this.resolveExitPromise=()=>{},this.rejectExitPromise=()=>{},this.unsubscribeExit=()=>{},this.onRender=()=>{if(this.isUnmounted)return;let{output:r,outputHeight:o,staticOutput:a}=XCt.default(this.rootNode,this.options.stdout.columns||80),n=a&&a!==` +`;if(this.options.debug){n&&(this.fullStaticOutput+=a),this.options.stdout.write(this.fullStaticOutput+r);return}if(sC){n&&this.options.stdout.write(a),this.lastOutput=r;return}if(n&&(this.fullStaticOutput+=a),o>=this.options.stdout.rows){this.options.stdout.write(VCt.default.clearTerminal+this.fullStaticOutput+r),this.lastOutput=r;return}n&&(this.log.clear(),this.options.stdout.write(a),this.log(r)),!n&&r!==this.lastOutput&&this.throttledLog(r),this.lastOutput=r},JCt.default(this),this.options=e,this.rootNode=ewt.createNode("ink-root"),this.rootNode.onRender=e.debug?this.onRender:Pwe(this.onRender,32,{leading:!0,trailing:!0}),this.rootNode.onImmediateRender=this.onRender,this.log=KCt.default.create(e.stdout),this.throttledLog=e.debug?this.log:Pwe(this.log,void 0,{leading:!0,trailing:!0}),this.isUnmounted=!1,this.lastOutput="",this.fullStaticOutput="",this.container=gq.default.createContainer(this.rootNode,!1,!1),this.unsubscribeExit=ZCt.default(this.unmount,{alwaysLast:!1}),e.patchConsole&&this.patchConsole(),sC||(e.stdout.on("resize",this.onRender),this.unsubscribeResize=()=>{e.stdout.off("resize",this.onRender)})}render(e){let r=WCt.default.createElement(rwt.default,{stdin:this.options.stdin,stdout:this.options.stdout,stderr:this.options.stderr,writeToStdout:this.writeToStdout,writeToStderr:this.writeToStderr,exitOnCtrlC:this.options.exitOnCtrlC,onExit:this.unmount},e);gq.default.updateContainer(r,this.container,null,Swe)}writeToStdout(e){if(!this.isUnmounted){if(this.options.debug){this.options.stdout.write(e+this.fullStaticOutput+this.lastOutput);return}if(sC){this.options.stdout.write(e);return}this.log.clear(),this.options.stdout.write(e),this.log(this.lastOutput)}}writeToStderr(e){if(!this.isUnmounted){if(this.options.debug){this.options.stderr.write(e),this.options.stdout.write(this.fullStaticOutput+this.lastOutput);return}if(sC){this.options.stderr.write(e);return}this.log.clear(),this.options.stderr.write(e),this.log(this.lastOutput)}}unmount(e){this.isUnmounted||(this.onRender(),this.unsubscribeExit(),typeof this.restoreConsole=="function"&&this.restoreConsole(),typeof this.unsubscribeResize=="function"&&this.unsubscribeResize(),sC?this.options.stdout.write(this.lastOutput+` +`):this.options.debug||this.log.done(),this.isUnmounted=!0,gq.default.updateContainer(null,this.container,null,Swe),twt.default.delete(this.options.stdout),e instanceof Error?this.rejectExitPromise(e):this.resolveExitPromise())}waitUntilExit(){return this.exitPromise||(this.exitPromise=new 
Promise((e,r)=>{this.resolveExitPromise=e,this.rejectExitPromise=r})),this.exitPromise}clear(){!sC&&!this.options.debug&&this.log.clear()}patchConsole(){this.options.debug||(this.restoreConsole=$Ct.default((e,r)=>{e==="stdout"&&this.writeToStdout(r),e==="stderr"&&(r.startsWith("The above error occurred")||this.writeToStderr(r))}))}};au.default=dq});var kwe=_(lB=>{"use strict";var xwe=lB&&lB.__importDefault||function(t){return t&&t.__esModule?t:{default:t}};Object.defineProperty(lB,"__esModule",{value:!0});var nwt=xwe(bwe()),Jk=xwe(eq()),iwt=ve("stream"),swt=(t,e)=>{let r=Object.assign({stdout:process.stdout,stdin:process.stdin,stderr:process.stderr,debug:!1,exitOnCtrlC:!0,patchConsole:!0},owt(e)),o=awt(r.stdout,()=>new nwt.default(r));return o.render(t),{rerender:o.render,unmount:()=>o.unmount(),waitUntilExit:o.waitUntilExit,cleanup:()=>Jk.default.delete(r.stdout),clear:o.clear}};lB.default=swt;var owt=(t={})=>t instanceof iwt.Stream?{stdout:t,stdin:process.stdin}:t,awt=(t,e)=>{let r;return Jk.default.has(t)?r=Jk.default.get(t):(r=e(),Jk.default.set(t,r)),r}});var Fwe=_(zf=>{"use strict";var lwt=zf&&zf.__createBinding||(Object.create?function(t,e,r,o){o===void 0&&(o=r),Object.defineProperty(t,o,{enumerable:!0,get:function(){return e[r]}})}:function(t,e,r,o){o===void 0&&(o=r),t[o]=e[r]}),cwt=zf&&zf.__setModuleDefault||(Object.create?function(t,e){Object.defineProperty(t,"default",{enumerable:!0,value:e})}:function(t,e){t.default=e}),uwt=zf&&zf.__importStar||function(t){if(t&&t.__esModule)return t;var e={};if(t!=null)for(var r in t)r!=="default"&&Object.hasOwnProperty.call(t,r)&&lwt(e,t,r);return cwt(e,t),e};Object.defineProperty(zf,"__esModule",{value:!0});var cB=uwt(an()),Qwe=t=>{let{items:e,children:r,style:o}=t,[a,n]=cB.useState(0),u=cB.useMemo(()=>e.slice(a),[e,a]);cB.useLayoutEffect(()=>{n(e.length)},[e.length]);let A=u.map((h,E)=>r(h,a+E)),p=cB.useMemo(()=>Object.assign({position:"absolute",flexDirection:"column"},o),[o]);return cB.default.createElement("ink-box",{internal_static:!0,style:p},A)};Qwe.displayName="Static";zf.default=Qwe});var Twe=_(uB=>{"use strict";var Awt=uB&&uB.__importDefault||function(t){return t&&t.__esModule?t:{default:t}};Object.defineProperty(uB,"__esModule",{value:!0});var fwt=Awt(an()),Rwe=({children:t,transform:e})=>t==null?null:fwt.default.createElement("ink-text",{style:{flexGrow:0,flexShrink:1,flexDirection:"row"},internal_transform:e},t);Rwe.displayName="Transform";uB.default=Rwe});var Lwe=_(AB=>{"use strict";var pwt=AB&&AB.__importDefault||function(t){return t&&t.__esModule?t:{default:t}};Object.defineProperty(AB,"__esModule",{value:!0});var hwt=pwt(an()),Nwe=({count:t=1})=>hwt.default.createElement("ink-text",null,` +`.repeat(t));Nwe.displayName="Newline";AB.default=Nwe});var Uwe=_(fB=>{"use strict";var Mwe=fB&&fB.__importDefault||function(t){return t&&t.__esModule?t:{default:t}};Object.defineProperty(fB,"__esModule",{value:!0});var gwt=Mwe(an()),dwt=Mwe(Vk()),Owe=()=>gwt.default.createElement(dwt.default,{flexGrow:1});Owe.displayName="Spacer";fB.default=Owe});var Xk=_(pB=>{"use strict";var mwt=pB&&pB.__importDefault||function(t){return t&&t.__esModule?t:{default:t}};Object.defineProperty(pB,"__esModule",{value:!0});var ywt=an(),Ewt=mwt(iq()),Cwt=()=>ywt.useContext(Ewt.default);pB.default=Cwt});var Hwe=_(hB=>{"use strict";var wwt=hB&&hB.__importDefault||function(t){return t&&t.__esModule?t:{default:t}};Object.defineProperty(hB,"__esModule",{value:!0});var 
_we=an(),Iwt=wwt(Xk()),Bwt=(t,e={})=>{let{stdin:r,setRawMode:o,internal_exitOnCtrlC:a}=Iwt.default();_we.useEffect(()=>{if(e.isActive!==!1)return o(!0),()=>{o(!1)}},[e.isActive,o]),_we.useEffect(()=>{if(e.isActive===!1)return;let n=u=>{let A=String(u),p={upArrow:A==="\x1B[A",downArrow:A==="\x1B[B",leftArrow:A==="\x1B[D",rightArrow:A==="\x1B[C",pageDown:A==="\x1B[6~",pageUp:A==="\x1B[5~",return:A==="\r",escape:A==="\x1B",ctrl:!1,shift:!1,tab:A===" "||A==="\x1B[Z",backspace:A==="\b",delete:A==="\x7F"||A==="\x1B[3~",meta:!1};A<=""&&!p.return&&(A=String.fromCharCode(A.charCodeAt(0)+97-1),p.ctrl=!0),A.startsWith("\x1B")&&(A=A.slice(1),p.meta=!0);let h=A>="A"&&A<="Z",E=A>="\u0410"&&A<="\u042F";A.length===1&&(h||E)&&(p.shift=!0),p.tab&&A==="[Z"&&(p.shift=!0),(p.tab||p.backspace||p.delete)&&(A=""),(!(A==="c"&&p.ctrl)||!a)&&t(A,p)};return r?.on("data",n),()=>{r?.off("data",n)}},[e.isActive,r,a,t])};hB.default=Bwt});var qwe=_(gB=>{"use strict";var vwt=gB&&gB.__importDefault||function(t){return t&&t.__esModule?t:{default:t}};Object.defineProperty(gB,"__esModule",{value:!0});var Dwt=an(),Pwt=vwt(rq()),Swt=()=>Dwt.useContext(Pwt.default);gB.default=Swt});var jwe=_(dB=>{"use strict";var bwt=dB&&dB.__importDefault||function(t){return t&&t.__esModule?t:{default:t}};Object.defineProperty(dB,"__esModule",{value:!0});var xwt=an(),kwt=bwt(oq()),Qwt=()=>xwt.useContext(kwt.default);dB.default=Qwt});var Gwe=_(mB=>{"use strict";var Fwt=mB&&mB.__importDefault||function(t){return t&&t.__esModule?t:{default:t}};Object.defineProperty(mB,"__esModule",{value:!0});var Rwt=an(),Twt=Fwt(lq()),Nwt=()=>Rwt.useContext(Twt.default);mB.default=Nwt});var Wwe=_(EB=>{"use strict";var Ywe=EB&&EB.__importDefault||function(t){return t&&t.__esModule?t:{default:t}};Object.defineProperty(EB,"__esModule",{value:!0});var yB=an(),Lwt=Ywe(Kk()),Mwt=Ywe(Xk()),Owt=({isActive:t=!0,autoFocus:e=!1}={})=>{let{isRawModeSupported:r,setRawMode:o}=Mwt.default(),{activeId:a,add:n,remove:u,activate:A,deactivate:p}=yB.useContext(Lwt.default),h=yB.useMemo(()=>Math.random().toString().slice(2,7),[]);return yB.useEffect(()=>(n(h,{autoFocus:e}),()=>{u(h)}),[h,e]),yB.useEffect(()=>{t?A(h):p(h)},[t,h]),yB.useEffect(()=>{if(!(!r||!t))return o(!0),()=>{o(!1)}},[t]),{isFocused:!!h&&a===h}};EB.default=Owt});var Kwe=_(CB=>{"use strict";var Uwt=CB&&CB.__importDefault||function(t){return t&&t.__esModule?t:{default:t}};Object.defineProperty(CB,"__esModule",{value:!0});var _wt=an(),Hwt=Uwt(Kk()),qwt=()=>{let t=_wt.useContext(Hwt.default);return{enableFocus:t.enableFocus,disableFocus:t.disableFocus,focusNext:t.focusNext,focusPrevious:t.focusPrevious}};CB.default=qwt});var Vwe=_(mq=>{"use strict";Object.defineProperty(mq,"__esModule",{value:!0});mq.default=t=>{var e,r,o,a;return{width:(r=(e=t.yogaNode)===null||e===void 0?void 0:e.getComputedWidth())!==null&&r!==void 0?r:0,height:(a=(o=t.yogaNode)===null||o===void 0?void 0:o.getComputedHeight())!==null&&a!==void 0?a:0}}});var ic=_(ro=>{"use strict";Object.defineProperty(ro,"__esModule",{value:!0});var jwt=kwe();Object.defineProperty(ro,"render",{enumerable:!0,get:function(){return jwt.default}});var Gwt=Vk();Object.defineProperty(ro,"Box",{enumerable:!0,get:function(){return Gwt.default}});var Ywt=hq();Object.defineProperty(ro,"Text",{enumerable:!0,get:function(){return Ywt.default}});var Wwt=Fwe();Object.defineProperty(ro,"Static",{enumerable:!0,get:function(){return Wwt.default}});var Kwt=Twe();Object.defineProperty(ro,"Transform",{enumerable:!0,get:function(){return Kwt.default}});var 
Vwt=Lwe();Object.defineProperty(ro,"Newline",{enumerable:!0,get:function(){return Vwt.default}});var zwt=Uwe();Object.defineProperty(ro,"Spacer",{enumerable:!0,get:function(){return zwt.default}});var Jwt=Hwe();Object.defineProperty(ro,"useInput",{enumerable:!0,get:function(){return Jwt.default}});var Xwt=qwe();Object.defineProperty(ro,"useApp",{enumerable:!0,get:function(){return Xwt.default}});var Zwt=Xk();Object.defineProperty(ro,"useStdin",{enumerable:!0,get:function(){return Zwt.default}});var $wt=jwe();Object.defineProperty(ro,"useStdout",{enumerable:!0,get:function(){return $wt.default}});var eIt=Gwe();Object.defineProperty(ro,"useStderr",{enumerable:!0,get:function(){return eIt.default}});var tIt=Wwe();Object.defineProperty(ro,"useFocus",{enumerable:!0,get:function(){return tIt.default}});var rIt=Kwe();Object.defineProperty(ro,"useFocusManager",{enumerable:!0,get:function(){return rIt.default}});var nIt=Vwe();Object.defineProperty(ro,"measureElement",{enumerable:!0,get:function(){return nIt.default}})});var Eq={};Vt(Eq,{Gem:()=>yq});var zwe,ed,yq,Zk=Et(()=>{zwe=Ze(ic()),ed=Ze(an()),yq=(0,ed.memo)(({active:t})=>{let e=(0,ed.useMemo)(()=>t?"\u25C9":"\u25EF",[t]),r=(0,ed.useMemo)(()=>t?"green":"yellow",[t]);return ed.default.createElement(zwe.Text,{color:r},e)})});var Xwe={};Vt(Xwe,{useKeypress:()=>td});function td({active:t},e,r){let{stdin:o}=(0,Jwe.useStdin)(),a=(0,$k.useCallback)((n,u)=>e(n,u),r);(0,$k.useEffect)(()=>{if(!(!t||!o))return o.on("keypress",a),()=>{o.off("keypress",a)}},[t,a,o])}var Jwe,$k,wB=Et(()=>{Jwe=Ze(ic()),$k=Ze(an())});var $we={};Vt($we,{FocusRequest:()=>Zwe,useFocusRequest:()=>Cq});var Zwe,Cq,wq=Et(()=>{wB();Zwe=(r=>(r.BEFORE="before",r.AFTER="after",r))(Zwe||{}),Cq=function({active:t},e,r){td({active:t},(o,a)=>{a.name==="tab"&&(a.shift?e("before"):e("after"))},r)}});var eIe={};Vt(eIe,{useListInput:()=>IB});var IB,eQ=Et(()=>{wB();IB=function(t,e,{active:r,minus:o,plus:a,set:n,loop:u=!0}){td({active:r},(A,p)=>{let h=e.indexOf(t);switch(p.name){case o:{let E=h-1;if(u){n(e[(e.length+E)%e.length]);return}if(E<0)return;n(e[E])}break;case a:{let E=h+1;if(u){n(e[E%e.length]);return}if(E>=e.length)return;n(e[E])}break}},[e,t,a,n,u])}});var tQ={};Vt(tQ,{ScrollableItems:()=>iIt});var Lh,Oa,iIt,rQ=Et(()=>{Lh=Ze(ic()),Oa=Ze(an());wq();eQ();iIt=({active:t=!0,children:e=[],radius:r=10,size:o=1,loop:a=!0,onFocusRequest:n,willReachEnd:u})=>{let A=L=>{if(L.key===null)throw new Error("Expected all children to have a key");return L.key},p=Oa.default.Children.map(e,L=>A(L)),h=p[0],[E,I]=(0,Oa.useState)(h),v=p.indexOf(E);(0,Oa.useEffect)(()=>{p.includes(E)||I(h)},[e]),(0,Oa.useEffect)(()=>{u&&v>=p.length-2&&u()},[v]),Cq({active:t&&!!n},L=>{n?.(L)},[n]),IB(E,p,{active:t,minus:"up",plus:"down",set:I,loop:a});let x=v-r,C=v+r;C>p.length&&(x-=C-p.length,C=p.length),x<0&&(C+=-x,x=0),C>=p.length&&(C=p.length-1);let R=[];for(let L=x;L<=C;++L){let U=p[L],z=t&&U===E;R.push(Oa.default.createElement(Lh.Box,{key:U,height:o},Oa.default.createElement(Lh.Box,{marginLeft:1,marginRight:1},Oa.default.createElement(Lh.Text,null,z?Oa.default.createElement(Lh.Text,{color:"cyan",bold:!0},">"):" ")),Oa.default.createElement(Lh.Box,null,Oa.default.cloneElement(e[L],{active:z}))))}return Oa.default.createElement(Lh.Box,{flexDirection:"column",width:"100%"},R)}});var 
tIe,Jf,rIe,Iq,nIe,Bq=Et(()=>{tIe=Ze(ic()),Jf=Ze(an()),rIe=ve("readline"),Iq=Jf.default.createContext(null),nIe=({children:t})=>{let{stdin:e,setRawMode:r}=(0,tIe.useStdin)();(0,Jf.useEffect)(()=>{r&&r(!0),e&&(0,rIe.emitKeypressEvents)(e)},[e,r]);let[o,a]=(0,Jf.useState)(new Map),n=(0,Jf.useMemo)(()=>({getAll:()=>o,get:u=>o.get(u),set:(u,A)=>a(new Map([...o,[u,A]]))}),[o,a]);return Jf.default.createElement(Iq.Provider,{value:n,children:t})}});var vq={};Vt(vq,{useMinistore:()=>sIt});function sIt(t,e){let r=(0,nQ.useContext)(Iq);if(r===null)throw new Error("Expected this hook to run with a ministore context attached");if(typeof t>"u")return r.getAll();let o=(0,nQ.useCallback)(n=>{r.set(t,n)},[t,r.set]),a=r.get(t);return typeof a>"u"&&(a=e),[a,o]}var nQ,Dq=Et(()=>{nQ=Ze(an());Bq()});var sQ={};Vt(sQ,{renderForm:()=>oIt});async function oIt(t,e,{stdin:r,stdout:o,stderr:a}){let n,u=p=>{let{exit:h}=(0,iQ.useApp)();td({active:!0},(E,I)=>{I.name==="return"&&(n=p,h())},[h,p])},{waitUntilExit:A}=(0,iQ.render)(Pq.default.createElement(nIe,null,Pq.default.createElement(t,{...e,useSubmit:u})),{stdin:r,stdout:o,stderr:a});return await A(),n}var iQ,Pq,oQ=Et(()=>{iQ=Ze(ic()),Pq=Ze(an());Bq();wB()});var aIe=_(BB=>{"use strict";Object.defineProperty(BB,"__esModule",{value:!0});BB.UncontrolledTextInput=void 0;var sIe=an(),Sq=an(),iIe=ic(),rd=Yk(),oIe=({value:t,placeholder:e="",focus:r=!0,mask:o,highlightPastedText:a=!1,showCursor:n=!0,onChange:u,onSubmit:A})=>{let[{cursorOffset:p,cursorWidth:h},E]=Sq.useState({cursorOffset:(t||"").length,cursorWidth:0});Sq.useEffect(()=>{E(R=>{if(!r||!n)return R;let L=t||"";return R.cursorOffset>L.length-1?{cursorOffset:L.length,cursorWidth:0}:R})},[t,r,n]);let I=a?h:0,v=o?o.repeat(t.length):t,x=v,C=e?rd.grey(e):void 0;if(n&&r){C=e.length>0?rd.inverse(e[0])+rd.grey(e.slice(1)):rd.inverse(" "),x=v.length>0?"":rd.inverse(" ");let R=0;for(let L of v)R>=p-I&&R<=p?x+=rd.inverse(L):x+=L,R++;v.length>0&&p===v.length&&(x+=rd.inverse(" "))}return iIe.useInput((R,L)=>{if(L.upArrow||L.downArrow||L.ctrl&&R==="c"||L.tab||L.shift&&L.tab)return;if(L.return){A&&A(t);return}let U=p,z=t,te=0;L.leftArrow?n&&U--:L.rightArrow?n&&U++:L.backspace||L.delete?p>0&&(z=t.slice(0,p-1)+t.slice(p,t.length),U--):(z=t.slice(0,p)+R+t.slice(p,t.length),U+=R.length,R.length>1&&(te=R.length)),p<0&&(U=0),p>t.length&&(U=t.length),E({cursorOffset:U,cursorWidth:te}),z!==t&&u(z)},{isActive:r}),sIe.createElement(iIe.Text,null,e?v.length>0?x:C:x)};BB.default=oIe;BB.UncontrolledTextInput=t=>{let[e,r]=Sq.useState("");return sIe.createElement(oIe,Object.assign({},t,{value:e,onChange:r}))}});var uIe={};Vt(uIe,{Pad:()=>bq});var lIe,cIe,bq,xq=Et(()=>{lIe=Ze(ic()),cIe=Ze(an()),bq=({length:t,active:e})=>{if(t===0)return null;let r=t>1?` ${"-".repeat(t-1)}`:" ";return cIe.default.createElement(lIe.Text,{dimColor:!e},r)}});var AIe={};Vt(AIe,{ItemOptions:()=>aIt});var DB,Mh,aIt,fIe=Et(()=>{DB=Ze(ic()),Mh=Ze(an());eQ();Zk();xq();aIt=function({active:t,skewer:e,options:r,value:o,onChange:a,sizes:n=[]}){let u=r.filter(({label:p})=>!!p).map(({value:p})=>p),A=r.findIndex(p=>p.value===o&&p.label!="");return IB(o,u,{active:t,minus:"left",plus:"right",set:a}),Mh.default.createElement(Mh.default.Fragment,null,r.map(({label:p},h)=>{let E=h===A,I=n[h]-1||0,v=p.replace(/[\u001b\u009b][[()#;?]*(?:[0-9]{1,4}(?:;[0-9]{0,4})*)?[0-9A-ORZcf-nqry=><]/g,""),x=Math.max(0,I-v.length-2);return p?Mh.default.createElement(DB.Box,{key:p,width:I,marginLeft:1},Mh.default.createElement(DB.Text,{wrap:"truncate"},Mh.default.createElement(yq,{active:E})," 
",p),e?Mh.default.createElement(bq,{active:t,length:x}):null):Mh.default.createElement(DB.Box,{key:`spacer-${h}`,width:I,marginLeft:1})}))}});var SIe=_((rJt,PIe)=>{var Oq;PIe.exports=()=>(typeof Oq>"u"&&(Oq=ve("zlib").brotliDecompressSync(Buffer.from("W+l+VsN2haE7Qar0V7bL054QhwxTNgT99Rj+mqLUkdu7oIz7CvRmL+I6141rGqqqaUllyExgpqUFgKn6/xUyh6GpUSDJkxp3dgyYUXCnuQzbaRlU7ug9w2B0VqnVZOEu5BF68ZBvXqWJceJG+FMmj4IhkOyQECcytimFV5zt7waXNFX/wn25BIkEkYSP2cN9+Ycl+oqk2om4On7J/g4CQp/03+vt+hx4rkG6bI9HfBvg5HYVDkUI2QQxB2vT59aQZ0zzaeZKsUXQ5rU3p5P5aVI3U8BKm5tRX9afRv5jW3afy+kNZdsEya8ZK2aEIYhtM+PUQnazaf3zeQVdQWyVTJkMW7heX94iQ2DXqZoA15w5v5bqn85o/BXGjFKujB77S+muK7Bs3ISa7STiZSr+83m5O+4czgtLyKGWQAeyMzrIq3OcZmr/fl7Te4gds8dNAfUqdtQ6Gx+wnPYhCKZi0gprRRI49KFi6Wfrp7Ib1G1Y6Mybf05BVXVxZJOF/lRTocrTz61fSa+uCA6MXyx9nv43nT+jcMv4ouuQN+bnJV1hQpW2jNJLjZw7BIoA1zqD1K+a2cffvWpfv8i3QLGd1ZFxi2S326PLqp6ITSh3BnwYZ0lnqpp7lsnI9EWpY23rlymVfh1guvAC0UKiAObh+Q+9/8+P+957oUF8rjNzJhk62NXQ9E+nejA0yGUXG9mqMBUPWR3uXnT6qIyCXjcengq375eLmfmqg1+2p25Xp6uTABVoIO4gaDVkAN9DbZ8WFpvPhw9TtNY+ZzebXIADcyEi/aGteyEiqm2P6Mc3to6HPUhlu3Z88xlwUKgHUtHNcPmQu2Hi7pfgEUvbHw/8MpR2fJI1iUgohn7cKmtNz4DX+7uG/vEKtSzziH/VSiKb7BfVW9UNKk8lU1A81t7847EbbPrqEYgO0sQWjTPILdF9xMi4+3ddP/7H9P8/P+59rySxt+ZzrdmzYRBINhRb82fM6UBtiwZT9PjRj+Y/fX7s7l6iJs98nqqubooBtMAeZf4mzLUgqOMNk6AZ2G6OhjABP8B+/75p/355DsfKGB8qiORnOEbOhj7XytFVtc8e9G3ziQaaMwBIPvGRz/FZ8jlzqurcRhuAIjCG5rsx3/tIzoe5YgWZsoicSF+R7P990zKdoZzNlEXG+Cgm3rv311kA3dAOzTrOjryN7Kuq+34D3Q0uCHKdrIsU+iB2QRiBpMzIZIok61t+JZVx0EJyBkmeCwFdd0XHmTEynuoJYNGfGZkVa7+sJQhWz8rMwvv7vlS/tue+l0gkCIkCKX4btP6gP4vfNczbSgD0MP1h9B93VT0tNua959wbevfelzbey0Q582WijXyZCCMTZBQzE+ggAOq9TIBOQLQalPR/Q7T9g/J3RVOyfwX/OIkgFWGKcoRJWjXIrtHucVpVhFzDOK3692qMfQ1zj8Oq9r3rxb6XvV70rpfLWi17sWzr2zs12sHfO+FR+sofmi1VGDBmdqlJ9tIUL0IFnqd733TOL+dO2iTgXXAStbyf2qhV0HBr28DWKGsrtuiLuLhXS0vaabp12rPWYR2lc0RkwupSQk6I/c6vg8J/ho4BkKg9vUMK8IubsAPQjBoDY/P/9AdL6zz4CIHcicKQ0iTzArM11YUAL6a/u6MN5I1TbZ8UBpZgqQQG3fgl9c1mnRUIZ5SVs1lmTR+hkQhj2mx6fL+qxKIqTC/ZOMaDcI5lTlEToLK6YMcxvz5vJ5jb/2GOnkre3TVcICrDAY6FpaI5ZmQm7lE5U0Szen8Mj5v2ex2BHhK+rhM0RIUkQBJUW4pUAikQvGLb7nzodev/L67AHgW5r+yurtBTgDRKQgATWg2hNJEm3YLirqf8UjITyyvzvNv+Dz4d3n7RGCALYmwg4ARhA6RCqRhVC0AB772/W78IBPf+tQo+YNBuQfAEbqcVyezm/adwIYgTVJigEBFUCKKACxWGGGIOF1MYMoUCUwzknnw79///fHedA0+//xZbVCxRUYYyVERUhKEiDBUZiIgIMxERGago0eR2H2HY/AGzdH3/4N85h+bzX7BgwIABAwwMLMWKrMjAiiylgZXXwMDvyaAjNSg9NWgw+Qz/2/x/2Fd0vsgNUOQgKdhLK66bumhj9/XPD3k48aJMUP4pKB3wXSPvxx8edwxK7Hcy0anAn8KmoQ+dzDQrcLWy5vnf5hAns4vqQsLku+/SNX/K8l0rqfmiuzTNCnzXQ+/uRaJDZYF54IW7a9h8r5JZwtqZIvBWvcWqb3dbSirkB3s1zl+W+D2KvN9jNM1KJCEt988ZLroViHfHOJFpihKSyWLi+L2uT6esk8pXQTvgRRuXwRIM0usTIebjOyySNT7hZyIaw0saXByou6EFRNlW8r7oKUqFqiRgfRnNW9o1hBFa637N57XbPq28KdVYXAReXF0MyPjdCqtd7K9ZQzKkPeHEgFt/IGlD7nZClkiWYv+qEu4qklQRC8PnuS/qCMt5O4+udsV/e+ex3bTkwv4FRCv6v/4ibhb2gznGa0CYB5QmN9+k2sdAEiVaiTreoP+wgjxDrj4tP6LpYi/8/0j+z8B49g+7NoL6oErA8ng/8Vc3ly/F+65tiuK1U2+rkFVmb3lv7JGJPIidl//ga2Jqpk0Zvm+3s7zhcXLBAQYuxWsK1Ey7Sr4mxgFNoTZYEzcT4TphXbyBQtUHaxKLk6HKEMPnqFs7pdLu/KTOqrRHeT2S4j/HLqm/U3H65wTh+Tszz+9QKkd5DRL6arMF/KnMIwvjr4MaRLIzna+6/2JvG5qODsrKcJDny+GmfHhgTX0NxTWS1+LiG+FbYnAVSrldrBhk+jN6NS522jLyHHWCML2EI2LPsUwMGtFSwEbUlnKgloTbb1exETgrxsnPEfEKeUvCEASYdVy6lC5DK53v2wgxeTSJyPdzMbUaU9c4toZ/bCzIDTIEUezMWB2i1C5VmgfS7vu+KWuRFEIKC3zXE8LkrZOlIN4jE4OzxfXVL8BWXej5X5yZEEADC5VI9oTZKvu+qSQD1IgoxYJhWCYUK1Euar8fujb2vjIYE5ACvtSn61b7LkoV30dbahXTlIjI60qy6c2B9nnqkdT/hSdb/3gksZcvJ9YIech77izcF3hF58YQsvXQ+YUzRTL6Lwdk+CuVI4BP5dD6QeHNKTv9bzFlSNn9pPlUR14BPZD3h6+GGSv/2+Z+gekFCh2K8GsSslZ6jifQq8DyM43vUmfQH+HlFxCoP9KVEie+L3d0n0rl6yp0LTDDWflL8i7tLtV49BxpwtH2O1/p2NqrMIPguhJ6YJc
6r5ron5RmEqbQbetVhnHa8nb9wBudbYXv+5K9JqEoo00WR+7vUpx+TnJ+HWzwmQvbHbcrZS6O7u/JPI+zoN/yXI4zQoRfxxBiIdPXNl+zDDCAspCGUSZC8BKmL1r4spIzegmLl7B4iYsXLj4gyyT5R9l0X7OMMuBK6lIPMW7HETat6LDsMtljX4bBGqzfIHyLEn2He+goyKOE/1/YQ+18oMRPesj36m3IYWXft69qXuz01mC7rONXllrHNC3gLvh3MMgYsYQwRPgTQ3g515hDQpyKy394cetsBGLbxfykPO+uptwRVtlHMmG2rCcSalTWylgUHrzXRvBfYK61awPLQUY4vaTu5JEiM3NFBd3jCFrtpTyfdEDmGUqfYxiP3t2Q+/AjtG6ya0RMjtaItIQPnpyGHFlGAcuR5lY7XWYSQnlOeSxT2UYl6BbOzafjvHT2dax3djYTy8vmi+eXnX1pmFvKsSF2wmUspAcsa0b/9J/vcJvqVshJFjLaQjipL2wSWeaVhH3SRQS42EMCSQVz0QU0ysjI9oZ8qJXlT2cSXzKTtxWyO+J2s4z0xa5mA8rAF41uA8pAlnMDRQZRRqeydKIkVBttLOiJj7otVMjVQQBaddPI5QHgog4wLycI9RYYs3eiijB1fHvBP3Dst+umYkT09ZdJLfoZaHebwaUj2p7kkNSJrdfl9B3FX9IbVao/6KoCp3O+mNZoJ1fprmGmVoqfpi/Al8sL5JeZmUkRB7Sb8bjmR1ZdlMVejVYtf5KaymeTXiOkWjG68u6Twlo1RWsL56Pk/EqklywRmP5gy0f75H8pMK0ZaJtr0djdTiUKGw9hHkg03crC936NpB/CrRr01+7Et0u500u0XxjAXpre3ZIKtU+e5w/3eXjCc5my64nOqA/JJdesv3Hx4Hsmn04R3UuTVrKUJTJmQpFI/SkcaHoPr/34wZoCbUvYH5PQzmqnT1xwm9/J/9BI9itOEIOgDsTRscE1agnzLTKlOahqsFmQ+Ync8sX25YzauS/zb8nbWqLQOCy6wqYt/egnXKlLNjbqG260/YDVVYRODQxHq4j7AaK0a4tLGG62wrJxcw56SwjZe0Ic/E/H9lfFxQJTeipszQ2p2dGwzc2zctCM9PIwC+tY0hp4hSJ/KYmJT4SLJqqUmn+dqSzBc5/EUhOX9g9+2J6gUxZ0SIt+nA10o2oMlmjQDv/nmvtrEVDr3syG0POBda+KqMG0OuT5khEU5XBsWJYkKEKJrdBfFvosyDDardMK0EU023CdgiYWY/TihuLulzqrRiPC4Cqwzwx4pfEYw1SafqXLCaadWlSJ1GGKSq0oEb2xXFBhPPzRC4a+c6OR/zEPpFz2P50w67bub7fO75bL43tC/F794uDI3y7p9UuyKyGJDxYqL+eUnbbSbMVw0LOj+0DJ8EsxQZXYVfrh7/i7uyn0gzUChIj6E1gM7qxdRSn5IEYUBoXjVMozTGy4MBHikbe5L2GIBgGaaEEtzfQuinzuugZtt/uTfiuqNojvHYWSKIvQNDODw5TJiqIgzyZmBC1JoiY4od9Ni8fTMQuQ7RQ2V3ok9nnLHdhgBR7YjMUeJEz9sNQmvb1KsqCiKp5hGodcNPn7UJTI2r05CQnCL0w8fhNKU9DPrGCdG6m8X0uYZtF+0/5IFovyh8qzKAZI3hpCHZeB8OuCFMsZfL4+me7RVt1mj01XrY65k5ULP8kXpVDZVIAsuILERqVAecR6gIrOVDrRuQmTapbBx8fgr4P14BSj+u6bA9lxRoo86hRcp8NeP/z9Yta2c9sZr78q+hAYe7KuM7Z9NIxt611mikYFqilslG1xHV0rckjYVjOMwUcLRotYozsMya4MGKILLONIz8EcGj3emhjTrYftKOSd2J2yTQ0yoQRTpWsk998arv2S63RNGYaosiuq94pCCiQOtPdF7IzzPY4mzQ7m5XXExVX7kG58PupTrJKvlljR1sLwmVnrG1TfiNIMZ4MC4jM9t5ZQysGPhPWT94scztXsDqlrv9lsMePMTZK6RQLk0epHE4HRl1EqpNnrMlZHab5EUnrExOxVs1RHRGeWZTIpNe0qO7qS5BCldq/kw0B1e0AK69zs4S1SaxGWZMf6iCAJVsCouZCiUj2q5aQWqrTfcCoWLbuLZyjDCnrIMtqnwhBMaz25LGPcGZhNbJh8lzkzLzozw5zKsrfmE1kdTGl3cH2yMVEKI2cLkUlTfdAE/VTcQkQb7Ett2WOuGO19yT7eRPPuM8zAm//4YW8DmzHpR3OfdA1NnVh8IDKGt+hxavERb5ftNrvxrRMdyxrKjJliSGay1DUvP6SxHyArLO66EdJXTEVpMxpPwS2N+wHKUCNUnhzN8yRmHY6rnFSoM84WF9hISdBijdAkZe3ZKjUwkgFHVmMjd9SreSP+hQ3TWk88FkYsN3kPc2apUNVF8RYp3cXcXMmDOaVeYvrrkYidtEeuDZChEBLucWlUZpsQpRU3SxI08p0KhVUtmGmejirEvDxm9anxmWb/00Jqv5YjkLQSRVaWRhkUokta6o+QDH7TQJYHsLAwg6rMDrk2b/M4GiaqnVWc05XVMHJ7JYBkrV58hOngbGv/M/MIWycCAAjzvLPnp5XN5X2WGfr1XOabMMX2syRwVfBgCYShIp4B1bRUTW3fBQvS4x6PdAKFnPmAR/oVyTQ4+UAstbw3C9TTtVzUgY8YLMas+1EyK3W20kO3TYVUsnptmjZ8C+/Jtw3NZMSVz3UTZoOosAkOi8PkVCkFTw3njRuOcfoc7e6w2Oatm9DvRwqFYf7GlysBH3CqQ2NgjYrldqYOEabhDPvPVFoEkfZH2eSwt3nwSz+6JY9Boz+gXD/JDZMsmUy8FFdsXTW4z8fHCxZxnoPyR034QrJuqnerKvGjJ6P5TD51Ug7oI1qeKHenOZ2eUJzSLRoDpf9DRf0kqo3nKaCnYJksLcJo3J11rt66sA+1We3M6eg5lRiWCUDrwk0aTB+o4b0yjK1+rFCVlTDP3W7CdYE4gzHp8vvEFkGtvesMQRCF2ptMrpnWHUjwdI/1rQo+yVC3sntEohYJI20lOyhsSjFfyGKBtB8z/8AaAgyANr3XRThVfxzRblvhZu7e5WnMciFwyRrfJQAn2gFFHnV2OQckwJdpor/b6ABWE66UfFljq0ZaHc+E8OONVWPVS6qgCYsoC/XehGVksT0BhmtKYsOdJwlPGkQSFuKCz2BxjoPmvMU5Py9CPmKydlksy2V0t2eNZOA+kpKVKHrFfrx2EkQTc6hSC47urdGSZP/sdL/6l3aADKXlgfKesj3IXWUhRjhFG2eD7K4+o5rqrkSCI7X7BzNC8bdXtEXI0oNJMArXxnD0EvyZeG9/ccgvk1ZZvGbcOmYN0cVQFFipokzSHv1IIFJ12Atiql8taNIL9i1iHEECP/J7ri8FGVLOcktZtiXilGiaMZfLKbns7eF5YFoWt+ok/843MjpTx3ZdNqKMMp3aqayo65+R0QJCqNna7+K5cHIs1eSgskirmAVFtUEebCwWt1rxPXJ18qenHV
slS2Fo7iXEjEHggOnRgoEdM/rqU/vgfDwM1TMPL/elnuNYk8V+kngJG1UxWiDTqclPEybqtdtvA7HW8ayvnbkHKj2sThnjqtxYtJz8JyPuy/hbCRjpPhSl6z7SUj7fSGmGIwZglRGmMQXAKFMb7+pgqiKd02TNOt8r0bhLEnXHLWNZORjuPNw0jlr0ilxKIEhAk1Sq1dCGWS0A99XGgib7DvblYFYm+lYwb1BihlxgoIyHqAchZ8G1O46/MWEFdmZZh3/Y+dI2X2y0Esay88gI/0AUIwZFo7A+V+HkgLnOJb4zw78+c6e2MUR9vwDuqYOmv5I5VEgZntHU4wJkj3xdz0u7w+NXdO3aegqdLru2HkEzd2BrdFMG/MCGiCh8tM1lfPIJAZzZBEb16MPCY1GZPyqZa90cpZjt4kWC9FcK0azP0gHmYwFRBvb441WgMrXY7cZ57ORQvVtFxAhuYvPhilvW05iWSwoKPn21m0R9NVKzDuteUX2DAZMCJnw4mmjSFFpgI1+bBoSEE616J5O50IqhPyYNMRe3ULK3MzK77eT25o7NA3dLaFnnNdVx5jRHo5irH/sz78UD4pfN7th/KbHU+lw4v/4/frPljNigh0/uWT/xPlWoPgJXe3P21ljVHYM2GG6Nk2jbfV3usMOVtb2i7nC69XnXzYVg63vTumBE7Y3lgXLTOJPKUb0nNx3iywVGyWwDPHvIsO+M9bxI9vAT6xZ3qIUXHYLJng/rsHOAbVv3YwWGqwQu/xffUis25jTG6ZERMyt0qGzufNAYubiW5q5jDBmMgwYHPW7R+VZSNzD0VYHhYqU5UdKckpb9bx2NGkadLy6ra0/IiaC3ak1qry6QGdsJXhUKrH1oPjuLEJF8LtdrJ7oGignzAsFz/jrt7Icg1dqBMG9SEXxebS4gFFJluXv+6WBcF6Rf+E04NL9fpRp9i13vyVXVpla4EP01o+9nASwt4vTKuhRuQ9EkMQRo1zsU96oUJXk0RWhZG0MXyTfjYV2uEYIKs6IsHFV2MJ026Xy+5JcdS1aB7ji0QP5slGsmOMHDEGlFgywug8PB0nNmvXXd+LVsMI51WOsDEPWlEt2m07rfvPuu0cSWj9YG9vXrnucYHbwUtXKFRXm66bUONqq1nO3J8eq9Fsk717Ktty//5h5v96Mq5OI+3Yai/E454nqYhK24M5ELIIQ0aWqsmIIqYsAXVVdDBAvVHaM1hBGLHqrMx0lSi9IPuv3rlmYuYE0YHSia8+1NnIlLpOGp/rSTRrK086ZZNtUdCFqPFirrUb2LwafZMpfLYXark458gsSMfz/QvRLDIDTppWDyvV1PTkarLQWKFjtnmDxnxH788wf3UcJI9CjNW35Jeyd/o2c9RLLnJOGaYV1BvDa+bNeDcGAqXb7wa8IsScvaD1fL+GUirbkRQiAjaXCQoT8krc2EnwPojASSTWVlaAJuzk4e3rkMVXX4b90KC4Xi6IXoi+FbH1hX/M6LVuBiXHt7qdWKI1kN9PWkmywBrBn1AAtvs7TOambZ3Qol/kooVx5xt/0H+KpkRYVj1kOHtu6AHD7gUuz0jKePSIseeG5Rktd9s5ecX6COxZeLCBmESdICgayQV3mRwHbJ1n7tgK6GALiqNvIib+hRffGjOVFEAnuNmzRrW66FOmefWjY/nZwvI/rQcGWwkXh4TeivWp2Op5wXTpatZfRqMrK6BwevwmSrSY/+M3SJIs8MWQMS3CtPZ7sxYjWoPZlhHNtctubGA5VhJnrMzNmniZWlvjtIzfUALoXngYRZBA/uHJnGrl6op7JuGm9WC3fZKrLHl4x3bdA6M55ynMun15AtJhY4F3GI01YlCjcClNh2gC4/NkFsWxPLLgRUdcjm+gfT2vzZMPTSZTh3Zn0ChXDpPkXNZKEsIxhTM34VBScz1yUTQYwUXwnB4l2Hx193uKVBu0nVnB31HJwdFxIiHbibppSo653dob+pEMAabbgZobFqNx7R5bSu/rb4fiLYj+j6Wxnp3R5uNvExhuURsd21J0Gn+TAEX6UTRziZ0X7c95CdmOh2U065CKenxwErbQDYquV2NPm47srHjZJtcpY4cy+uAm0Os+xQYNK5yOISH5wBoPf/xFhWc0YZDSB1DAHquazY59gOMf1bXqwWnt3ojDPa/wEzcX+ev9sc1yLVrfSl6/y1bnfHPDFykjwVrsjiRhvHa9vhBE2gnH4PFURiDPCulWC5dhLrRfqbb4yYhIvYh10kvt9JaIiIK3RTmi5QNu0HXXZkBgu1Vsakg5E+U9lAM/k6MeZkDP59RWvLAaXFRfu9r/lVyuQ3MANnlqFGk/IKVVfcyXb1yg8BMl+jSK7JC0wCRtdHF4YepVwv1KfvMAPucufLtesZZKPuiw0qXSdYcpo6ar1CK31In902UjNLRdxdInLYyYaT1VNeeUT4qnDCZAhVY5/t2ivISXAzj4x+kj5Y+vKX5z4mODx41SVIOetd9r8pcheI1qVNR4N7e2CsGV9vD3uzyqUGIRKzN9SLfBvUcjfSeGsiXx/udBj/4sWImgTkgjNZ6BS2eAhw+c2i28fHPbxmKR0rZxB/xcLp9UvVOJjiIjFG7aDddYrWaP3QoJGaDJsItiHOrTQI9BmgG9omQ+XudpGF4HScDDxb5E4/MZtwpB5gqLzZTeOQbW6zPgDNoxQV1gcC6TSzPLoMxopoqF/e7N8wb47BAHvt3Vf7+1s0D/O9A0AioXm8P0aXy3Elb/fxlb5E/xRETcaLS0Fk9fqrbqu9HFa30gzr63S+/ciSHwF6FGlW9I67heXTAx4hdNLjK3BOBVMNtMNUgbuUzoGgsfcYbQJAfPqYJzmfRjDX5cqJaH6ph1EoC8hiQPNcj9RjHQL+8h5JAVO3ZdFzJSciR13Qou+/nsBDDZDIr1HwUPRTEzuAqRQl2jtT3FHOO5M5uC7g3S/b2bmXNjAb5ldvwjYYIfLJgAUJGH7pUR18255DAXipYs7OQP/BbL/BfAYfxG7ySGwjTPwRX4qfbRxH7nR7VuxAG6UprEabwDP2YoMC31CN2iGD1FCwbfN0/zOwoda2ADNnFzifKCuFRp/UhHw/VqPVzkpQdf9iehuAZKu+8h8GoszhJXzqkurg2hVQVtOh/zb02dxxk/HDU+2ia737RYCU5s1RNpB0sMwlwx6ui85upV6uNMhHjGBPL41FkKsEepBgkY82REc4/nvVrOuSKtyGLxsPPIq6Zct76a9cgsMC3cszz6cW7MIKesljjvb3ayFwZ2eyva+xz413OknxpGlah71rocM5gvnQ/L9bLEiHOb9tIBeIJZp8bjmDtS0hb8rLEk+ylC517KRCOi2pidvnIo0FJ5KeIMv0np0K3Gt1nnPkMJMaFTcWUYsLXVrlJhqAsvy2cNTT/SI8QtWHFpjf//OcJj8uotT3DxhIn/uvS416y9LtpOo0/N8AStGWJxhvOXGI1b4tP0UzuiToGaIpR2ZyBqXEdE4hk1CjL11qwcwI11EClmysWk5jfhtMVy6dujOuF8t6muqKyH6yr0JqTT38Z
VFnR8DTqSLDjYjdrfT7Yfxoad8dq7XtxU3gmD76/ItjxNWTM4xDnR9wdRwAKArhcrmuPh/BHv/lIK6un6AaJPXDepiQqkMPOq3pUF7AQpxrkvLgEvZLmePuz+s0pRi0zalc8oNo9aEyDfNjZYzxH9JuUFBMLlAYSDl2v48uPOOkNW5nhwvPrnun9TpKfLctAodBzUk4lbQdUxvKVvuP3XjXFkXhb/C5olqLcTeOpRvmbeqIMuWNm9gL36O/RNUQ4sEv2O0lVLsLH28zIv7r7yqJOWy1I4CqzJZg7+YPahew/+QbL8eMA2S2u1hLBe5zM85navMkXyrP8nsIes3OQoY2c9gtyLX+UV+zpON3z01e4iso5ylTsTfa16F6/c+ye48l5lNI5+Mey+M53DLFL1GWW1hPGfXN9qq5uFDpaDdkx2LGEcan3X5AG2EkFm9iEcnXmJXDvm7qcUE4fftf9Ca3/qlGHg8zHuITVuP8IP3LGDmB7M7qEXUDfp+cy6IuOBvaVmKM44L85MjoxOCxkU+zHUpSQm2tUrhvb+3xlwpZpnnVB4ZB4BvjcN947Y57537OWab6qQjUNGND3+cSS/LcqHIYO7o638hdESlLg0fgn8Z4xOCsW7ponqFr9zuA0NHpOhy6Nuhvn4SE9nju9L2BrRe9I3666gZX7hE9vgjm5lIMTvsrFDh7no0KQ30pnDt55EoetYReMaPbB+ae9FeY7b4zZj/LVef4KlxPJLgz+hg60/ir1gjXmGV5rWJ5r/DifgUnqFVw6NG3nMrVtZwdNTOLSyL2evxdeJ+wnCSbKg9fc7HZc6S2P+3SA9z0nfLuph+8vCZXe7LMiiAR50XEGWTYKwd6Izc05kZEUGZk3fK8M30+pCwP1FPO86Rwx9lwZ3DEKIdYUTurIY1D7LY7iWWwPJe8/8VZY1TBiBcm+5yfSsD7r428sgqT3Ckii0exh1GTiImpY51M8ySn8uhIFAIlwWxDr+LEIkLjWtYtrRPmcjKNUiSQFeU4j/b9sCxRjB1W9RFQpPw/PUdJyptpFMs5JJG8DUfFwgxALgAtbtDB2pB8AGVNXpDYsqGxTseTOj4BVBC9yUvKBohMhKGKzFEYTxBzQ78OmvGjDxwPdZSxKdQyFX3uKmc6T5EdEuzDorszVzOxDQLyqDZHXwDnenUNdriIU7ZWzFonse+3A6TpknBkn+RbejCgbMoPc8gugap5Lkum3CQRV06AcQHDLmPrPj9e2451QlWQHPli947fSqgXNg4siGcs7A+ePg9CakuADdD7TebEOrjTVimY+EoBtmKkPOrbaa1iwFKlBgnH7fF6EAxiMhHgyeXsjzpuIlTh8cBgH897TSsS6pDuUEdi+vvkmHJGOAx5mqbLpJJRZqG82JhAo6AjbfU3JU9slFu0dEoSv6s5aeNVCbkweaJWrwboWhq6KiDa+h6Bb3UtfKgkZoBOFm3UKmkmaPS8nHro0TrOqJGVKWT4sQwTfFN3bt8fq+N/7zvtDIsoSGnEpvr6JHTyTPDoeXn10KP1rqiSrWnkCvqvKZTcXPz5tprVRe6Lrk6ArALA9o7LouzYycXFVbw55JFyLhxtS4Ur55uVzdbt6lJtG8lVFYjEtBaAX4V3esflYLTz5OSdhzegM8LQV3g3/2VbripnAtXYjOw3fZ4nI24KAlQQbToPwNKH84svh/yDiMEPMNkP2OLPFslVySJZr7c4gj0Afx1A2HSENx4lWaYXCeThjCAV4eTrRLpAMQfG9X6LqL8CQNfX0PUB8a68SQM8IrYBF51MITPr5I/XDAoQsQ242rFPZurJtNNJp3NOZ52GhuV4es9mT6BYIxFlbzDAemgWD0C2wFA+cy8/vreW72uY353cfHr/kLX90KzCJBMXqqZ3LfCRQA4mWKmqlSNrWrV0YIdA4PfZ7jmof7Dx5LzrVRW1pbV0nFdkE3LpIozjD52mY5IZZIOYqSzgoam6wF+uLxg/LBc4ea6Dq1deGFprwyjmaibI8XSxjSsFvcEyCfIk2GpMIwvkWfDEJpGJ+yEEp4TgnxXCch7b5C7wtQGQZgNg6YOt8+G/tbsqFLj9ukI1HZm4pMxAd1A8EIdCbVJMWoc5ykPBGG6RrCFzhCfw1EWvUxK2kf7OSUa6pmUrRkuB+jfRAyJ9PhGebHy0c0A3mteEaJMsdpefCQSRRNtfGKR61AQ63eM7dgPcx5gfJVIo+Doou263MTy+mn+0mPYrcZ4Ld3PNHSGBY1Mmm4czwS4p3rFlHGLxGA+LHWyPhwjaFAVXGtVLYi7JRk+ssoxo3QaPAbZpYvNz/2cTdj7e+YcndsNpfbZenkWRdmfr/V6OGo9IIIaKxHrrhJI/nV9fWxCnFTm8snhuSnDKB2hC1KOYH9Lq9LtH3g0M57JH6SJHzNfwoDKhvUe9vnZ54a2YZsm3i557Au4IWjg8gjkoR6Q7X8YVdTwPDolCLYOv1rDiXpCGWX0cCfkF0kW2ylkKymPONPLL22VBUPr2Y2KhVTdy2fg4xi41HP6F/Lt1IKO6h5Y6b37pfwKJ+F6NbQyyilMUCdfMCwVAsLfjEEWXJ/tGq3hDVIbpY9oge4dLPBC3R7YclTf25t7cC2+rd2GOU9J4UGad2Nky82Rg/j853qsZxUH9Z7yMyS7crdEoFSSBKkRuFAN2Rri+/Qk7D5KTrkjY8NrnflEtJwP+urruKKUVRkjBvhq6Tranmvk19uvk882goBEA5arAVjXs2PfA5AGQdlwjKQvgfaffNrV9nDFW6DAwc2yEVS7kzA9a/Tlu67MKFmJg+kw4m2kcdMrN+h8v2HyDbtZGOATAur7UGgkPdP05eRqnkBKY6pJ25+j1076W5gN9OQhs1MY8ZzzQBKgqbnDK8xa2Qasl3LSTtuNjnhkdNTWdCM1te8CYqmyz/CSXVxH2w3UnSZ0eMtuIqqOW2B4b0R94y3MylzeHnT6Z+nM9MrBlcLQrnR2KuHRm9vHoXHu99uX2NQL0WAH96jT0uABdH1Dd7tffmQ1GJDO/6erbEoO6D/ilH55alrh00rZTjhxVC4bDXtmvrsEskrIUkL4NNLmwA786hqeWTUYd9e2rc86qhcZjZ+0PDWolPjFPuhJ152q6Pvu23AQRGLg223b70x70SyHwUw1HO+xp2WC2H4cyEVgUctqTGzPneBaU8UO/YCwl3FLmuFx3VKXfTbuQ7inkgRKaemkGxq5Af8b9cirPvdoLFvVH1f/zEH0x4WbmlaEAQ7zF/X3a5lH7h8SEIZzZD7yT/fquDn+rN8j4LxSvY+8YiUJUOyrDnSNuFQ2aVRAFMOirsWhDs1fWmnlRil+Njx3QFTV0fUD9JS9+7e2hVSaGrmeX00SOqvrqL6tZfyJtp34SAHg90F81RIEsmXWm09Ocrnk657R66lkk5a+Wi208WVhm8mD6QiyECUyCWxZm+bUOhpUwBAj3Sv5T3yX8NteywYoGy3Ozo79CSYcDhnWk8txdc/4LsdzxQV26U47pOVdf+bOYdLoQBH4qcipDLt8N4OOp8SDT7Q23zecwyAlt
VTTPz7fI3jWP3gtAvM8WtZLiZJ4VLF6cX71OOnfWFTow3ZE/7+slRvVVrwhrP0kopi6JlwJ9JlNQQrlouxr3u/kiz/hWQyH7PaQGORnVnuHkJ2cjkW1+IkLgii/8VN7mUF9z6NEQ8CtbC42V7keGysg6ipi881oDltWVryFFe1oBGnZKoyuj2wYel3684HejlOTQqOZV9aRk5WunuvLr9YmSn9Jwl2Kfr1ny+uSApiDhTsI9u4II7xJntGTY/1SKg8qp2MsdS152Eh75MP76oMhl/8Y870vpZ1x9BHm+BDtdjkCd0IN8UaiD+8qMo4s1IvYFh81GP7jhlEd/H2uYCJroksH6qYIcKr+ZEN0+cWxWMfgXvgf+Y7ZFazbkxsXHdXMjobwfD/S0C+AciaCbaf990ZidHm8oTVrlMYsq8o4LaykqK57Zd+Mc9ANupTswnXyWNu1C9zqpK43awQ1GoPSSXEvvKdKR7WyKzIjDPLzxaXDjOWtw8bVn++CCmIkZpNXu9x9WVgQmvG/BFrBFXD7BUrMtNXnJl/djRrHG0sF9I2kcTLsmCDoRiIbVXTw18Whamo8bHyG2WibRqpV98Yoz5eBGaeU5TD6zgLUHZS1urvSywhg78/mj9fqoV/xHP6r/b7AsfvjlNqeXNW80lQRmBZxu/QF2dKDHxhLEXfWaOMJnZ3lMjCIbOZAJpqcmUOr47MTvcpPKpILuqR9fasGOPln3DKzlo04Qcyj9kU6N3JCzaaAym1bJNVmaxBa3f2Ro0Ps5mKNTbPH1DCCGVCtuOjzIigIywq6IrgYotqmplGOOVtizkoeuJs85IFdhhJuaaAUxrjEsSLMCpnFKEJfPjQ5tT5dMJGWh49oeEPeQ+6FAtNmtwSclXB0S/HzvBO0GL5s7A7FGGXXF1k3vV40+Zl/nLJQ3Gxz2033Neev19yO8TSAabHgNodMp33PdHQB78+sch/3495su96fmhES6aUcsgA4dAjfyQ7prwMM5x1+J65cNMX3D+JDOOO7XPoEtYui2aBg5xuFkRh6DRUrnn3B7ScylXXA9EgLoSyGkhhkC9Bj3c7JL2nbtYi9qT4aQivjfPj686IfDs1jYLtEuyu90+pWI8gWpYjQ8PqKZSKzpUOq9M5HWnsXoVh7z+iBZsYuNbLNxSVppMV4lYLCyQswuTbwy5TqTmKJuRmTA60WMD5KexEF/s4+3FdAZVYbCt+Rsijcf/ZhK0bNPntYBEZC5ow6hL1EdrQ507P7eUSRtTj8i7pbULOYSZZDYTtBfjGfRVa0AnqpRxtP5713HRyT6nvHlEpIJi62zNQPULvWqpkEKbss5LKnrRAtAXpyRBr4ZxNuZpUUny+AmwhOJRzNzTonGiiHYEnGekC49FFZ27zVJXMj4K69bc79y0P83u4zOzfs9PbpI7fwbF/DWfYD64GGVhUGDsYsv3qPRbTgr2bodS68NHK+0fG7nFDbxjl+YEZ44fIXsoLrST6/VOEJ95gWtr1rn9vREWSCWL1OlIRSUDVEJXDPLX51rGTYqsb1GteTClnZAe/ZeWyx6nLEZ/hAUnTrVC7ZrRSj/r39rQRo8xhYbFrbCNTPDceStBTmSOF64T3soGUd9p5JnXCG8CCD82YSeaaygK58YsK12R+6ZLUAtasSELQBr2ISE3AifUAeFMfJi1ptKxKtPQO4IC0HNY3cQ/HKcTMr4pxX4TiKODXzSWSLqJTDtEG6gMQD7W2yHVgBPi6rsXP6uBDndDymBtT8Ua6+578571Gsc0qU+qv85ngFoi2HqJJx8/pxl+QvVryw8xUsMCPJ5Ugt7HJRRgFtfZyptKE4YbIBifZ79PQX+cGNU53GxMfiwzHzkNrq6UyaH5sptJz/TULXxtGFoDjE+1Y+EsPON2KaR3D9dCzaKn11b4fn4CtMZbdM2EqW0aH66eKkAJNJ+a8qqqOLjMSUQ9UmmUnTciCdnCu+i6GyocM4R4Tb2SQrpwjfZYTDz+RQIBEXKJk4+KFrK0Zi9URx0UIF6VrNV/v/2d2cYYq07EaclL3SK/Z0zf8EFp8KyBD5UvTGZUyXV19ltY91lrJ/O7DQdHcE10MHae7rGLUPWMnGTX5x7Fto1H7zDVdfirRqtLonveZhQr8abx4YMuCTRGv6YT+jRTjBkqsqOKsHd9+vGA5cG9z7mt90gfDBruNLN5grfMltbiiSo3d0KDeLO6ljvw55SaZ2M5vKR14XcnQ+OI9GlA0NUzsyPa+6/NBPt1vwHwD5a185zoaMYMjsVzK5cnnfVB9Tbw/Vg0ymH45WjeDyKHShaqwHDmZpdxRzSGERUzgscZcoHYry8Zyd72ggslYvd/zIxf+EWn3SUYUxU7OxykLSISzu6rhRaBkP9b33AB4EHMDDoQVG+iaf5Hd8BgRAiceqqt/kWy77LPDmDD/m/L6GKkX/VO2wom6D/+lIKfaejD7dIvCxxDEaHPdF8eNYchKgbv16Ja8MiCkwY+9+yALW9fWAeoB7uVUZFV0JZ2FsaRD4iZwSqKrmikey0EspXh9o9by7704QRUl9GHglNKKdmdqlB2/ibv+okqqRKEDXEfqg9djMeFBqFsAAo5I2KOrD4X75ZbenQEnKV8Y3gBteFLMQbPhg7/8CcFk1xTX169BlSi9xu8dU+b14oVDe3spKUjS88pr25WBulaf3XxSiDpseH2T9Z3Ooi2wU3Y8vhUnPlxf6MRvY845g70QiairvmfqsYicqhN1sgYY9K/iV3bjRzieIDurwslOjn4louiQcbe0s0IUWR63U6NEOfvgD0saiVu/MfF1tzuKpqfJPZoR+JLwP9D6+sn/mHX6JrtqafBRPCzuM1GWTE2lp4/yDdEd9y4aPAxA+XDT0/iNCA8JxL1jkHPYYFxWQYHp0MNimzy0PQ7z2wtgtMjHOX2cne/lxnFcjBZhfqA44NMnBVblAvlkZMbytHaF3+zSXk3+Hr+cI+qPAbpSPrF3ijwf5FE9YyZfWZeC+c6dMq+JoogHNgyw9gdkXh3neEEYs6APbsHET3zCRZ50CuZER+dUZwfkPYognsO81rxhYZRdbfQLhtB7EtrOJxWk5Mmc27hyL7WmjcyWLiZXWPovpGPCJh5Vf78tlSaXwvqY/ow8qvkrAYmuXMwbNnBYfr9qhkCiRX1MRjQWFdpnmY8w7u47R3g9OTfzNTBathoSFL/j+gAoDlXZ50/YbQstmIH/SnAHWbJdUiRiO7VPERGq1T/Qd0iGtsX4fwzsTuXsKaYIt33cCa4Zjy8V8fB//yrmN1bpsM4VXzxvVxnayUMSl+PgHvXpOcFJaFHHXO8zwpUT+6qLQFwpvT9WtCEwV7birUQYYqbDa/I43cE5oOUy7LR7NchV/hBYBzSERftaSrZxmXvJAfimOfZly1QbAj4MbCO/RV+wUvTXMNeXlrpLp4OU1K3ABTAd/VgRhyDLyAvGCuDp2Gh6mid88QMxp0c1f0lswyy6az75oH/xuMTyXMykqWPc5nHzRzs9QL1g7H9bjVEkHFRuxvLkqzGDw849ifZ52xLR8wknm
Ec8vIt0dG9wcev32SfE88Kz0TFtBl9x+a/kr2zGnR9eE7ZpmzHzs4gVEXzthzImPJhcsRyF57z7CmJj5vQ5HlxhInLkf9ev8QA+6FldtXmsxQljHgREgUJk1bNT9LpboQjXi1LQZoOqCs8Ky3Qt6K5cx0acGAF739lVnfXQWZzojBpGHik247K9n362wqLdrzBuz6afG1XMk8OFB/3iFAwz4Vlb/Yx4OfKFdR8UMS9iFlH+2cYXo6uB7inig87FbViwYdKnkjbIPDgPc6jOtjGQwbYMJ5XR+Q92EorjFYPOcpz3F0EzZ1j9T4M0DPvvLXUP9bA+qMvVzutsSsthKuV46ihOLo8x1iMzyN2rlk40uXE/gsuLOIkfeI5wuXd2ty12Vf1seNmYJdcy4g6FI28ARe0QeOGkvrsoY2+d05DTB7goymMpLFs9vBmEZMLj2GwsESEVc5b5ykyaaFj2iREy84dj5Gs2CFeUiOn1U6NZFVCe8yE1oI5HS8YfEbZUucwfMYohMXeGRRf+gUibWOSjaz1VeMmoCR9ohTaEwwTJM9L5JQfKuLS8yrmYfIZlndMEwaNGEgl6tss/4ahPE9vQzjWR19ymV1kW/mS+MvQa2NAW8bPvlGOp7ezjNInVyj6ePDN1T6ZS42UDVhIdbEyW7b7tgoM9dLGkEnhZ0jonErsBKS17uS/3FBB3+kxhVJ8xD/I5aFikW8BvupMl0t9RKWtTCTkaVbMm5hIU2yfAr/QSK8piTQwprOoWJFhvcUPdI604HErYpVr8td5fV8eJjvCKiHVFNwZmWEwJwsEhVSvMCwZSmFcy8yDkBwO6VDleUjNhcpwa6bnh7eRsBn9KcT3DO9laVSxwP1hUCev/Lqrcf/uc+zMtxWkZ4iiZ3BiuMIGRPF4aviM91c2S6XoCKRIquafGbp3bvvJ57/aqe/8MLvavTgQjNzvE2cuHVPo46/ELr+Klg6Ibw+GdaMld1V8OMC/+cpZmSyN4aIj7JO4Xpf2vZfSE+7kAEs4p60s/myjMozRvZdysuFrC/JGm0d8Po3ow7VlcGqL3mueqjaQwLC6zKVQ8FXkKhFgEJhoBUbHPicQz3fmwyYnPsTOZFV9upDec4/LUz14cG87Pu9G3cu5Lxk9jmMSs+HYZWrHxAbbZc/hazh8fREZ1yLyA6JIvcmwFJyNLqIZHpwxF+XnTi1yWtb0hDOjZt/IgKzeyLMtAdMmyLAOxJUMwWo0sy7LMVTB33Yje8ibPheWlJa3CMkz8MpIEAQ44yTugD6gql+w9aTRUjKws8dJoKA/w6rKnc2+v1qxhBLGFYE2ydAk5YpcZPU9hpEJAa62Hblc64tf7fua1Kii+c54c6gTPAit5JWZRK3NwPTSENeopWIF5hKSnCM9OVzZ5abyCMY/93EIGU904j7jIMdcYUdGKPs9RkLVoxfg6EEqaZDMRxeUo1dvU7TdOwpR8Ko4zA/qNtWlXOcLArqtiu0zyBVu+AIi9INsOpRA8LdHS32IAmfyCKZxvC/m6ccByAyM9adouL2lJ0xLnVC5Z6uwNDcOij5VPbOc7WxYYMAU5cS1pttbVvFmt9mB1IYLO44dxy50ZXmyk/ZoUkZFmwvtdgoy7/LKIKx3CwgZokl4Sqsb79PO0h8sgtYsWM/8c8Tze5VheWKJA/U/GuluhlLfjkaLnJfqQiSHK998VyKZYXhKHnA3EJDpv8Th1hIElbIIGQr/b4EKPmx8lSrduWi++G0f9TXGQPvPkjg5n18z62w8KH0EdxjAMKZfZtINfWKw5CAi3wBrb3et2jHzgtw3TLc4S95vh/d3X8CyJTtd8J/MYSIuAPPUaGzCSJJjR+02pxKn0wQ5r1kixjHB35hTxHL6QNmFxzUo7o3YJcXAeEDOdvGF2/mLCIsUeujsuzR6yBUv/IBhchwBwmMXQjZRghf9MKSjqrCJsSvR6+GmunEX5tSPYF3rHJTkFd8uumK1whVBelcGnZMeRwf2THLyYov1MJBDkJxaWb8apsdGqdhwbtDMOp+kbY4JzEGH5Etlm27tC5iSvh5kJhEzryPE7TAtjxWd0KfuBd7+ve1MVrSAECnbxe3RO12oafeon25IlYVgQ0+iQeHE2FQiBRVgY/oB9ETnLb7Tx3ODM3IevRWmMg6FtsBCaKFP14sJnkaeC8Uzg3ADp2NwYTyUQ98RO0gvA0Vi4c/2BsfUzESwERhozDWM0cyyKvRqfSck3Y3IWOV/XQeSu/ZqxP8q9hL8rdEcNJ0Fk++M/6vCOhS1IZye/JyTleZ3DnDd9cDMyMDKMk/Mq5RB91tad+YkP2EQDG4h8/gp6ED8GNbIr2eolA1dcQSTELsqlckS5LhjZ4QpxHblwRADJZOu4KOqgRJf+Tmdj3DJBPdi0iZEU1RvVdML/6WoJ9u8xNHigNFP8S11RmDLS/lhoJcx0/PK2NlwdqpO5cVGR4rbRjWMo3ZqiT5792AfhN7/KKPVWaHTjNIxgsXylpnRjXxCTHDKtArJYYF3ebM3V7kXZEEogttuiWLVaR/4+hYK8uJMcdctUphq9zfYPA/H3qV4SsF2SynolXUqMldSbpaKb9mrr4FTOyWSvKDNvEotFPVPO2alSxcwz+BbfuJ9HUZUAgr24N6xIXLEqfPYs5xoTiWSZNuZARfXm9smNI8FDC8aXwne9QSyCPXlS4L84AgqA/lE4jj1iWMxuaaEJzludcLKpEic2hCKVoTiNXGvzqnmjOGk/kWmVEj7gt6qOQYq28bq3jzdw4avTMWUtfVa938KX8bEIL1Q3vjfz1PS0+i8uvhP5gjfxWpW5OhS4ttFsJ2Ypd8Vq2bx9vwKD+ftmB/wcDqxqkh7jbYAeNFLmkKjIJ8AHCYp4zEFbPk/pBFdX4mq+x5/01wqJJqmsEOdcm3pxFqJqId6/oVBE5Ot+rD4LZVmX/P1n1i/+CEGLTwlQmSFRU5TJRpWgTQnmxVxUalvWzCSwF0rwXAg6LpM9gYmyBFNKNXrM4jBdL6oNX31a6lKGYOMWffQdvUYGRf06W3BCHYquQEeyN1h+tv++77lmCyTemmxVeOziZnxy5HIV/554aHooGni60OjVpM3bFmxc8e0rPf/sIMhHFKaQdUyc1tJUdZpz7caUY7XAGlCIiqW02I6868cLbtP+JsbrbBrp0ugiQgfuz+Eu4nvYwS9M9QrvWpGOnOZ7kK0BtPb6O/+2alK3U1rBGvl7fn5hYuPEktX4QXksvaYWY7dKNoDZs7Jayabkx7h6GBhm35a8EP6Jr8oZuhCko8UQSFSua5pjfB15/Lq3R/7uxUfw0QAfb8tA/8Imc0DDHm9zPHkurmzTaD+HEPelAyXW+KC4QiTH6+BKYF3t5PLL94Vvkp494ri7c9kJQ9qXbjnfWG08pknpuucOXtS2+nGjjUTQ1PCFk1pDeQbX2puM8sx/TPExYn3Dd2+xoRY6t6HJ9yTdhmIP0Tu5wGr3eymHnbSctPA3QP3Z69L0KPH8FVp5t3au9MjJ+L13hyai3XWcgtwBHOkcw5Mc9Kn6KDLw3NnOdd2Q
ryakAELA9q/xIb1xFDNUHddjXA8weRx1TPNMNoNCbW6oRBbKDWMtUlYGdrMzGAtJ1v8bIlJWfkvwuSIPBSGJ2V9jen8XtRDGCzhxXEaOKPXf6mX7MBEbt0Nr3rt8uHJMEq4Rchl4pM6I1H2I1IPyqWB7sdE6KvVWqebSLLcvOmt6mqKmaK9N0V+uum7uHs1P+tyIsQsG7PXy432b2zs6nFReh+ewxhwKk18khQCY58JPzLAPqTJL1eutxnhdNBqql485M8BCxjRaqrdcGb5N0jlyssMO5RWbY+Z3B5mSw8TN/G0zO1jqZKu3nzqwkErlR4gZkC8n5TJpFXtR+W56JTSo1RbDi9P24YMAx16c1NmPsZ4zR5LEOx84TafcvYjHPFjLY56lgqiETCrY954VPdn/Zou9PEl4JAX/LRZvI92KozARSMXobV50xLH5AovzzZnabxlmgxT0UCLm7FQsFJ5vyuq0WOR7GaRWW3qVdMao+7U20Mp3D0iTruur4qWwzASsHAcM23kQ3zFreImMIL07xIBC0XE9/fNf6CpxMaVOjpo9yMdaUfzW7BdTREl8MWmlaQDhTSJDMlDtsrmBJRgsZ1TneHiPKP668m+lgCXD72urLRJDnn1SKz5tO19pu0bo8b4+XsLWF7nAssSfU0jcSwWAfpu9ppt8UvlWgzOaUvKb427yw3GH18K8Nlr2MFKBvSplbpyKn2xfXmaY0s3R8CxtxfVogChtWiQlaYA8TOU8EEtRQsl0xaZS+vjWg460TJLZp631LcFartiNDorYXdoxPE3ZqZ47UogqN0c9Jk+wzO+TtAmP8W4So7AA4U19VwuulNAJBWyii9CjXiyii5dVVcxit2SO1IVCG7nAJLDUI7fz9Ndw3H50oaePm5jmmsCZW9o1bv/kbpsSdAwwf5AV2RrzRC8Csn+r+iPRCjyaNnRB4cFhN/DSS83aquML4pKYXvqHiwH65styYGTUNSrdfammv6Il945WHnDyRDyeDhOEkGgiJhpOzxYT0h+hJzcfFH7EOlUjQAKu+2MiJwH3tOlSpglLvTYVfcX7X19yUk2GE8vT5JDiM9R9XvLCH3Fg1CHI8FIzE/DOBb2GtSWCxz4R8120ufb8UyWNBAS+cAqWoBTulL0XXmT4QyQ/O3WRCh+QPP2e6vUqh3Gm46MrHNWS1rD4ssttEByGqKCGrbZGxh0fKACJoxfzLQlpMPZVS3umgd4zqneBBa1qWq9TgZXWuGH7dAM53RIiTjtk1Ef7phiMMKQlGfrq55/qx8b8x5kAYNnhafaBNeS38l3CzjCFNDGUqpUfTIQbwpAGqQiWtuCpYUdX4amCFvg+rsKvzv2sHyzzcprMHlvF84LQyNcg+rjCW7TAxvOTpssFrxavpt/JaunBJdmFo/O8rHiY4sFfll1kuQhNb+GigNqvAFEXTMtxIYXktQ2Z4pGS7tDcPT/2k9yMKJSp6Go9MoWIcU+Tdh3PR4PRTrygrh0w8cw+7rsfMP3UdcCEGQc6Ljw0I9DVLqGU/cv3ES69XLJfgqYiNmqOAfWtUW68AOsToIQrvMJASpR2rDaE91Gn24AqpAotMnzvMHF0nLdSRz4fP+fXvHKQdqTTLSerGUyKmyb98E69qQw/hVE0EhRv3gryUBpsplRD61hcUYDdjBgU7NspgQ8S5m41wZcZFr/yzonvL70AloI/HOjzYRm5epzSKFiAt6imk06jVFASoEMJSETBcPxhjz9/zRTgZUmnLP8mTXHEBUHCt3Y3LT8HA5Zh459yky/5tpq0auddaB13PTKH8Py09FwUF/eKQ3cJ8u+JD058X2nAsnt4eudqZ1L4E2+0DNLnbSp3c9rOBYxNzB+O1ID3iVgDrsoLtf4c01fM3zhjcbNvee5bHLP+ieIeVBlTspDpoi6JFyeWpfL3QYGDGBcHMfvs0TEDjPIo8hiNkk/7WBhMpqCk1H9TVLx15jMuDrLzUdrFITXbktnbNW02zcSDqNNURS/Jb4ujLDXoqo/5J0B6iyOb3nZ9Dob2+5Awhzm+6hsWhrgGbH57vO1gnkPZeWjyHRxzGc0N6ad3aaieyF20vo4eNYaFzF0N6J0raqWXAO5exwaFblLah3ePx646/loZ2i9P2NbD1BZ8E1AG8gMSmY9i28Y10YC5JJvECpmnpQfBdk5YP/iglxvhgmvHYihPJ1mhFnmEhEOB7vK5puIJvhygVJV2oSejiA6WnVum/X+2hQ3MB5Kz9BOnT4cHzbfcemlpOgst61ZZacpdul8Z32r/u1jGAIchSS6iKwUuikFffOnXqCuYt8onrCImytzNCMdw2VTVjI2rnAsHKtOBJrFHn1RrVSRexyvn6j0LFCjshFUdHw07YmThK70TRtv0YmF6GV0NhRiJim/gdUvdC574pINRwwuSSqb2FwrHALdDzTSg4zN7/14WKykL2f4uqRJ5LxTnJN+nw1J1SYFjjW2cPlu/AWnKtmnMy7D+P37GkHt5G6RJZiVQqW0oPiYlpjFO3RQb2l0wWjblGwWzdeit9RFuITb2jb6usCqEBrrSfPSTTm7G4gul2LHzVuHwId9ITwjSv82kr9j3t6Kp2Z0Jitht1nhRO55hbogGNF+nDyps+xd1nsIhWEJXrybelmwLnU9l2VgLqBqvLD1rG1oyOC9JKNwK8R4uODQX7g6vOwPZxgac90mDgzAu0/G547BvGCX1ZEtHZunukRE/BuoXcqm+teUQUi0/ztkmuhFEH4iqGEvS85M0Q9jt/St9BRC8thU4iDvfo2Hkq90cdfmah6jxkLwV2AQ1QqozRUr9+5frSzNrp+ZXymEDNvle38J9FuCR06b+3YopAl9XZP7kZjBTW0ECEs5FsN0qx3IVlyPfix8XiBqtv0rNu0h5DKaaxiAS6YvC0P5MeTrkmVY8aOfda4mXgeH/o6cE4wieXHIrt3ISMFWqcgxCaV2ekdgBzcX8ZOQ18H8diXspS5VvZshKIlK1iohw9ZbktZTl84rt954a4dnQmYXP+neRJZjR17SoJLgdIpHnWeYcDFIM222ARRADOT55FshQTRo2ul0puFcW+d1N3jUwTNUBJ8UT9EB30E3wm0Qe5ELYR4IJsUnF//0VsO2W/LTo89g7r+xPYgUdgmBrgIxx2IcQ6K8dmHlEpY7XJl56VubQa0QRwowUL935AkqLjumH2ezxuNhM/sKImvUwO8HCJ1NwY73AlZks2K0n4aLL02Sxpb9A7SXqvuW3377nbjgsgXTdR6q3vpcrSYdXjn0Us0VEsY0V7Fko05Mi2gMo0yReRByaLI+jvYshjX3fPgG0FzADRZtvGH396wKJ3RnOWZitOKR03rKhn5cVLBlewkHsYeJcBW/tJpXA8gF48CYNpVe6XveIvl6iCRtCbcYhbJox2v8FcdFT9CcOYYOCRpdsfwPfL/+2mAc0CHBEfOdhvxrXbCmvpc/4/T3m1iEDPns8/cZCo8WpgqPaGEkhnSl/v1HSGYNrgFwiKlwgWMJiWRiqQAALVGbi/It90rnEIl43+RdScVJxSe6EyL3
rQOqcYindFWIB0m2P2z7lTpL4VsyjJqO0DrEPoHo+8jtACcvXaFAOuiMKwZTdB/Rn2IsYi+9XF2clsWCKwklzpkJ/megVuwWSliC/+2u239447koO0JRiYpdeh1jJ9Mwd7ETc1jl5FJg+BMaHs+mblPMboFHyLsQiA5Nssf3yvRxvgE0ZRZm2x1NwiMKwA/h5CmYDDKwoDMav2oo7jRs5BoXRW8eKG7dCawdhJEuJt1sgXGa7ItwQsuUcUYMNAp5MxbQpK5uAIbs2sKKnu59ZgJcDl/ow3WOPonz9xcUE2f/C1l7DfLIVPKmzhM35IiikaNLFC728ZnELD1+bS3+/LnTjtRzKnLh6+z1aoZwZ+X88ZS9pmcb+dZAx0jTaDNds7S4W++cb2HRfPz4e/6cj3XKu4OUdC1rPztoWCJe6aMHWoEa1VCHDSpd7Hny49mYssJPznJNl0kjz1VAHghnihpCFharKFRR85ESGU3O4k8T6XffFkI2D25hwNHDBBKTp1dBsP9uT0ucqUpB7GLJaPvXHH3YIcZ+c9P0SW5rHHuwEuzhkhZp2sWiNhTQD5irrowXzVKNUkI1SQY7oEyHpKgELahtXRI9r8CU8oQzqqU9QYQxFm5uHT94Z1FNfB+E2epNrdOtOMe/xH0xdusaf9nX+5mz0s7DzpqRuZJ1dk0VrRkUZPqSGMvCqQoxULUA3/iG/4Ytbf+Kci/Y5gx2+za13D6QhEgvk5yi05P9i11tyjnFn6mjoDN1D8nlQdCvzb1C0Vg+IpDRxQ1odxAmpdknB3ciCtZ1YH2Vx2FKj/7au6iWm1Xto0e+mt61UAU65080WTy8ro1Hc/2IaABNkreK2b0HojJ5DBHt1/rl+GAhT4iGiuL+5qjs+MSx9jPVZoIANdfo1qstz0VYftbryqXA6vVA2oQ6378awrgIMFwpBRcxLjPs7RJTglVvMb+m9XGjMBBsWsFZHEmSy8dzsDnhhLTaIuHaTRbtDEWDhOnmcdMNXVbCCrNuh4GaagERzZhpE8SvO/eZZyhLaxY6gIvC6XxgIOzFzcDKHJTsL/OxboMhxNxhlvRxlgD2A4GUp4ftq5KmlV84KYAvFGM5+nPxc+X0VW8czmH7Yu2cSV9sw1hyqqOrrTU7NKkFeJrJYiKXbBbYYLnEyhAy1tt3wXBn1+pItMEiuQUyjp+1f0tlYq3UWZLwGSZx30sWGfBW4rsq3E+9rCpFOMSE3VRVDjH8gpz+XmfglFdNnHwyjuYVQgjzSDazvSM+efM6V3xif6U8tzV9+FhVkjdyjjQeohmWD+eV2jSGRBKp3ne+mv6Cy1gVqsvpzFxhcVE3YJVR/MN81oXjUWrSJZmll1cmtga+uQ6u3m7zetefv04CCMxzSFf/Xo+bnPkmYXIkbBConyOE1MqA5cJXGIzWhP+2Vfd53AxagiNQ/j+Xj/bWZnS2Kw8q83WIgwKgpKLEKulVHhrgh1m4pDYSKdPyo21I7fVUuOtsRVfRt36vR1gWhDBz9tTN81nq32Hjte13Pvh+peR+gnDiJ6HtIVQKBcYKIio1LLAM6lm24KCgggRhpYsn+bqaxmV3zv5v/dCiMYhnDjKhYzeaEL2uqLljprET36ZEyuHUoO0zRiFOSYrig8Cka0MfIcx9ouORZQsQmo6TJEFjlM0Y/4AvfKKRhFzFizKGOaGJG+Ywy9+WtvUN7YwVwxQaZ5VWcERzbBvd/Hv3xoX6D+oKbDIZx7q+79qqov9DJHl6AX1fUBePXAoOPy44DU07AqSZguUKH2rDzd1y54oYklQmUw08Y8B9QzG7mCX5DM3F+jtDTOTox4bLE8VzIdYtNGyB6Y8bb1C18pKHQx7vcr4qIpmdi1Bz/oMYBq9dfCeCY7o/CvcNbbAdOuUMnHBivHLiefSpaT+471Xxg2vqAnCeYCxU9Oob1629gmxgodz/tMmUtRu5qbk5lFAGRFWLbHdxFI/8ahUC92+y+vOgmWHtcEQGbrJwtY4NtNlcSxvrRhVFJS2mH91VpWvmCd4J1o+z6YIPtoVErjztA0XvfQNQMi8s9fswgG6J5yxga2ncmtOBC3zNE0yf7T9OTrQZrnMAN60fUfIrgcHZnObxIn0WNEmpabJem/QFgTc6Dn8Dzp5ec4IhtxO4A7tdNsg421L+INxJ3A0LkTfyCc3F/Sf7+ydSD+64A2i8V6EiEhmhiW1o3XQgqslTVY0uPn6aUVBxKcsEx5SeojnliAWMpfH+raFRYEQo76MP4UQdinwriPKnBaPb+BLu7rb/8EOlE/1v/aUTkNLv0+L/OA1ILthfvQfQiHzVkuE0KMzJUaOiAHPVAQgk6aSTFS+eoZBJNLFI/xCgZw7XRFl5OfEOOaMxf9rep/0UWUBawqbeKbnKQz8OmwUE15a5Sb0+KiYvBEgXFJl8yh4Qi3x1WnAjarOetwckFWQSE5IuGFIKbjCwRfMcRQ9Xc+SlSNtv/Br4O/xcN+qPYcP8kLvxXzWMd+h8eyH8W7cPVPvki/Gtt1+qU/0JS/eF/zl+Z9v5NmGnwf5Ebk5h2mua5U/8XkvBntUHtY1G7XP63v6GXVqVaUFl58EkbBcf4LR3z8IMFARrb9dfg8r/mCUEATfS/P4+CTJJlS/z8EEXooN9erMiPKSjlDuNilq7phDmv6L2jNNnpyuL0H62CCx8vf3cZ1kD5l9Zw3rPsz6QwOdUWRFAOGEh5qE45kDQKwlomLAXRsczy+L2OK03XckiutLSbQSsdXD+5D6AAx17ELgTcRkCXUrFuFW6ENK8gDcjkBiKIAhxHLt+cnCKku7ZmmjpfQWROuN0qwyGlsqwxPz25BzwiAR0yscSmvuFLOpb9HzmPuql6AqUGPo/gyy1khQohC2cBdB+sdCZk0TKzqRXJRxnHA+MKShEtgiV00ei4bku1VVCRH8Gc6DjMvQLWXGbCBYU4iIP4y0qT66SGk65YT3Oil7K/NNWxCTo5E/IAKpXx5bMXK+Ov6ypTbPj94Qvazy2D201BhuA8SLH3+z9chqN+iTe2CmKxnxFVF6pjnzuUjuMecpm+wJIDaB0HzwKzXGLEbEKXXBamxVtAey17AnOZ3V+oyr1ZlqKmeXjM3yHO/Id/pMC7eWGNL6kyLcRSP1NcqYwT/acUoBptCthGVid5iOHnNN8onr4RWyP+J2ZdKQTjPbXubhN60Yu19OfMWABxlMPHdeXx4dlVowOOvqpncv0Zi02WlYpeQcqCtLrTheem2MpmeZBJlFcyNlL/mRBYJ9SLHG4f36en4/v2/NYN0/RIZDbv35AUcAwQR7nhtnInO25v0NiTv/Y5pMGE3ozcyfP0jnK4xcppBTD7jm9mMsqPGFX8qGAdX+inSl+DFDtN+Dr11Fcm2I/eAoUy7KzRpHNwSQZz1rICtzqH2jNQW+GdGau4F004seDjGa9zauUD0ClUgdP+1fFNRSZH12py+lrIMgkgrNNgQJsA2UWKBsfhhk/ugJT82WNjatNoP9WA6p6yZT/jvkZNRUa05tySGOlcEJ/iqkzEWxgLFp2Ih5AQPYe5qQ
kS7pIu4wH+MvNCq7Qz6bmWa8J8cMVv7Mumfj2/ZDTPRof00NaFIucoNxz3jfKxNAlSffVDDOYFwUDwmdNuWW/VK/QsuXPrAxJup6DLzEqhf2LcA/KQgEaK2tOrAM/P3Ak+s7EkBz0ZNjWBVOUE1BG1FfNvRZdQWnL6CExX/06ipi6UieM0CILiyvqlDERt5cEzi2+/ZEAu3532sxoB9MoHl1ttbViooNU7wT67agC9lC1xmzhVrlI6lN7q3qtJdGQ86h6ZUZMEZq3FY2G4XkXRpkClXrpmNn5wX/3RyT8Zwd1w3aQd3Zr2lT/8eWmzKb9tUyv1be5Xuoct8X8rigbelWBxYJxG0rrDfU6MtSc9LcFbm+gPI+7LwocWeNvR/SiNFxPbtiX+dBhaI9qOtCt4tWelx8+CizbTbUa6f0ry1PZsl5HuY4dPbWYzrkjvSvCXA0Gd21U81AQqZPjmBy3lWZnOuwDQPvdbqef+/w5GvWlTZE3y8Hbw84Pm4+GJ/7aQLPfcbyHaqVIR/mjHObkpSMtPfNn639XrU/rShNV5je9RL3KxphcNTRVOt3kCVzW/56Rhs9c1MX5hFqOyOIq9sXkRzTr8JU42/BBrm7Toqm3zRxfOwkfd7JxPuqPzxalu5aJ5p1KjdPLWThrkvV2WIh9dtrKRF1dRXsunJulCP5qWMun3rlu51Tc3UT7oT26lW/3abZZmVe1bebIKX6J8s5rdSd+tju4yvVmt3Lf0y8Ah0h/D5EF6ZDh4yHRiWHxt6bMhfYv0D+XpYelOnR+dtP7onE/R3zt/9f/Zvzo/9bn1b7bhd3QvnO/9bt17Fwenrf/sYvEuy42z9NriyXrv+5+49LX6HP1fF0en0V9YH/1s8U5B+fur5BzHVZ6cfpXLN1lbam04JVm6aqlI/YRksUv9ho3FIfkWB4tvqeCus2eXjrobrJPpn8ZHi79faMLO0pzChNGiTFeNi6WYgsZkcU6ksbV4SpcNX2wT00FTZXuX7pqc7WV6c//+l/PTYZXru7xtDu/uUMhfw4Hl+ln+sIOZjiD/y4c/rb1VHwVCa+eqyEOV/mUo4vQmEarXQfrJ/vBcySdNUUJKKTWdpBubJapLgkXuq8ulQ/pKkNyD7lbw0ZgAUz9GXmUsfc5vHobKQ0WUcdOZszd+p3E2E13rIsB+CIbjQnnj7CDoRUl13JMN7XvVduXkhFCXfiZd94BGsHuWPvRisd/AWZKzNrNJ51DdYwdWv5omqx0zQoBEj5QgBs2DO8w7nflx7GXBmJMF/+BEfFjM4ryc8sD8rwwzrg4mgQlNF8xtMbHpAZyr3eecuXrRJwPk34Ehf2LMYYFCsc2XeRH5qLsRz4N4CcGYuNuIcvyxaQvpAEzrTqtQ3h+uU3gdlL3mCDbvDacxncKGwIc4v6Rag34kJZV/lOtLbbK9cPv40JnumeOUL8LOcs7kTwOtzSaY8d0l0Vu4AHw22YjrQG3QHkZousqiDQQ4jN1vjTq2XcJfEslHK6j78Mmp5ZGqAhtL9kSL4p4hapuzf2JIkeg3bEddSMhOL7k6Qv7UjtKMtcCDsYTk7BhYEypcG3YvJ9XpM4W+aJkMw4m08V+blpEGfwDAbXsMqBXkAVEyHLUAynPRLruWtAbjcWJHqABlPkmbuZ9d4+qAf5r3rtRJ1QnpbYPMxskK/ig7xh9Dj7YdI8WXEFU4obMNAqe+jMORhKQwv6rm4RKSlph/jApLGT4Dbvs3JI6ORPKVw6qsvXPo3gJTRWznZto4CKT3Z3Hn1ZnrO6MoFkbm2X/ji4HtCT+d2lvyowxX213SWil6ec4qRoTKhschdt/y5zDy+JY7yBJNK8FRlIWrKMilxrw3jHakmFwHns+DvLc8nZzZCivfJMdOYxkiOvoh5djIApKz88xO51W2dwBezA5hY1adi/MLe5kiYH3VM06ShOWQBCrSkXVI/LLQeVSvtrsUPofwrUedtE1FFQlFZXxrecnuAej92eeZy86XrSE9200PC29OLMEKMk8ifB2W9LxZgJX3In5Po9TrZvu9G5/uXwNN1Ghe6hKsIcobD+8+F3m5Cad01MSGEWCRu6gxF32sanZyh5DtZrGCYP30sg3pkT6fwyFnZBWRPJeqx0RN9jFr5lY6m5872so2U2H/iUVg4cyiih8+jNgMGzi6LnFZ2qWRImmxF8KrlwWtinmf8XgCYF9zco08dwCdluRcekDV3Y9G7dXQS/27GdCEx3TVeVuS7F1CFSpZacYp7ZM1IBj6Q0rHR2dcGZynyM4yvANMG8wQUfyRJ6osRVss0uPLMmge13p2g7ciYUqJGhUn3vGDcvXKE817XMtO6ULFPUKiREch/rHoBg0IgtqKqLpoHPxJVSZqADbY/7tzRHv2XTNU6w+IrD85RzRPjhiePP69MUZHD0zpodfndTrIVksKh5OQthy589b6CuBIr+2bn5doSowW9GY6iGEYhnH7c06TF/mPXfOz6Y6q+/Rb0Om6O5w9SxiIVMByPUJwFCLzH7M3JgzRLi96fbf93N+d1D2nUgfYoS1UmlKzW8Iky1mSaEWdZiOB9PbRl9EIjKigX6f4fEz1xGwaKa5dmIaClJDYQ4fIsjR03/vX/DGw7hzhlMN4EmfNM52Z0wHZ1czwE2y/5IMMoQzz0LteMRAVFjFK0kRxIqH3nrySZG5BIHYsBaas0Syoy9Is9vyOaQo35MjZQ2C+pUmQ5AL+ncgMtpwJzaR5fjhXMyxfPj55YT9MmUzbEbhDGJDw58SiOhOXhBPE497DtPGMXnybZ21AePeF6Xkuw+6ISBI3HjNDLp9hLoP7HEQt2J0GWhfjQy1kpXp+ND+SQLpNbyg7a8EGWPi9hiEj9bKwRQZT1ML3a9oS8yUe5jBpdvZ8Q7fPEDbE6blR9JHBURrRdcNMK9fNCwKvLJJDqdnjBWpKdPSMemo6yNy+i/Nw1sDpBuGYrOIjug8FXvpZlVyyoaQY546os/dWSfquOL4zhHHZe00cpekbX8m9au/HlPk3akJJfZXzPlZgxq2kcC9bFPl9nTrsNJzijQT5hGrU07fEm/tRvFmzDdKdR6TadE8k6dExYe6gfiCLvPWIm247Quvd0fv1FDCfnITxlnl0noaQHJLVsBzQsRb4PZK8szy+l6Gic6vwxFr4BFyqxIwMF97eZzRwlnTsuYQuACR7pM1rstFnd/qp4WGWQ4XmxUdFSb9kx7obX3rPoRYN9ORtoRJ/pfs065BLVAKBzK8rNgQMn71uQt4ZX5StY08D/Gf7QZNsTrPL+ZvQ0aGy+Lj53MGNmKwWPNkR0fy4skxEj6ovYxaRsGett6eHRZIhMqAzSWI8hT1KmVELdNG5h3gpF75nfkAg8nbu9EhcWncozf8cRC7LEsVGyURguLAT5/GORn64KDTaD/UHJEZUjyQ9ngdcXsvRyV5nMRFs5MoJ/RwS5PB2GpXIQEk0KVUdhy/AJIGBdt/pMqc183ecrYcWRGDMWNXmeVlkFr24/+wZ/ybPPQhc8DRofNfenm1Bat5Yg0CHbfF5NLECU3CBSlbI7
6I+YI5AmhAislNBkQ9q0snQbd/j+Gzm5duOCMFOOglB1ig3w0xQFH5nfljLf3YwcM6u0xNE+rudvCPXOEM6YtByjue5as5NizF8Ndv96Ty99NCBi3a40t5FQvb3TcfWuURvo+kTC+aAc8IH14m7NDZIciFFWHr5YwsW43veJZfnZj54VZyb/4X3YcYuz2R9E0i/NQQdGzxpe4IRWYXrovs1XxHNrV8JyLYt07QkjLpZbhL83NALXWj7MVHIxzhpDqpNiF4u3Amz6XwkBGfEnzAMwzBix2Bn/bK7C21JGi2AYmbWRPTJAgjPXczBpdvDJsqZoKIpzKOzk4sxdExgtaUvr2I84Gh/FYkzEvxrVjvPQjqt6V0ELwA/iqSERYs6v5jdThx6ruju8MwLzh7ym/Zf3ktnNTEFDChieakbB18RtHegAW/IMMwcBLaUd6IhE5SLM/l2OlFV+BiiRlOmJtLzffwESfJtI9dhR8YgQuSquKseqBJUjSUS4kZ/fxbY+6cDKmlAdguJ2gC5nmHsOcoDKplBbqqWfnuoyp7U5Yz3y+znNzzCo/NBkm1iLAEykAdSQT00JxLIGkFQ88G9VFchZMOeXJKOpkTk1ULmmZZdf3AC/OTWliHVRlmJuMUXqWERNwyTxBX/PcMm26d/Iu+1tLp2+Hq4WdAv9LByDeIQSD0ZFNecSdz3CiNVSowP1sHQWlVjtXSl7YLdpgfuTz/m0p14mz0S86JqzmfhC4larRFwDXrNxITHWysPbty1K76YBchB2Wa5z5rUfjhlUNrqZmSIk+X7b858U4/un30mEeOKJolPGuq46NBz4R6dhLcf3GFObZ9Bx05u4jDxzUbYKnaPoa3wt/W2DFLN42t81v5zeyned79ESfAqNU/mHexa5vbcK7hdtAlXSGs38x3kexfVQfVZyl3nIUr9gkOj9LJ4oxpVaC969ZN2gwb+6SCslHFi//uZXK0VicdDgR9niBfAGk6Ey0MUJEuwcUNmLB6qZbqhKqiGIeDoy+M9g35mS+RQQ+xFbuUMUfkLmUaX6XeO2h8hseNVTVNIIGp7f1kI3+TnhHpVZ4usuOSFXYO3xrQo5oQfibm3i1Sa09u1lWVR+Fc6adAeVo5ybHyDd435U+hzuZ9Z034WtDwvMC0gHIKdUTdjNXqeYKvZjTy5AZ3p8MOT8pwJz2Z82ImdBcTiS9o7oUN7v2jKgnE3bY1ldYttAFc64Cf0PQuYYOxqB7q3k8SpWM7N8bEuueLFpZRb5nOq+1oLyUGTYilMsLYubhdOTE1mx2/9xTBQf1S8k8IfBwEPpxob0kO5apfqghc+yV2pvWY/KHq9Bao4Smt5X63ZmHHOc/PaGuwATsuf75hQNRgXfymTy5+bJo2yYkeUs/Xk4DfPfB/mpfJR5TC9cX6K7gKTILTwgLdSZNGvTvwZ4EnsIokCgcerd+RtUA+da84o/+PsMAX6l75KeTgr+pqGCqorLGhJLL7G6fUDCEtmbosUDXqBeKjU1F/AWNt296I0dkvipPQCn2bjoQej4wa0hrz3GmBS1MGM9qXzXGM9idt+FNfIFf0gwJzEhc7nLmgxLspY4esP/bCs71OZ3C/AK2xW7b6V1rCuVZhoiJhxXmkzfnJCRTaLZhaHYRiGb89RvxvkDthIL9trkWCi0XFEJnAlomoGH7iObHkNd0UAShiws98k8PLZjkFnxQH333/i/jtXOaWoG2owSWLNuIBqf5/0QMvvJBovFuTl0NWaoqNiKzGTamoxM//X/6h+hl2Nj0Z8mGw5uUvT8NRw+SjcUNBUtPq+fADzTwy9leJSsERd+yFcTYIpZs4/vckxj5H8w0/HIQLY+zL9ocS21NCMFdecph+Irj5caqxDAp3yRMmMwLfpFCL2wbevH/DYVoO9KjaMesthwRN8uAwIxn4sqEcCTqYYNsjj8lRhg1IjpvDlmpWIbd21+PpOK2UJ5DWIYJY6V7KBZaYlbAoFIZoir8PKyKOc4nbndNKtGI+kcEuYHH9bglh56yPkr1ANMTMF50gyUDYG46Oh3LjYmzqSmrxq7BbNUGBbNSruuUZ1ymr5Jl1wc6VbWuqZJNsVqP3SfcCU9ab2mRg5hJoZ4EffOXO7XjcsiWJwXIsxjjvV7tuofu3GMgWldZORVz6geut1UCDCqLFeCX0HjmTEPk5ieBk6hE5ggJ4rkGIpyygddT4WrPmAhO39KKRxe437IlmECZByn8C+PU5WVEapVzdPIdTjBm/j2bwK6/3BZp4NhIcONTXWl3UVlzTBIaySGHvpeF9YPpEvn+/f5chUDhf45q1DktySaQy2AJsq3feJdaxA4Poc2LH/Pl2leB8inFpc1FhxiRcrPtwAMD1+w3MbX8IG68n9v8f89Tsc8I5pWR8RUmghQ4lACSJ1Ij6FxXh1vaVTKNr6nP1pq/OI0+vz+FL2kU6FpA8rbUfXS0fU6z60iWoI8WhqqOvU5o30/crI/AWtH6daqE40e+MO/QOrBWVegYE69xviSE9sEgExAeG3gLBRpuHQ4YUdfWrLLdwO0+31WrCSOoGGIFXav6NVkhi+NRhrT4FRETdHs9Cgg1Be2bJwTz6aUkwCDNUztRLUwXWd/CAKUhb+G3cT1Fx2M1UvlzkKlItwaBb0RJ9AN1UPawTNFp4sfzVAivDWYRTdJ5Mb+oTE1BObnJ0Aq9m7KCjJ2aZ2E2ejh0m/Q5nns/DqvQLEkHogeInqvuWn0aIAnTPF1/lk+KZKx78hICT5ieF4awcfyEXKdsAXPQ2X2G8ap8p+gOaC3REwHEQOCqAh58oGZDS1woej/g/ymYQmvr6YTzLOcvN+NyCPaDyfS6gt9dzYNQ7QmBa6xjlBQXx4O0NVL1wyGPqtLxtbrGw+64+Q3NXwAuO4ghC4TEN/bzlyTWwFdq5kr29q/BikWF5RUrx2jpPtutsEtdfsPPTWxhvot2pkOisOkd7y9QrO3RiVlk4shddHDqbD0IH56O6sCxyJuepNjmpVYBiGYRzqmlqqcmzTEyJ89f+vUArsP/bbUc7fmz308xQVtwUrL1s9baOSOox2Cbo9dDO2C4XNAoQ1x1RBDg1Ml+p0dMUf92icI7gI5YFC111dA6LIf8sa7EsPHw8+H5+/7uFBfhcWncvOlWovnPvy6nRbVJuJIsgn3pM4jDZt96F6+ZaPwAEnWu1Mq4EZX60YgsXsLMsTM5FbE0dsev1esFIZ+T+uQsQA6MmEKjAuWSfizC7yXgYXonJjk/OPYpWqkzdQwV+JDe6BIf8L4imp3TzlJ7zmlGMDsWUjq36zbCR3HrCTznTITdDZrzgcASvljx5gO0anNbRX9OG59SaZB2kplOK8ujBz+el4UwueLPj+5PwOx7qqW0FT/YCdSmEZ3rQxI7VKfmwNIbIJvIriX1YHrj4rlwCB7uZIKZQwFyZspkRo10xSNv3p2ecGmXt3lJCXwlmXw+9F1BUA5Rol3ETSkHfRxRbvdpT/xAagK0G6C3gayGNiTOTH+Tn4t8fksUIg/P305WljlpGs0niZap6YPxlFIz2skDBT
DlzvRAOcSeg09iHY8EKsVx3X5XMqrW0QnWTbzZO0ux9fz1OpgguKgFKeEYXLguKle2g8uEzyX2+2BpV2cyp0Yp6/+9TBP5vM4/Zh4VhvcScxXZGedJUtO6hIPLk5H82VGdcbQpar13t92xluoXX4evLpc8RrmEnFV1Ou6PyLpS4W6c+XyMrFK2I+tK3zekWNU6iTknhy6kcOJDJeW+reWoAfxfj5C4MTOeF1JLVR57B4cLmgBqI5DtF2Agp4KvFq0NAOt3vnKP/gVODpNn4FfWeMlsK3F6wjwFpQ7O/H2oDqZZKQ3oV7uEdR+DrXDjPXZMZNg18arVSgqsREbKQByw14Eu/cM86e/n4USengmBgiUcgsaB1cQBmHzYYkExjnDl9qO4OJburFODa6ETab5ULgIHGTa1w0fzDsd0qkjr3OGWTr6ymPvtJcHz5LQW5Ye8vYh8weU9I2pR+Kc4G0zZ599UEhKPZjn2MkPBLzAOjeSuiQgyBs5kaV5VLWCKQaZ0JaTV5Mb5FGZrX5k9xYnmAnIxxRNd9T/fgGhuhkjcgtV+egaRO/yilOp+y4mlaiGPDoYRcCcwbMPpE/ELKEu8Nv1bmSByvliG7dOAe/X05KJZVXFA0R4NPYCtRKR38SeE/GSc0rK7JiGkOKthTnDeVIlyl8WS9EIrKZl+0K2+OIODH7XivTxPX/SFCiAx0oJkGl72u9horwh2y6zgvJNTveuokHyFm/rxCDqKLwC1lmqeNNM0NOeXa3Xsdu84/mxSF52sAm1DS+lZqEAJ9zCUhFcI/p1nKP1bssk34GpWWVtsx9SbJ00auMqx3eusrcJnIbiPtSodKNPbEpwcZRPfSFX2cRKcREc0WxTcEbzd8UqxS6E81AMaaQZpr/UnQpxIbmG8VFCoKmNUUuQtfRlKZYj0L6THPSFGkU4i3N1BRno+AfmremGEah+0MTTVFehfSS5p+miKMQP2jumuL8VXBH89wU/VHoPtCcN8XmKKSnNP9qFPZCbGl2TbE9CJ5pvjfF6iB032j6phgPQnpP86EpuoMQj2kemuLiIDineWqKfCJ0r2k2TbFeC+kXzeemSGshPtLcNsXZWvAvml9NMayF7h8aVVGykArNEoqoQnyi2YfiPAt2NMdQ9FXoLmi2odhUIT3QnIbCUYgXNJeh2C6C7zR/hWK1CN13mlUoxkVI1zQfQ9EtQjyh+RqKi0XQ08yhyJPQDTRjKNZ7IX2l+RSKtBfiHc11KM72gg80P0Ix7IXuEU0XivJeSK9o/oQiVkL8S3MfivP3ggeal1D0K6G7pbkIxWYlpOc070KRJhGu1MUxjcapdFEuOZqWOjqbRPSX6mKbRt1Uuuj+cvSWeTRMIspfdXGaRhdT6cKGo8g6Km8iunN1cZlG+a1E9Occ/ZN5FCcR/lYXf6XR+lQiyk+O7jKPzt9E9D/VxSqN0qlEdDccPWce9ScR5UZdfEyjs1OJ8Jmj86WONicR3TN18TWNhlOJ6J9x9K+0kYMIg7qY06gcSkTZcbTLOtrOIvqduhjTKOYS0d1z9D3zaDWLKPfq4lManR9KhFuO+qyjcRbRXamL6zTq5xLRX3H0Ieuom0X4r7r4kUabuUSUM44eMo8uZhH9mbro0shp6aJ7w9FT5lE+FVHeqIs/abTdlC784miTdbTeiOh+q4v7NFptShf9b44+Zx2ljQjf1MVLGo2b0kW54+g26+hsI6K/UxcXadRtShfd/zn6lXk0bESU/4vFu2R0sSldwEFlEKQsjYPMoCelhYMDA03KUjhYM1g1Ke05uGHQNSnLCQeJQa5SOnLwhUEKKcvEwRmDIaTU0bRU5K3Q9TRlUawnIX2hOUkFUqhMgl5qUshMenoWKRyYaHpplMKayarp2UvhhknX9NJaColJrnqOUvjCJIVe2kvhjMkQerZSuGISqZdOUhiY9KmXV+VP2jyzL9qUk6kv7bAqbR5Szrtk4k9Yg//9Ktpx/Q2fNpHfEtvO8i3x/lUf/ytr6/MP/3BtLvr8cOckuzqPV/V3TH2Jd8PF/nr4dLj58Fm3w8PMg6A/+EauVxfL18an4923xP3q90ln5v+1BP7huh12qw/pnwFWK5jlTWR0cpekYZPPVzFXd/+mP24wGkhP/yVDpYAyXAooQ/YH0qBpQGDLQBk6Bf76KAh7BxwNgdfB1i6R4rU9IDwZXnu92jUsNNHqakqstKau74Tqk/0JNTpQ6Uej5ZRz6uHyLT//VWtEQ92vNM99BOTcxken2O7kWw77sdbdu/X9PzYqQ6s4BWNQuyPAV+gSoLNugDTBIA9ttdhcOn2SsEldOG1bEU/lSD8V9EU51BvVTN/8oHmIR2yuGStGgwmnCe20ys6dBpbd0YnCa544ELnTmwjsPb5l/hU+NJtxEcRNKNIt190oHF6kduwjY2tZRd/qxS9//zQd/u+HD/lhYv3Yc7hMdb7c8+PTpn7UicVGtWnfPKCp7N3Ebk0xNXC4ya8d3iwoTkHrpQvSSaHKk8Nahfof3FROr47i6f7KIjNJzUATYc2bLlxIVV7cSidDehIn2/RZ3pqW8mSYtIwn28lJClVpdnKfM3pH2KsArrcjwMTiypmoywR4t2IILurN0ivbI4W5ouTIRTf23qP/o/yLqLZRilXVoivk1CInrXZqlNJqUbNkee9SmvTcACkRKjIi+qQf9M2tNFuxlxarcCcdrdK9lAYepJUqsTLoXbrh70OK8ZAuy8MGc6JsETPZ4SXKHtHBc/vVMJr/1zauqSUPmA8oK8Q1eZk/o3xF9A0vIdWhVmLb0I4YTwgNjyPKJeKO7FBRrhEl9PHlizTtXiaxC7QVxj/sSq4wLygj4kNDqygzola8HCVDhxgSbcb4jSk9mjBvUC4Qt01u8y3KPaJLPK9ROsS4YJ4w/kMtOWF+i5Kb6+7lJC/zE8qhEf2IlyrVoUtiO6J9xvgdoeCxQ6mN+Biyww7lphHliOe9FLtIYndEe4rxEZflYcL8A2XdiJtAe4ny2Ij6ipeVZChBDAe097KCq/J3wvwY5awRc8p1foPypRHdAc8nlNSIcY95i/EZteSC+SPK0Ijr1Hv4gvKtEf0aL7M0Dtsktmu0Xxh/I5zg8TXKVSPu0t4BylSJUvXxuZfGYZfErqI9YMymljxi/oRSKvFhRCsoLYia8fJZMpRGDAvaNcaluSp/T5ifoJxX4naU63yBchdEt+D5D0oEMR4xv8C4aWrJwPwOpQ+1U1Jd5r9RHoLo93h5KdWhD2K7R/uK8UcjTHgcUHZBfDzKDiPKbRBlhecPUuz2C7FboT3H+Li5LA8z5n9RNkHcHNFeoTwFUd/j5alksBDDhHYp7cuSrsqfCfNflG0S80Gu8znKPoluwvM3FAsxbjH3GJ80teSM+QZllcT1Qe+hR/maRH/Cy3tpHOpCbE9oPzH+2whveDxHuUzi7iA7DCjXSZRZ1UjTbkpiN6OdYfzb7EpuMN+jjEl8WKPtUOYk6gEvvyRDF8SwQbvDeN9M6fGE+f8oF0ncruU236PcJ9Ft8PwPSpfEeIr5Dcb/N2llgTl
QMlPsJrnN31AOiB5eimToGrGF1jAegwCPUCriY5Udtig3iNLwfCFNu9cLsWtoa4yrsCsPFfMRZY24qWgjyiOiNrw8SIaCGAJtL+3LSboqf06YVyhniHmR6/wK5QuiCzx/R0mIsWKuGJ+GWrJhnlEGxPWi93CJ8g3RJ16upXEYktgm2gnjryAseJxQrhB3i+zQo0yNKKM+Pn+Rpt24ELsR7Q/Gh7ArecL8GaU04sMerUNpjagFL18lQ0liOKJ9wPg1TGk1YX6Kct6I273c5iuUu0Z0Rzw/QolGjK+YX2J8HmrJPeb3KH0rAnmZ/6A8NKI/4OWVVIc+ie0B7RvGn0HY43GLsmvEx5Xs8ALlthFljedbKXaHJHZrtH8wnoXL8nDC/Atl04ibFdprlKdG1BO8PO8C6uXiaV/aHkQmoe6LKq+c3LXIJCy5tOy7dNwLkUl89t4Qgddmv0cn91U4bRCm/LTa7ck9FzkJqz0T3u/VyV1el07SlJ1pX6TteplZOuXKac9O7qc071x6y5n53tXOZDbSvN/SYbuTu5PZSJvc2ey3dLo3UuT/F7uRTUmaB7VelYtqlftUU+1L+FKi5j7eFWk1mNQSk5UpmaMfq0OptdW2H6XjoLb36VBVE0mvuS2Hmuqy/yKdBrt2kWK2in0qn/b9ITyUOrRqylIb1GmfLqvBlKYy9n24K7Vuh3aVZD1qV2+gDYF/A/ai6XmT2LLVheCE6kIe2jM2cVFFs6u7aviJD3YdDTnHf2jk6qL7VHe1Ti9LShzPI4k4nGPD/gKuO+m0g6Trl4TabPpuHpU6E7540nbMXdgOcPgwpJ//iXDQHlLeoe5NGEx411Y7B6cHkJz+eZTV62LPNyf6DAkc8cyDH7/7D8QD5Tla+zVowuYrt/ySo95B1uvl0PjnWbDqwNpk6Sc8nSkS/WdVwJbl7stC5IzInMUoPuPjBybT2wpvdBZ+rYAm1s/6K31rPg6rpxwdozYpPen+GckuJrarVJHu02cn5DH5R+Tn67Eb4knu5mvh4ujfGudz1pdluWufE38+xdCg3F1Pm6/dc1eWAxJeNkk1SP1o86P0GJZVEevwti5v3sbndK+rz9oXcX7czIcfp0iuGV9M2bk4fOvlerJ6fcoviWR6DuDl9Dotd1THPtAwuWr1qr4BNRzCI5DrJEXjSaXlFHfoqTnuGne+HjA4kZ8V8oR8z4sQJbhumobQMoHgnT21v0OAdE5opMh3eKYT0uk6naw/EDCQ0p08/e7huLmocVyi7GM66+dBhqcIH8scFljag/PxFF5GN8rwOsxw1illq2kwzgBvZ7MZbTIxy6MOy+jnS0Hx0m7EIQbRCMvvSA4XLuSBOR66ymPhHYRJ0pGF02xpv6sLlniNsKVd3zQQ65isvkaAWiJdKSU+1ae3Y0DhDy+nt8dW4W5ZiqaCfU9shYufaBPfWCsuYj6yYqND4ve3ufJbHAJFBJFFVFqbFI0NYLQhD7UVzgkqaeazf48Q6uAhgVlIG757rdpBMuhaeHiWHd934NM7TEQtjgP8pr6dhqxU7k8KEEfdWFXXR45ygFtlpAvGmwk96fmSZFEaxBWGizjFkg0UWwQgWF3R4nw6bxae+fd6Vd9J9RIvaCSnuyHtG0AEEzD+kQRPlNkUG/RYkKiySz7lPIq1ll28rYt0BWbyOnQDBs/IcP9opKn1ot/MOsU282XIZKgNUXTq4HHXlaBNIdmXtVoDFdJQHjIoShl8qUKaaT4zXiH3IVVWzoMK3KBy9/AOtBzWTrd/CpWuttGNX4anih1PJ/2jADoByMC3nyMED+4VepCNtH3Nf01VVu96Bp6PwWkdISDT5ovTV2RhVnCFZeqm0ItSOksbjKL9rssA8ZAYu3l7Ol5GcJ54Hhy9oszHINAOr/RKK8b9sqaOjITaBTOcRooTu2yZDNLMosZbERAfC2G8iBtTueyXdXmGHPdX9ZmCgEcGOUjfAyEYonhbNnjKTZ5X1UB0KGqrkv50wjNEOj8BXBav2kc1qxGkH3ayrQRwaZ7Bi3KKmHACZaZ15h060wSECshLgc6ABjrVbyqfMyIgrODZ50iiHOJJQyWL/qTCSbE8pTDoxhcoTwUGoKsKIS9tFbigs3LUPXBPXS/Urxjfg0syY3ReVc+0G6NgXIoJreb5TGyLUapbqDl6lr1kVU/RFoFHYWUpdjGVtoZ4xAiSd1RFRGH2YSa8iXhVE/Yguaci+D0UpnlQasqkFJIsWTjTK+cKnWd1Ih5dSFQyKMCy82c8JY1faDXJ3QMiKym3acRPHuvrmeajU7bM42CsT8hHil1cJYzOx4qN7dMo9pBmPA385teFv6EWwZOH+duWak/e+0s0Ly8I4cvv5Mr4MDzEwVd16B6IA/HITJC+KMl2FY9SzZj2hfe/mhzPyaw1AgojXaQoDbWLWpOllEHEHw9ORSnmW8MNItZUJCcV0OKdhLe+nsVtXzPcViE4ROYlrwszlaBz29JiREzFHthx+85M9GmZejOUPEw1W6F5fdEcD777gk16MHIBd0sXv1rw5KE+y8Y9blYJidQEuejYDkiJIchVzg1kU2iBX90cKEunGfKvchyjLyec3dmd8YCJWxvavB0q1EfuzlNHq7vQsb9O/8A20QV8gLOGbt429c6TB7pR/ynqcPbOxwNn+cdqU+/ehnrqbmtVb7pOPcW7r5+Ug6cf2DravfK1yT4OMplwGtr+rjOZ8sfcPsU25+GyKCy8eXIwLLCpanpraEdo351u/3Kg1+H29cDOhGh/R7JU7vEpBDfn8bioQJl5nhsrUKifTgWifPx2E0AJQVWFLcHf2iSwtOiFyz+kFviUxOuvHCCWaGWGPN9yBfKJOnqr5VDcUuG34vDRhymPvq3QquV5WB3xbKas7DsiggwTcHWZ74ImkkKUMWNJyTPvfJ/ItiaZ+yKv7/lurPE0u1cmaBZpIgOkQlqs813b0mSDoUcBG7Dzf96V3nFetqQv8uoIey7rWMAzJ7q0Ti5dhuK0MGPpfsDQgDqu93a0DJsD/5xf22boWJhTwGKZmkxdcaWoQXPqWVEWw6J/YjHNdgT/pAnsPyaOeoN9IGxoSmNiY2/qcDbZMaRHHqj5AulpD6bDGNLhVBT2BSoNK8RPEzgd6BciTtRkTWA3AeYgzWhQFPPg5ydHj21Ij4DewUVSUWVmj6kWajSW6EB2d218cVaucFH3dsBVxxFZTUVShSJVzoiUvp+Y00HJPMUB3+Tko5MJljahD22esy88X/u9mBUeNAZ1iY8H5M6RwNeY74BKOJAsaDeJ3Ta2I+ylcE/hK/0GxP/FyarR9BTdWa18cbMAkL/2INuB9WW3glcFjxGUKrsp6aDMODIvHFTU6Pqyz6NrfSqRNz6Gir0LePIUl0oVv/UDz3HmO3s2aqEq79r/zV5N2WnDLmV5bp1r0UI2hPyMhEkntO8LE4NjeAZi/KZ73reJyFqhCI7AuqMsoIktFwi1GRGTqr6iQ+bjjM4nqsOX2w8w7be5zbmOg9atpH9yKlc3bf7xHx5ZSgDw4985o9/SPxWx9Zjvwd
L17l2n6+e+RftY+VR2ol5UbOAzWmqXzKe4LV+OeScg3TY/XFTDM0nBpzFVwHtYKMoCXQWcR5VCRLlSqfcA65yAAOrDrgIpcspbVHYanrBmRSlyiVrYAzEAfohZwaa/qyOnJu4OJ4ZmAMHTQDfNkzJY/UrRfdjYkt2D4w8bI5zym1xB7i8adeVcBouDhPLM4cFzWmWpcygaEo8lraT2HF+ia4gpS4ZEhxor8W7kpM0upw/TfwhuJ0zAvjMKGSD+xbX9xb+UmVJmdP/Um2Ekpf0WEXsprngf3T/2N2NIcP1IcDxawrdcIBfYX/bxtIZ3YJl4YXpyKv/ioMvYMuoc3j9Npri+6lDaHf8AGQ+4fx/Z+g5KgMoeDGrzTxCcAsM4/Ylx+d7PtdGOPajzfL8SvUHYAnDkXQ+XeXIf9UYEHGcg+DPsNlAn9waH6w6plnD1oSCbx7PEUpr+abvOgNtbKmPuBtBp5efG/weA4iqvjrCRAs957Xx9yn2brBbFWfoSrkH2vwyOYvq9GVKSud46MbO8PUewCjc/jk2fAPlNv+CHL4I/I/b2612kPFd2pxk/qr2X3m8J2KGb726/22dwX2uxHWRJuVdh4r65wou44RwDs6py0GdgsfwXKygHJfwJrf06cm7dGyv+UZX8XzCsoSmu2fWjypx0bx/S9q5Te7vNbmbmOlAB+LNXc3xedm4c3xpKmOP/oKU1e206W+U/OgZ2KdD51A1gfZniSoBpUjeU1JjJJSY+Z9jGMuF/C5G7oLw4tMN8+cxtNcr29Brp5rNW/gx2OA8bfGtnRK/qN+Pdohr7eY6LIQbLo5vCWlb++FE2sulGJesb+UD47625S3irIp6d7RcrjSVlhptpfqprGzYygi7F8HTlgrmejfEV5NPvarWaER3uwbvd7Wtp7vxU0y8MzTAOowvdMamsqHGcnjwz0EzAbV6uibLctdH4yyzh7RqXT6fMUETH3fzVGuw66yX/Udus1PkiMWR3N3sIx8qiLCSnY0IbsEoC4Fg/Zj4WgpTD0eIsWvUNNS+5XwgguAOn4RROukOiW9YSrbLZ1aDjoGBHYtX5s1kC8FVwTXgIElEaCRpnrwP+7wzfaiCfW+GcCWg32A3fjjNNKo95GX3o/3swzmjwZb91QrOJXMm+vahZhXMCZBoOUFrFcJDEfQIuxO1sNa64J+RSFiMTGJDVUbShqjhzQ7l0FfObOt87V/MezrZuv6Le66SZSvlmUHNfwOi+yNcuhG7jok2l8iNmWOeYaZvPHyIUvkflNOoup4i5hwTaGj+45vdOUmW5TTrpUPBXs/cv/UriTs4Sr1iWS63rzCP5cwA53Ud/F/nB/3DHGdBb4G8u8jeLndsqLiH8cP+Fbo/p/AxNh+ZNn8MLJ31xXYh7WOiZossr33hAi4IzgSiyywxlTRWdQN55B/J3M1glPtyrcvGxZsrJW66tjon2eaLVS7DZZO41CLmTB5F8E5eeOwKWEM5wDj9LCKVEjj8bvfNqVgGWXMDPH7hlBz2e/OwWWnI7xeD7QckCS22YpQDbcKMyBVuab9H8bZ68EGE7r49H25YB4D73KpCzsoTcGVvgFYhtqn4lu0+yAn5wJAFkwfnlsy2LyzJ1el5Weuz5eQJSLoMS8U8JOoc5uIY9lYv5O4NTSmWjynQJmY1SL7nNU6E6v7fiWjQZqRPkpuAcJN1UVS29Ibbq3/4vXuiEEWnRzoEqzFBCrgs6MiIZ4P5LC6zQLjc1ZGEoL+5q652klg62CHMnkrS8Xwre6lTKfei/tqj3aNVftbOM42TE6lwnrY5uu+7J69ZyhYMpJoHq+sT8Zrf+kiQ/ntlQAt3KPAXuuVRCFDQknIuGNdUTgnupxB60jpXo2VRcA1KeIROwTjuoqITvqv/80MwG57XEsVGAAI+IuW9Oo1Ed+TZVZQEPNVROvsunIa0pqiGSFO9TG5eOZoJFQ9r9UI+6XkC+8bHkAhYlkg6OrBHjjaWsYeFOsmN/Oa+C09S2xAQ/jTleluJLmYWtq/TN6wBWd188jJUAyel+w0sIMw8uyncDHo+YDOLEDH+Gt1ErEJwk6d8YS8Jtp3UBAlwQNu/Rqa5y6MKvOUtml5/O3PmHBmfvrtq50P+MLCgv7pZRc8N5jSMC88dPu6yn5X+zeFV7pgP0lBTP1ZnhrGmDGnIcdbaB3dTae2swPhYor6XhF8cfUQeFqQ8cwSOHGLUPFEUcQvzYlC4g+DuLPFR1lwg2ath5jXFkneecDjpYfw5OmQN2XZ0We68mipJ++L4xtaHU+ajmWL4h9nU5wUfN+1cP+Ewo7gc93Rmh0jn+unrojIbg0XF8yisAx8EvX2z5st5dhzf23G93Q1M5bHI77WBtCiI11hUQBwfiSGlut0PHawo/u/cdgBkDY004m3pSx8dpFo8PK/Mfrt5ViLXY4CfsF4QGhqQXrw/xBRRi/425I+d6oS3TmQGa3Hs/mBS+V9xzMNHjkty+3sJOJFnLBcfAHbHQLDjK4onj/Sd/wFvXo+qoLjOCpsxPfurlA1F8jpFMDjgtpINNFBkKZipYEfWIZB7CLMQL6NnREG2HOp4Sq28QUzAX03PLg1KDO7akqTcNKYTjkHNdEFqD1UamhXSwgAgSAQjlBXQQovM9+0TsTY5LV9IT5ydTFjcBWcFXK/UTkOTTT9uYaYxBnIB8FA+bM1+pcen5SEz4TMKSfDr7lILQQpQHz/Lq5+0uKYNwPPJRgLcZWaQvUtK7um/KQfm4zgw4EMq5wrqYEypR9ArClqrYuPQc0RyfSipYTGeIafn7mIQk+fRL9cNBz3kLzfLqTVlF4XyqzLhKzt+7gtmhNvCkkM4G/BB1lSFyvCVThG8TZLJYUV96LHYxUb6Inl00dsFKTKBkuEbi5A8fLbMysMQ/97D5CBxp+ZY/pevMW0AsGFp67GuEJphs4F/TRb7mD7fkjF6QE83jnN52roFMzWqazbNf4VGsc8Ov2hi+ig7JwG6oMIJI1OhEuKg1fK3c/n9doBHyS7mmfqkbtgEd/Y6w6LP8O5DjeciRUrKojMyt+mlVOLNVwui9Hxmw5/ZTdtIV1c/Un2SQXOCrO7uu/Gul2sk8ipnpTn4KCk1gK/O6EF1cy+oFIHhFugUXkGtr8ZCINN8CDZQ8nNrWmarXzB/LC/iXFwjETWxvLhC7Z6eV/S1xgbwYav3CIuLj/LK4JRswqRyuHFGzX18Nu+jo3wBBlF3JwlzY9NcTNCLKEodHoTFqgf0RJR6x1xpwrOfZHJ31+lOimQvn9pGzBS6H4qQ4tNX/VSe295blzLOUYBApOvQb6tL+tw1v833o5sWltefcOf/6H/BklhDjZyrbMu3RGXLtZy2Xjz/AlSd6ob0Qa9toHc9uwB/eU35QnkHe0MFdwg32xyZxlljdnvtZA62IBQZIbhu4fPJtNakbEEg0vE8NTg7eAZAZTlH7CMLE8OK4ycoQ6pg6HZD/KDfykPMG+EWzNtWriNc0TnZwrQDYXmCezhJSIb/cLCn98IkQt5DFluiP8EvGOue6LPS05ztWQysy1RrV6
pf673Xr5NZTrM3tWViJ12alPHrY44SLXXueP1F5459lq7Wn8eN1zQ4iVJivhC7ShFFe30Je/crzzXtqa9Pe5euvfvP56KYYsx22vnY2OBJqmNHs3xHtxDiu2A9Xi5Douwk2eULa4ncclHoXr/dCV7zkbb+C8FtNGKfVv0WW1wzfgfm6t1Z5ctPY8qa3pWLJxhafrpzfAM4OYG2sL8rIt+v0bXdQC1ocGtLG/Zu7XfU7VGMFwyW1vmCZF7RWX+PVlxtK2vZgkWm8JSZulIpMi9dnRr8Jkm9EJ+S1rfhitvHK7HzrtvebRabxTliIa7Bn1vssxhGNRcciV541YlfMwvaPjLR83ocksmFVgtyuq4uoDyUVC1DYpuZJpS27FniEYojN+11Bc3l/nXKHqqYt/WjSzsYXTDuZuCpqlzAYx6KStkycjKbgD2aVe1dDE0CGgwFCs3Mr3BsBkU0NNFDyto9LyAEMCK4GEZADQ8v2IHpyCKWDAIr6iUlT+Znsprxx5XupBnTPZLfXUjdAmYkvlVHkKlwuyrcts4zooE6NHAbzSSCj6QHsJFjc1el4tMsVqsMS7gnQim02Eml/78nU5CuKtFl5LLJAhw6uCgLO/kP/ZYdlEgqAPO/FYygOueiqfW5nsKw+fwBGUG5A1dlWaoZEIN4pDcFqBfBzRCRFKlIymISI4CoUQrdsry8TKEJH8KjI/jk1ZGOWwwPiqgjnZQDq5hautEJHzDwicLgwGt8SMaY/gaMsZYMPF91wLxjHBTQE5zAzAah9BCHD/35dCFF+yl36qR1F4O8SRvKg2TjKbvfns/VWQ7VSejmBs7lXXdSxeEcg3v0irnM7r8mRtDtHCaDMYGvAwwZhP7F7+xklejiBmqhfGbiQNhY7FjYbp7Zn2sHHF4Ckzh075s4pj/uWKwU/aHw0PMRIBFUjqGvnNOB9AEKBVqf0BdkQCcqlkTrm5boehzSDcHh4wMjci9846w7FhVfL+BlXix0jUcqFVhijV1NGSJXmzCKkjbAmeI2jvvA3Q9vD4BxV40wYUPqSQ/IlWOPr5tHgh7pb6fNFWOA7kcfAqSutDz3V02aT/+2QzloxPv1dgN0T+wkSTx3/Stz9agDLyTLZfTxEoi3BhdJ+ITnZWIXR5ENMnRJOB6Uh5lwehyRHYtD0LOmigFMK1a7Y63c/SBJjF4wwNj8Qc2CwxOvPwb+r/5+NS16p9UqSoParfoE8jEe6vcoQO1VmTzsEQis0bLhnmX4FDqDjkoX/CnQmSVwJu70osFwjBJ2CvaTXj+tQylFIZYVdXDKL4lWcEh4g6VllUsvt2RA8ytEwmbKwLr4znDpH2Jvn4D6A6Z0nwyvL4ctQC7XB+DAB47VFxes+peeXKxEnBaEUAB20+674kXoyDr1heQtOHTM16KBCS1Eb+bNoXjUImMjW0FOGLjGTPExWw82tXAYbFK1HRncBo8CPiiLwONkABm5djbXaiVfSTqMPGNMA3xlag/SPktrixn0knq+hImebI1OonWM7iJc1Xw3emYiREjPxFYQ/BQ+1VrX4jexRciu3Kt0dzhsa8GrkP+jAtw/UjiG6Z9A2vJa/uqsx5Gny/Zm/spYUON05fg7QgEZI/s/gxwdOIZaYJY9faceT3ChKsnR/j1ScL2or8Vp3i0YREcJxBpOFwsE6XgDrI4QtCKsb/oTnEWrhezJfIDRLh3cJut3eLhGLw7l8JkTDiUrE+KVXBxnMJCwCWDVESzUUuGqBRq2bgvBYG9dD2UGT8MOB7mgxXBI1Ahd6KHxDLiDIimHGbKLYrS/Qqg35fTQuAX5n26L5yQjcPKD/3obvl2+Ts8p8iz7q2gyfcP41ijP9iXZyFhHk0fo9Y+GAba0GSp9XoJuWQF6ZZhplmhlAOqYOPSs0VYLlv3XKI3UCxdrInIc/WBdBviJDR9GaQANm8NFcT/qkOhvBAX2ZxJw5ZKIpvouLp0R4xFoblHj5igHCoeKEIa0H68LpFFzam5irH7J4ATlAKt7HRfUoWKzA0drZydgjD8SuFE1ZA3ryhfxPgRbyTWONLiSVaYmD/6PwLDSrOBpE3tbGpqNpeReiqSsGz8IMBNVOGv3vgSxKabiEGQv1eQ5PznpoqZ2ccb+yb+lvqIyUBQgoZsX03++Pu2XXyFrgWeH3rtDg2iWNW8i3HAlp44MbORHLYCvfkJ0tcGV+w393Tl1GaMxckuVdvoxl8zAYmU362tpI+KgF2YwDhr7V4GtGZ+CP27ENWVWuHkaGaOP2GoqWY8jScHAWz89qdzl77GbQbx4akZbzioS7Ion3jhkmOJood56Gdm1vW2EJZhemQH7MfczOValt+Qpm+YsrVHI55HpHq4t5Ja4ee4bbNnKVCZBXUmvmPuI7zuTMovKlkCY4H4vCsjK6duT9RdBVXu61PqzvK7ChO7y7qH3qEswAJRKyN9OtQnTg3Uoj/AxS73luDRjXQgPxqfX9CRW5X03TIK0hK7uh8cqInu5yE2V0N2RHLj3hXFkxaiAqlfoXVU5SiYGyk3lDa468PoLQRR+d6Y+U/7VYKcu0YM20vYwzoj/ENFVAW+QqNWJWy7K+8m+53P2QT+cpWetWCr0Fnws82ZQfUOui+omkh0L1/BLLliQQr/u9cR9s1n5dE5ANG58+TKxzQFdnnQTwligfS0tf3Jp1D+zhJy4rrhc07po0s8/nBTSrRKw6rf7DM63Hkit5RuGuJQKSbm1aKuwoRp4l1OaS4NZVQAUlNOB68aLOU7mCj/i5XWH+BoUd9fq0vpEPFxqm1gzj43hS8VYXO0hRbGAPBaQlizX/NmZ0ZMzpdf8jMVYVG9KVw5UaHeEDbNoH9bTfD5HnSQeD3oKBVo9QFDYIfv6ITjXOHwYt1tdxvwTw7Tww8vDMR3KLcR1Pm7XhzNSoQoXVta49MGSzlMN2gJLByuy43fWHWiEE+PNL7164GlFF4L8MRBOpfhg2LOiLBGVlH62gaX1Glt+oM21vPTi13SlE5mB6A9oxAnOsHN00BFUjH9GC7y6OiGqQ+QHsgWW5LmyTOR2w/zl39V14fi6zk/eBtk7uRGHuYTokUGU6THtNkbMr/jOl4sIjUCGcKM1BrsKixDdorPqdbuPgjLB7C/qR29O8WbjOLLqhItI4vsPCywagHQy7ITr2whN/c8/juuMqsuNlSR1+L4iNW6UJxyupvA6vBBTIzMsTUHgRk82mFJMCb3UvlLx+jzdPs3KmgDmgBctZS9ftL/a2Pkx8aUTT9nvlTOW78mlowvxA1VwoOrQ6cWhKGCUBxVFa25ZW8voT7gHEZRtUKQ/eoAVb/20a64Wg3wI9frfXJJWggnbpE3Ai1PIjaUd44dMhI8oprqTyhMQBNzwDlZ62qK/j0yXPfp+GL2DFG+NdCLjTAjzjft89wjM344NZE9VurkWyACBEUzH8LD/NgAUUkMW+oppQeCV+t1IhNKlAaYiz6HCOfqBi0PLsVu7R4eVvRHKFHK797gKbT7JcC2RTwFJZVBjp625TOtT6DXbtyFQW8O65+Vc9oSNzWP3Ixj6vCB7q
ab0+dqQG0CqqLSjYLZVhRW1rBkkycMGDzpL3C3S+3Wc+43u326wO+yDnlWNCgH5gBSUPeaID0QqRU+kpREC0VobjwE56tb2tCJnKzkKUQqXpjA4SeyVDWheGQuy2javvGLn55nxscaeXw8AeCzYBy7pQFhLWFYfIzK56IUecCMDUiAX0IOoIue7pIA6cwcfFKNmedg6WNGKfddzUYEqPUZsVHHWMg68V6bUO/Cj+kUlgK+igO1x6HPM8mjreAWSI+EMSDbTamtw+8H+qd5c5qYYHL8cpe11r/25u2R2tuH341shI3p1r2UMg/T6Asv0BvFIt7uEsbJqDbscHIho4SQlp1ZzqvRpdJG9LEKj0YMExViBE4b6zZNaDK4jhCk2noTRzJ2IQj052TDaGgkt5ovVeCLmEkBnFb7F40z2+9NGRIt236kqiPka0vUQkrw4IhGUD15G6KhwR1mZ3bPgPbvT7jzczuAcvZwv1+nlXGq3nFp6r1dpHts/8rgnt86B9TbT2mVoLr16Fjl4bfXQdrb2C1tL+AIfV7LqizsMYJqpO5GNH6fFXIKziSSoMxG2WMCC+NrgHC34Nw7SDjGVIVDMvBkq/m3bj7UBpKAo2uml/kgrLX+H6U7ofi+bZc0KeYeDyEbmEZ4Qbtk/OGdagpNai7+cnM1yDzrY3CESgRlXEdgjpwgobdJp3Yhc4P4MTPVuFUl+rWr+GPbrHdNe49PwI4S47gxntdK89Otw4+QoPX+kah9dHp/s/ROeopPBUzWs5/EOqVJwu5t71F2ae/Q7nB10OPLGwdWs8M7+ckJU0rWKxKARGVBfDYJM2JbsRCIqvkLdboX5wmN9I2E7UnkzSTnecq6kl3Saq4GiplaFQLRBdBUim4G8obWCHzkIsPpk0wKWQogXLcW7b/PdPWUNJcLTAadWrMRb7je9R3B5H6mkNQW0TsuiSLGFszEJOe7eyp9XznSMoX2CvJ3KIeYdPxzr6Sg+v4hwGKdKn43uDTXv/7aTE5ftzOuzetda9oaebsrxU9nhKaVlV5ZambD/9kpJjNSY9MMO5c3FdYSYX1U12qYrO7nU4tFhv8aDz0w09zT8i3emSWEBpYGzyIRnFfNCiKUC8ZrnXU+X4tEyCzHUULJGqb4fjsFzgp7A6HI3lX+VSBUdZDAyk1n2Ul5pUPpCMiCbG0w50pzzgj3CEkappmoF2AX8b500NyLgBMtqQIR3ZyBLHwe+XJRS43nJPSdq4hUy9YGiO8A6o9H7ycWb+bDt2ywcINDOL2WB8hC/ztTg2B9LW7JrSYJqDBTgG/B0ZEajJvBlUOyP1llHYAR1EpOD5kEo7MPhcO3DLHNrmuOnhjo0IlupmoRp6WmuKlhEAMBhloWzVSSs7u3HGUFSn+5/mb4T5Th+/40u9+hDhxdx6nd+Jsx8W7Ll2M91Guk88fU2xT/CeaIhsrAVyxFxzoCSzvkRL24w47Lq2uMCrCQdHBWc5I38G5fXj48zcLtUHWjcN1u9mU84jW4yZ9ZEOQJVyRBo8052I0Kd1ln8DQOd8wzydSugKekLLakSMBdE4R6ILz1pHH/DIOwnOdUB6vdYnT4cI1TQBuehQVEiqaTz9G9GGUopOBStsKkR/rrvebrfRKCtQMJVP90ehx7TAMbMGZbkg5q3cl2oIvJmFGxLKZdPT9on9aJ/QwfdVE7RPugiSSq3l0RmYWikUvw+XHTXl8JO6vKTG46SFNE7ODk2X4zSAxMIzRXelA0XtcfV6v/LYWxwxqzFPavQ29E3CVCzA7vMCUiA5pBAvX14IWEKHomrV5WTglGkDHQuuStxwMxj85Fq7bPsBJJxra4hAjFc8UGb2UF9sBhjmF7PRaSLclWea4y9NCCaWIV8rHa3LO9yaunt5tc5VnruvVcc9X5EnlHKZ96HKYr5SPblVO+uV0vrCVTpGmBFWaFJDMpVdPa1RikhNoNPFhHO2L++8xSUX3Lyj80NgdUAssWpKu1hwPTRpP0Tw1kbBwl2intNpa7hbVJI002EmHvsxcBdXoSWPJPUyHSO4vsG269D0cx77pm8rIQbKOVkiPS02i0Pv5ucom1O1PLZqk8f1gZB+BB+1mP+fC57bPJOM1wST5zS4CMbW7vikBaEH9KUdLPvGhe9nXHJ8V90dxg3E0JjHUzyIh8fAIIi5F8wGjOlXTkhv9PBIt0cZTHUV1KWdBu51BiKsqIOQQ3PHO+nrgwfwuA5cW4VuvGNSPH5jAmHYhnG9pV98kdqiXmHAo9V86LIz/WBKLuqCCHKznXK+vtjM3vc+wn4jE6vLZNOUTU//vZbz3LDULlRWlnmXvNc+7fdarlbk5jZXXlEGxzypjSuL4ZEKbSPwLD9XS/GMgc1du3HLZkOvXRYDzFSmaO+hsNq2NmwGVi2DW/kDFBvtD9u1zLfYY1QjtIqFmL9Fy6volKpxmpugiox15Q0hpC1cgE1oLIkcX3AlnlgzjTtTLO0DJxH9s35Vbvp4o22PwVhlkaGYBtaYKR7N1wg/Ehb6cuzu08xtFtowYN/s70AV5sBB0ZsY5rw7BZkDnIsdaLSlFSeNdkzqtKF8Amk0ImOlUbVALDUr9OzBq/V68eZXBX3y48PiG6GiMmb/DXv/rYEqHJi5beJiKG9XM5c5/PAxP5KtrW1K9mgHysT8K1L8P2JuA/evlwzG+CpKDyStSvLbuwy1/Uz17vdewvVhVYfsuoSG3aq/QzWrA0dF7zIt71b56rDHT1UhVmd+SYv1gVTHHz564PYWd0Di/L5zV52ZIVFnd1Sca1PChxsop1M8aOPEanDpbYGYbfHEXAP++gjosc1GTHMotrqJFkcEI5jDIvPUnoabvgg+TG5RbYl9QYc0udp2a385o2MmLF553Las8KYVy2y389ivTDoU5bvb4u7QpfI9vCH9rkmNXeyewGWXDyq2FJ5WLWmcPBFhVpMqPAFxjC3CToRIaMvyRlg2UgfjqsseCR+IRXYcGWshUDXDNI9TuNgtxGnLCqbCxptj8QDBX38Qq9QOqm5xpO54EbL9yVBW0ugDe0mRuBjZ94sZkBcJeNW9yb7rJVlYKc2URA5dlC2sflKR7J1pdG9olJgrrHo2zDNyrixWsZ9KFOtQGvc5Bn47zxhzeG3g7cEEr9f46ACP/RlXhAELiephXm4W8G86xOKPdDZ5kl4y802z/u5wR/UFPM+R+ZEJDPKLkV22vz2jeUtpMI46BG9aEGJ70fVZkohKPjc8j/1vT7L3USlYFXpCNi1LpYe1L5ofwZGcwkF/M5TltxgRBNdrV8guVmFXP7umkMRYCgPWFt0MgmZ1EMQixLQNWe1ue3/diXMlcXstjFGqPW5GlEoRPqVschiKlbhToqHD4o+7KQZXXVy58HWd2d8J9IyMlwZmQ4xCF6Ga54oltp4PFThxd0nVv1FJSiM6WmRSkAgNkYoO8HDfKbCqa/qh31aeBSVxFiTIaq2o0gYd60JqUFhXiy+wmaRjnrw10MyPdTT5jWW6+hGkG4b9WqGmYhANt3tQgzLDCTVCcTfIT1YoQvMifOC0aIgoaoK
PsKAD0C4BV0IvC249Cq19PFrLnpSZoSZt64C1Vh1pL2HCk8HoL49ifBXsoSfME9qvxPYKwW6cOdG8Fbbdv4KFIdQNlfxR7YOl70ytfA1lvgob2l3ouy5ESwCAqr2n76mhbMAk8D9pxfwVKdCqrutegUzVrKS0o51vIB5doJiW6pPdVZrhdkLoVynzSTTzQYFod6HFYliipicK/XD8UqtBA6yYVlNVL66PqGnHA9JCzXkezVFjRYqJh9M6wUm4E6PxXHIMyagvw8+7StP82T+tnC52etrTs56ucHrC07mnxWoXJ9XleH+lqL/robsY44JWOmmz0gxZvaEAOZEJikMMmvb42mUaoPqMiruovr+P68mmADmUd1J3on32+Ww4cNI9QPWSdn0A6HmmLU7Mc4Bto3yc0CdzqzE1xP76ccujfpVJV4nmseTlfSffQDlR5H3Dp4CzrXk+LmB2M6h4weKYqgV0UcTDodqetuXsY2ZNa0n3mOheo+Gevr+Xl+91hbX9yI5j6yp3htMwXQcmNg9Ue/L3UVqSW87+zLzQXDAHNGLuTmRxt0DP70Y+zUP5Fl+EN3poy5jJ21N1h1AARb/P8KAGYUcT6p9wpI3K1twlyj80u+p+jyR8Po0mhTM1fvtYNG2CWQ28LOp+keGh/imDPyMYBfntGBp8q+QSvrdawqYqZdsxH0r6jqdBQd+F+7ukB1e6d3hQ62qMUEEJl1RQbeUdrC6VGlrw78A9Hl47ftx9r1GVRegSO4UpB/KO3Pc7IL5eeJSY7hAq4zExc68k6bkholmkUn+jrDJ6ox7YsttkEm3xRRrgG72cyLxHoeacBIOJ8CwZjrFYjm7k9WYpmv64EEhPrrAnYmL/az1XIr65eZFfJ3VaQejlO5S1fi/Nqui1QdZFSgYcOnK7dp5WJMOWNZdle1XiDov8yB3c76KxxpgknmOdKiMcPhD9ZwvLCmTgYauEss0tpf+EEimvjF5AcVYgKHzjtL2P/zn63tusWb+DNNAxfeLKjBkrZN0zzRa13WHx5Xl0b2c6vpoAKi+i0CoH4FXkatUiaqXoqA/7pV0XFDbRUJyYR8ybaT48NgUXdc8wM6+11/6lwrcrQrqd1NMKB6ElBgoLTXdA3QJ7uxewS+FhhneOGcPWhIkAbFd1J4I04QW67Mb+dkf+fCfzYvMzuY1jvHkf9PWA9jeM/q4gvvhgkRytq6u7PDJZBhIABmDu/LDVPf5PvHC3fuSnh95IY91ZcZfPKVhvuVby1BixtXlGLzDgtz+r32S4fYVSROWrBmAjR5mJFr0JP992B5UqudL0142RT9dPZ1129hSK/feJXVH+L7ma9R/5R+3KUPWwHWPHO/KqCXVuNUeKD+fVv88jVuSIHE7oEhttWN6/Iioumm1F+M2xQKZEGKGcMg9aLwexF36z9ZN1IXayt+dPRtb66JvcWoaMqv2/YOB5Bx+VdG0bYudPgcnevvs3naW5Ev834eq1O4x7CZsg6nEiwL8nSBMRUESJFdrPo5woaCvB0i4GDEB6khrjiKMpskRTsAuKqchFfwgUi2r9BF5KbV4aHdpRP3+ifGsb8muJPm3gAK+ssrhWXIiqDINNrlGQ94HGR6KAm9dM1V8Tdf6yh5JTa1QGMCGYq+Qm2cRZVQtCCLD1wRQBRb0JTcfsKkF2k7qyAs3MIaiLwASn3iRXYSFfxJVTCIjhtpuV1z4e+fICyyzXT1qrZtUk5uonpLVTqQ/j+0NpUdUexqpTW2FHMrL0Z1zu0Dffx5G2eiEt1ZEwrBYa+MU0gs6mFwTB0m14vEepctXSCeH38jwPmWs7lN7wKUC7CZAZKwRsC1tad0z6dRGKKwc7jQbRUD3Z4rfLXmHqOZx+eKh0nTVjf5to/dt8MTQHVThldUqKbNnWYflfLwhXB0bCQZl7Dsw6Y/GFucrq4lSa4FDnzvp+NrrcNFalmdXShFRShnmXzkeq0PnpPHTEjeZON0+FSyjd8wjEwrzkc0S24a5BkD3CvwQHdOyeMjfyoK4F0lW9DrlLrvFoNQIo54n4MojDAB2uKwjJyrOroECgpkZdh52l+IkJ2jwJH4TP0wx8AkpyjHi7yYRhCv6JeawoL0wZngmTuAormyqtc7mOAKezgqm97LDbukerF7eOKTYU7FXAWYaZ1+a9kcBylgCWKT40XO90tLJ7Y7G4fPUklo4fcmLPTupcYH2SCXXFKb2y1Sz72K08kSKyYHl7QLtZFZz7B68rXwXSVofJNYX5K9tWsfORcf5qDCsKpFS2zSk5C+MIBZrB4WDfkw7GXPGiEfh4k85Hhtb3iPAgN5CwAZjC2ocnsktgHNaBRvMh1+bjSnL5Zt2lWtsC9A27g/pg7TVP3GGgjgTASaLnVlHaEMrKU50a6AN0BBe+wzkj1EesCZ8iyzJ1e9d0VbLVA4T2PFlXhXYBIMW7C/jBSg9EkZlayqm2Qwaf71P9qpx1wx17FEDMuXwAPoxhCm/KCUB1/kgFIr2Zkj1F39NtHZr93SQidw2nBX6r9DCkjlZdRz/I+qp3AxTMZGDgakrG5wf/5OTAbTlxEb+zWVPa+jmAeyLfv3j3aTRpD0I6uf7VQE5lrmYH4NY7NeVm8N9dWZ92B4Ca3eHYdy7G2wLazCLDco6iAwj/Bh0dOvjIk9USl1/ud687z4SV+Cy6KM0DiarWEtJVCxmx5IqMQH+OPNXBgOHpjY6QWivY3QtfAzMxTSQrlFmUbEXNdJNhZLGJc4IDDgpbJ1ciRxjmr8blZWTKS7V5PEmJ3+QlNhlEORrnp4k5aFDdBee8F9bLCydMFfNhQLPG2o290e9t/ALOu1YgCOSCeqEdnW28Lr0Wgqbl1G3jWyjGc4Pl8x/qPZff5x6CdasJWII6hr5HD+JZGi8ggpIrqjOETXYXHhwl62ZsEzYicTGOMaXxTiYZb217EIemgWriGUUwqZlJjW7E/v9EPjDqN9m+pkchwmfynqAWKejYKj0bBgekifiRf6MATpEm1ES1M436pKFyImEXpVi0b6Vh+HyRWSoKIXb85h9nYL2usGM8dReZSiq0xqvs9bHY049LCwk5R0BeWz6svEc2eQxzJVZtWG9JYfOX/PWhWme3cJQsa+AXdFndPLwnbHJHd5hdUBoEWmrI04IQKkAQvmegWUlI6PbQKdqbyjiCLNi1wlnckVh/UAY/TOc9fSg8XbABScN3mMmzUMMNQRA99xvNQnqNM40pbg9lOrZrrwzj3NdAULtnsbEhp69UCKllM85NX6rUuGlgOBqkXht8CFyqxkAZesu5kpTclD7o1tdtwtDY/yAksDu4qktg8sGcQDf++6scb3rsxBvUk0BDibVmfY24IXBRL7Lg7emgb5nmdoY6E2PtycwMJMVThyzsTICVerRELI1Gjts50b+syYNSUYOLkmPie1Hk9O9q+6a6+1u8KWra0bmpAJGzh9Ys8bnd/pzPU59Xwtne8siGRkR3u7ojaG0qeE3I2TDkzIrCCe8UfRNobZbnayUSUkbn1R1KO0nSx9fWGUVwQgbpjBMFY5j8qNv5aa6+mb
MsdW5w3apSJuUQAlO2+FXLg0TGqdcBXnFv/apX1T3CsszVYEHHMKDD70AC8IHX/vqzLEJZEWFMdGKOHPF4VMHyLqRx1CTfUPoMq7h4Megqd5yKZlBe4XVbxxgXJ+NCGgDhv44Kc7NXaViDcYQKZNZO1pVvIHSbQDZrUivC9znhVtAMahVxha4/ZQ/pj2nLkFIGWXXxBT4kPE3fPTB3QAj+W6kOcTwqfNqbJYBc3/W+uwLxxN48hU2SOu1QeclraFzl3I+gl4kGCr/hoeepaHqPS9cqPJ+EbfEZwwzCAIfCLI5pP7o0kgCJ1hqNfd1Ir/2ZQkC4EDEjwzcApC5xO6/0KAkQ4wFWmO3nt2NXVBUf9YAaqW0JWWVu+wVA59sbKqHlsAGsQMjptS93WHYi8p8/DJXZpUylUJjaRNsofDcB5xqx7qqqChnG3GTwsY1qyFtw4MoM32FTaucpAzMXfNQ2L06YoZq5av3SXu/1qv+WvIzAiZIIGmrspg6MCyCc24x4EK+A+iGV+cHaA9osje/PtNO9MZiSPMi+ab9KalhOJY4eBLq6jL5sWUrPcqhyqgO2XtDgVzlLCyfjb2qLDUf11Mv+vQqwShuWvVG4ciUffptMpkwp1xKH0r1c5A+kDmYXEpfa4h9mlUpWrD0b22oJl1tT6o/hs4oZMTDNFCP+QVZ/4mB92gkwBpErN40MVYIDuzWxZFLulgZKXW7VsupjXeGDzdeCp7ag0fjXjlWnVvvUyox9zdx6xWxai5PPA7AechmY36whWzjPDSo4271ZiPjn4t3fhcsgxmv++dcnh65crkcUCK54nSu/I3R1MtKC2H3X9bBb5lWcKySyOlWHaXTVHeb+2mdrf7ZrK2UTgwy05tMSA9JfFJBFYnNUmVDa8NWYty0RPpv2A++rVsGunJfT4NSFXbQz9TFmJ/ReOy1EtbBzX4SHmTPJMUO9oFDQKY4Lj9Y1WhAoZOuXJq4JiFPWhu1awMBTZzBu2N3YwDsXvyvAr4Ty5V3WyxMLilBjqxpUGJzKdF7+ja1DF5BB6vmL7w/DyvFoBPKtnc4LW45M+QbffGWl++jwNAMItjukAIDqLTwR13fnuYrtc2ycdLunmFHBw9+eDUDROXEK0DXYg/Z4gMH9zWksboSXGNbPR5ZdHXvVECjli0xBlKEszMWpoBGmo44A5yIehU4iAR7AmDsAthlP1OnNbE+layYJJ+yjat9gN1EYqKuhj/YKXxUg8O0P1UfEBfUyPhxOzY5RE1kVo/WTja0l227gOKfKAlpsbygBHNXbp5seSewWgWwbF2tl5usJsjCoDWw8mlIxDfHTKsV9MY9JE1CTHjeIqcrGbanmva1g5xrW1ukbMnj6bGlV6W6vrb8ZgI83SGen6FGabIgXMrIXM85dNJB52PChszmqscZGgcPU7oYAOBRcD69fXcCVy63EXhlJXjBaoVun0G8i7h8vrrkBKxRkYdZkSTLs2Ex0aLFjm2CHJ46gxFHQ0DkVr90kYI9fLJzpMktrcbi41+meqrTbHiXeNQtkor+xwrfHevwfkUu7xGeLp8kny6QpwRTzdJ5EjH8K0uFvkCEXABMWFSZf1Nz+3SwQLyfEAQOqsHFouR0BfKRjyme0Ar8llsdLy2n3nV5bqVfR8Psz6yGRHSZB4UQhfOltRmpbDBB4HW2y5VflSAu7t4Mk76zxw17dq+QK1KshDLV84nqxa44Sr8wFEJY+uZ1WIwX6xHzKWOjRtDItUEWd0UtbtXO+Gvt/ZNgka9ecBJYBMmvEgb6ygw07AE6PM7cKlgaLYnJjoCkDDToXrWIrJdPOGojYl/yK8IS8sIu+0mKnjZk7bTo5owCLLLR6Y2s283dcimdklzDkX4czjw0i+cSOV5l8PLVuyNUfcAqMa4pUtcCJ2QAVAYx65eeO4xbiN3H++Yem1KggAjkBT3112LemUWWEh5AAqeG7KohEzHdihJNas10N8dPYYrTFBKBc0ekVmkv5aQHzTryRSayibzjkawRAwFVQULvH3rb2bsKV8xGM5pGZmjXRWeCo+ouPWtAqmlFxXM//QdMtVjGtR+gpGhFc6WlNhvMqmt+VvHe63y31Toet7buGW3BZ9OwJKxagnU48yjy6Wmm+jKkQdEtPzPDtNYEmWQjFD5yf2tZta5OnRMVvOlte5gy2+zG/EJhqdcGLgLGXcIAi179KGmxcLKoNFVT0y29HWCgKfv4aSpy2tYLlZ6ju3N9ZdX32YL5tqveDPyeXrqEQ9F9ef8GUZ/fWl9pRPPViyxTbn7v8HEqzGah+FJK9OSfI63EL/zTKWFRgaw5EOOti7vF6FcyuxFk1mIoBt/ELIjDT4lKMV7SWoCsbpk+ETE53iRUXBaZbXrOKYfbocZR1zc7NwM5lu9Ob8Yv7UT1hHX/BnXWpSWgB20BzbVcWMGNRGmSwZCEBZ/jbG7uu2ZDbN8PhoqtjxH1LDE/QcuA9o7aMZcq54a9Fdg0NfBk2oI1fAnASeSLnzDb6YdRiW6bEZY8fEEFL8xxeYSC2JNqVQ6gbxrtLmkxwNBjQJuE6Tj5AE6I4pUH1wPjqDkici0wKl8HzAVlDz+ngAjju4/8iYobEJGO1OBtf0AVwrdD3NcONxMhxzkwm86fE08ITQW50dIwGwf9Oabv9Cs6ZRT1Xs2JWcgHI15YYxGPPFZ+u+qYpe6ff/UV7+9AwX2VZ1ZhnZCEkXyueYBokPniPoRHeycmKgDTiviEAv9jt9VJk2NwRTeXM8R3XK1czFCqcijznYPqFHU2S7wXm8OtZH38mRZfUjkiYdmEbQklxB8YkHuN7ict1CWftDyTxqBmKo4ryDxRsFGosmKhvcylpfu0hCdM+z5xxN3MLPB5zAo/bltbiQbb/wEFMs1u1bVv4G2HaXitz5Fd9iTSUPaBuDvRCCrmtdFk3ScF5yaJMJUpZp4DImmhX91PmzaFBdTtSK6sxSLp+Ce5TmYU0lZ/yejCQqNlM2g2fdS7elCIHnfu5tyKGAreKndgADrkgL79/8dHJYtLwqpoEY7OUo+E5kurKapK4jK5rr4HimRiCUoUvGqSS0NnprllxL1oSs/dprSO/+ZH7ka0ajDcdcrHC0RfOeKlD88O9Gl82SMD3RLQWHZu2M4wmLEMIKs2WLVhu8B0Rh7MYBYd0GvLsDYWq+0Irgx0fJo9c7yhCAbwEEmk231aav9QhdHCXPRo7KBHFQwdv+VHQF40QGD97pU+mG04R7ZFNeBqYu8oo19rS5D7qY+gdYA+G/bZoFzbCRjxUmRr/iCqPXYLt98M48Ji51jNhU6qxLy0aOLa37YVWi2nGFO3ypT+M1MZPIPnhKM0EeJzqqONQPYs1pHtKRItLEy/HTqu3Lighy3GuS1Y24bASWGbHeZpZx8l/vvxpIZZPrd4poyJ66dkiY0Je+hyFcUEjnJRMNSCm4+mFdVWZ2NLpthbxPjDAvBwxK8hFeSkWwCtn97qoS1wmixGBwRl4nZc4G1f8z+wHLBCU5bIXCE4l39bBe7IXPgKpmv2BoBruUqtcEfM+UqN8NVTBZ
iWS4r98DDqBuTB5ap6c2SMkbC7MvU7rtM7rtK6eBRCta2YG0EaxG83TtxdMtSqvW68QkmmS+7+/3h0WaWXQOCFrObKJxCHeGVHLhFUo1lsTh1doCaazMZjAbLLUjExgc5ow+xy/lChW69nIJ1sxIEy5e4PaDX2OR1rjP1GfdCjyTzoUxifdoAC+72/PCXQ0aRAe9U65MCSZYchuNvScISrW5Gom4oAZPfAWnVrTPEbJzl1YqGSg00qOsIXDg7qI8tfgN9lPK0tWINnKg6iyjmi461vywrC+6HKYBBOhAZdWi54cZxXRkgrIo/dhtebpdOyzm+GW2c+IN7wSwwN8rf1wOga2wS3/+ixrY3kb8p2+maNJabFk6XbZmqz5/NCn8zQS2PKK4OP1B9Llvo2cS2ejs5nA+33vwMy5CY3FoPwsPWrql3zM0pOFLwQAXxET+/S5Qia2JYIkmAPAQg+xMxGHjNhj5CZCM8TO8+I7WsA9XG56K0sx1SS9KxjQKJ3trn0DUr5J44K1BWAeV9OuYnckuqzq1+aKyfZePxof0oTa5OcWeCw0PcLET33a7Q+MiIYRoJhvicLj6ZrwFYyLCKeYSzjELJWhb3ixVZflRGOmRA11vlPxA3Uc138eGZbLJyK7sT2biOIzczRkvncpCHI+41HCLOBzrkPyw0+QH7Wkl/bK+TQQzaY4YoT79d5lgmO66/Y+yHFPxybCYOMBxFoVrQjplHAoCRWusWfQ2jbjd0mmm1UnXh5L99cT/aRTtPchsjP1rZXuIVAN+Kilz6j1RxNNOwM2UWnK8TrxnH+tY7Yfo5P3bAGMIe859DNK2yqSYeNBjPBqrktqwTiRbcwaevKnS7laq71iTPmyVEmYg6BKKTkjbr40rVfwuMMGgaqW880Jy026QnUBkp0FBDM14RtYxmgtksdIemwBopFYmVlxYcsSAdbdN7Fn+h9qNMYBEOdhsr0F8EoZoU/LDfC6NziHGwc0PDYVG4UvooX1FwDEHTg0IQ1zI204k1eveAVlhNlI44ijy4NA4nTmr2zJVyNBe5o2Dji8MPiR0DRAzFYz2+b/5pMgyuGhex7Ym2lcnNN7n0NAy0QgzmyS94lQniAYrYtY2vIr+LGdsehQ5pBId1SHRFaMjnM7NpIk1dWu45X1qdcFBNCseQNkvv2xnMQudtoj8neGptoIz22nj+XRw+/HnZdPuWMc6+Za5MZYkrjZgLQed7FiVgI77DysmiwMGSSufZLTO5IHajJeCTnhbH7DWcFMHuBn3CZSmVyRSpEevUjC+7dcgj5mQjSUDnwA+foVXa9VR8GXL/+HpXjrp1xq5gA49OnGzOItPVln6PLFdYXlDhQILlgLxl9yC9IBWDjeiGWy0aLkf6LLH67ACJ+4egn18+oXqY1y6LKpAc+S6drMwvOunk15mnosOUnxzuwvxBADfp/iWFl41jkD/rjuNg+/NInUmE73ubl8QZ7HymPi2VpD7rIpWPSlYInXfPESZgbKbb87BJs3wfIaGqJnh8XkfADL2fa9n+hdcFU8TPA9jctT2wsuXuri/MLJ9deNMzSYMYgPyF/2Cqp1u9ucCSnO3PCL4vkt5XbGrCBlcMT+/BPfLzA97w/kwPEU8FxLC3ovwrhfvRqWoRAlBstDOZSTlGTkZT37MGrzagGQd+xrQu8t++FUJTNo1bdRk8yYLsNRsBNllmbTsBGI+iAlQZA0RSCKHiyY6vYO2C243Sqmrzviev5FV+ePVlL9CFkghr0fnzeuQd9RUCr53gf7n877ih9GS5tIbZbrUW9jzZyeQH4RisaOVCBgFVWOvpXOKqhfowA3B4dAhXGdwGuB9Q8Xw4QzEgCEBd1YaU+BSog470HnPzvQnpUcH7K9Xpro1Il53Pab5aOlYhccXct9B53vYjvk7rrEvLmveQs1lO6JY1M5GmwuNVmSv+Opb78EL6fWq+USxoE+qA52EGjy7LKfPtaHbHbIB8FDiFTlGMUkdGRXWaiypTDQqiCJolLopcwSd5arRnVaXxxGOZ2GjH+7nI8e8rmBkSCm/OSvdyyf9jzhKuQ12PeDT1iDPhK7hvckb09xjCJxzcaJDEXfcfbizrbdDFczpzb6cxzZyMptHUO3QvjcG1SVs3E3qp6J0oi8BsfeqayYc8/Vs4V0d/3K0e6H8SLXnW8inBxJIzjOR2eiktziZ/GmRfnq+76goOBZGsxGilTPeLVemoAEM11qzTeVfQJArDY1YY6ETk1bFr3Kj33aNBIrLDGDaG70iB9n1qzBL0Vjy3NcAxVxOkZ4x/242nFx8ibiUzikwFELCqWh7JVQ5pLqCvvmI48pYRuer/syFuVRtSMK/UTww0xODBd3NhvW/JQ2fG78/+kmHY935ZmXUb6BWYvxssGykSNyqOplqIQ0x9F4PNtpizlmJ3Ky0YZAwMgpGF9sIY/BYXtgUBg6ojAYepjQCVVQvMpwrqAEkyMrlUsaoa5YXxv/WBkMbCdqoCHA2Ggwqh05uhnHdj7Z8JJ6hQnytFiJEgPIhcO0MSSDOA9aotaWD+F8DNpvSIUQgI+f1mxFBnlD4XLQXJTikkHjuSHPN5ip2LqKnylOyllC/Z9YouU1aYvZJo0zZDcKBZHMk+B6/RM+aW7T+2Q6HR9mGxkDEh74K0sBN3PjuRWlDcG5q0f1SpKpqqSvpjZq1/2KEjXlX/ucp9xKNUq/ANDv9q0yNT+f7Orz6lWjkqWqWjL71LmlNmk99X19W68jKa/s6nH7TD+iohWBhjxE1Ff1kKgEhLnU42fV5h58bUj7Xjz7U/91eLaXPwvPLq/vzx2UModHFSl7SOYgc3NyTGLc09eW/BIegwamzt4llN57yzLna0FVLX4inkV3aPykapyUv9HQsFA2SJdPK5qKgapjYltnq4+xjnt+PLEgqst2nuY/AWBeNba2wDaW10mgxUAlzCBHpSpXzn4HTsqkzv3plV862s3rt/lLRy4vX3TqKThwFilw+9aMBRuRUP0ZKG4NuD1nk+6wYuszXF3FYF8BM4Dxc9OKqwjOuR9WFXh4zEBKAM3Pg4adNB1Ra5itVwLXEVQlp1v/lwXXa3UDedd8xpNRtaI1Muq+nu9KPO/Q84IIlyYQXDdR5wDv5SYulrAhyPsQC4toPHZjve4cG/lZBNEhp1+t17Y+NpWnVaxX8BiDDzftCNkNx/9gKOifXhYzvoWfCBP4AGIDm5WroZ+VEQH0QKpSogkdWX+hgCc8+whBkuD0q5UpjVKgiyX50fMVNhAIXXZrTIaS3MXf8AvmZd6pzSB2NQCgtjwIHZnjTrJZwsQuiydclct298s36MxhV1ith/ZL40owIA8Xq3q+y28qXtgYJwAehK68fpcVea/5bW9IIewGMzH40Vf7Ss9o3Rnlch9jnxgr6ADf8XuVGrKFJLkIJUYlEXoeWoX02grR36VBLYn7WcbQ1epa/YlMylKgXHcJ7DKDVBftADb2n3xT+gfbioQHwF6fWkVa33qqn+IgyQvkDST0MOHJR8hUmYA6O20XiWFcZthB0wNrlBE/lNiljRGx
[vendored bundle content omitted: tail of a minified, machine-generated JavaScript bundle consisting of embedded base64/brotli-compressed data blobs and bundled copies of the npm packages arg, is-windows, cmd-shim, string_decoder, readable-stream, bl, and tar-stream; not hand-authored code]
v=e._offset,x;try{x=e._header=cv.decode(r.slice(0,512),t.filenameEncoding,t.allowUnknownFormat)}catch(C){e.emit("error",C)}if(r.consume(512),!x){e._parse(512,I),o();return}if(x.type==="gnu-long-path"){e._parse(x.size,h),o();return}if(x.type==="gnu-long-link-path"){e._parse(x.size,E),o();return}if(x.type==="pax-global-header"){e._parse(x.size,A),o();return}if(x.type==="pax-header"){e._parse(x.size,p),o();return}if(e._gnuLongPath&&(x.name=e._gnuLongPath,e._gnuLongPath=null),e._gnuLongLinkPath&&(x.linkname=e._gnuLongLinkPath,e._gnuLongLinkPath=null),e._pax&&(e._header=x=ivt(x,e._pax),e._pax=null),e._locked=!0,!x.size||x.type==="directory"){e._parse(512,I),e.emit("entry",x,nvt(e,v),a);return}e._stream=new TQ(e,v),e.emit("entry",x,e._stream,a),e._parse(x.size,n),o()};this._onheader=I,this._parse(512,I)};q2e.inherits(ip,j2e);ip.prototype.destroy=function(t){this._destroyed||(this._destroyed=!0,t&&this.emit("error",t),this.emit("close"),this._stream&&this._stream.emit("close"))};ip.prototype._parse=function(t,e){this._destroyed||(this._offset+=t,this._missing=t,e===this._onheader&&(this._partial=!1),this._onparse=e)};ip.prototype._continue=function(){if(!this._destroyed){var t=this._cb;this._cb=Y2e,this._overflow?this._write(this._overflow,void 0,t):t()}};ip.prototype._write=function(t,e,r){if(!this._destroyed){var o=this._stream,a=this._buffer,n=this._missing;if(t.length&&(this._partial=!0),t.lengthn&&(u=t.slice(n),t=t.slice(0,n)),o?o.end(t):a.append(t),this._overflow=u,this._onparse()}};ip.prototype._final=function(t){if(this._partial)return this.destroy(new Error("Unexpected end of data"));t()};W2e.exports=ip});var z2e=_((Y$t,V2e)=>{V2e.exports=ve("fs").constants||ve("constants")});var eBe=_((W$t,$2e)=>{var BC=z2e(),J2e=bO(),LQ=Yh(),svt=Buffer.alloc,X2e=CC().Readable,vC=CC().Writable,ovt=ve("string_decoder").StringDecoder,NQ=$j(),avt=parseInt("755",8),lvt=parseInt("644",8),Z2e=svt(1024),t5=function(){},e5=function(t,e){e&=511,e&&t.push(Z2e.slice(0,512-e))};function cvt(t){switch(t&BC.S_IFMT){case BC.S_IFBLK:return"block-device";case BC.S_IFCHR:return"character-device";case BC.S_IFDIR:return"directory";case BC.S_IFIFO:return"fifo";case BC.S_IFLNK:return"symlink"}return"file"}var MQ=function(t){vC.call(this),this.written=0,this._to=t,this._destroyed=!1};LQ(MQ,vC);MQ.prototype._write=function(t,e,r){if(this.written+=t.length,this._to.push(t))return r();this._to._drain=r};MQ.prototype.destroy=function(){this._destroyed||(this._destroyed=!0,this.emit("close"))};var OQ=function(){vC.call(this),this.linkname="",this._decoder=new ovt("utf-8"),this._destroyed=!1};LQ(OQ,vC);OQ.prototype._write=function(t,e,r){this.linkname+=this._decoder.write(t),r()};OQ.prototype.destroy=function(){this._destroyed||(this._destroyed=!0,this.emit("close"))};var uv=function(){vC.call(this),this._destroyed=!1};LQ(uv,vC);uv.prototype._write=function(t,e,r){r(new Error("No body allowed for this entry"))};uv.prototype.destroy=function(){this._destroyed||(this._destroyed=!0,this.emit("close"))};var EA=function(t){if(!(this instanceof EA))return new EA(t);X2e.call(this,t),this._drain=t5,this._finalized=!1,this._finalizing=!1,this._destroyed=!1,this._stream=null};LQ(EA,X2e);EA.prototype.entry=function(t,e,r){if(this._stream)throw new Error("already piping an entry");if(!(this._finalized||this._destroyed)){typeof e=="function"&&(r=e,e=null),r||(r=t5);var 
o=this;if((!t.size||t.type==="symlink")&&(t.size=0),t.type||(t.type=cvt(t.mode)),t.mode||(t.mode=t.type==="directory"?avt:lvt),t.uid||(t.uid=0),t.gid||(t.gid=0),t.mtime||(t.mtime=new Date),typeof e=="string"&&(e=Buffer.from(e)),Buffer.isBuffer(e)){t.size=e.length,this._encode(t);var a=this.push(e);return e5(o,t.size),a?process.nextTick(r):this._drain=r,new uv}if(t.type==="symlink"&&!t.linkname){var n=new OQ;return J2e(n,function(A){if(A)return o.destroy(),r(A);t.linkname=n.linkname,o._encode(t),r()}),n}if(this._encode(t),t.type!=="file"&&t.type!=="contiguous-file")return process.nextTick(r),new uv;var u=new MQ(this);return this._stream=u,J2e(u,function(A){if(o._stream=null,A)return o.destroy(),r(A);if(u.written!==t.size)return o.destroy(),r(new Error("size mismatch"));e5(o,t.size),o._finalizing&&o.finalize(),r()}),u}};EA.prototype.finalize=function(){if(this._stream){this._finalizing=!0;return}this._finalized||(this._finalized=!0,this.push(Z2e),this.push(null))};EA.prototype.destroy=function(t){this._destroyed||(this._destroyed=!0,t&&this.emit("error",t),this.emit("close"),this._stream&&this._stream.destroy&&this._stream.destroy())};EA.prototype._encode=function(t){if(!t.pax){var e=NQ.encode(t);if(e){this.push(e);return}}this._encodePax(t)};EA.prototype._encodePax=function(t){var e=NQ.encodePax({name:t.name,linkname:t.linkname,pax:t.pax}),r={name:"PaxHeader",mode:t.mode,uid:t.uid,gid:t.gid,size:e.length,mtime:t.mtime,type:"pax-header",linkname:t.linkname&&"PaxHeader",uname:t.uname,gname:t.gname,devmajor:t.devmajor,devminor:t.devminor};this.push(NQ.encode(r)),this.push(e),e5(this,e.length),r.size=t.size,r.type=t.type,this.push(NQ.encode(r))};EA.prototype._read=function(t){var e=this._drain;this._drain=t5,e()};$2e.exports=EA});var tBe=_(r5=>{r5.extract=K2e();r5.pack=eBe()});var pBe=_((fer,fBe)=>{"use strict";var Av=class t{constructor(e,r,o){this.__specs=e||{},Object.keys(this.__specs).forEach(a=>{if(typeof this.__specs[a]=="string"){let n=this.__specs[a],u=this.__specs[n];if(u){let A=u.aliases||[];A.push(a,n),u.aliases=[...new Set(A)],this.__specs[a]=u}else throw new Error(`Alias refers to invalid key: ${n} -> ${a}`)}}),this.__opts=r||{},this.__providers=uBe(o.filter(a=>a!=null&&typeof a=="object")),this.__isFiggyPudding=!0}get(e){return l5(this,e,!0)}get[Symbol.toStringTag](){return"FiggyPudding"}forEach(e,r=this){for(let[o,a]of this.entries())e.call(r,a,o,this)}toJSON(){let e={};return this.forEach((r,o)=>{e[o]=r}),e}*entries(e){for(let o of Object.keys(this.__specs))yield[o,this.get(o)];let r=e||this.__opts.other;if(r){let o=new Set;for(let a of this.__providers){let n=a.entries?a.entries(r):vvt(a);for(let[u,A]of n)r(u)&&!o.has(u)&&(o.add(u),yield[u,A])}}}*[Symbol.iterator](){for(let[e,r]of this.entries())yield[e,r]}*keys(){for(let[e]of this.entries())yield e}*values(){for(let[,e]of this.entries())yield e}concat(...e){return new Proxy(new t(this.__specs,this.__opts,uBe(this.__providers).concat(e)),ABe)}};try{let t=ve("util");Av.prototype[t.inspect.custom]=function(e,r){return this[Symbol.toStringTag]+" "+t.inspect(this.toJSON(),r)}}catch{}function Ivt(t){throw Object.assign(new Error(`invalid config key requested: ${t}`),{code:"EBADKEY"})}function l5(t,e,r){let o=t.__specs[e];if(r&&!o&&(!t.__opts.other||!t.__opts.other(e)))Ivt(e);else{o||(o={});let a;for(let n of t.__providers){if(a=cBe(e,n),a===void 0&&o.aliases&&o.aliases.length){for(let u of o.aliases)if(u!==e&&(a=cBe(u,n),a!==void 0))break}if(a!==void 0)break}return a===void 0&&o.default!==void 0?typeof 
o.default=="function"?o.default(t):o.default:a}}function cBe(t,e){let r;return e.__isFiggyPudding?r=l5(e,t,!1):typeof e.get=="function"?r=e.get(t):r=e[t],r}var ABe={has(t,e){return e in t.__specs&&l5(t,e,!1)!==void 0},ownKeys(t){return Object.keys(t.__specs)},get(t,e){return typeof e=="symbol"||e.slice(0,2)==="__"||e in Av.prototype?t[e]:t.get(e)},set(t,e,r){if(typeof e=="symbol"||e.slice(0,2)==="__")return t[e]=r,!0;throw new Error("figgyPudding options cannot be modified. Use .concat() instead.")},deleteProperty(){throw new Error("figgyPudding options cannot be deleted. Use .concat() and shadow them instead.")}};fBe.exports=Bvt;function Bvt(t,e){function r(...o){return new Proxy(new Av(t,e,o),ABe)}return r}function uBe(t){let e=[];return t.forEach(r=>e.unshift(r)),e}function vvt(t){return Object.keys(t).map(e=>[e,t[e]])}});var dBe=_((per,IA)=>{"use strict";var pv=ve("crypto"),Dvt=pBe(),Pvt=ve("stream").Transform,hBe=["sha256","sha384","sha512"],Svt=/^[a-z0-9+/]+(?:=?=?)$/i,bvt=/^([^-]+)-([^?]+)([?\S*]*)$/,xvt=/^([^-]+)-([A-Za-z0-9+/=]{44,88})(\?[\x21-\x7E]*)*$/,kvt=/^[\x21-\x7E]+$/,oa=Dvt({algorithms:{default:["sha512"]},error:{default:!1},integrity:{},options:{default:[]},pickAlgorithm:{default:()=>Ovt},Promise:{default:()=>Promise},sep:{default:" "},single:{default:!1},size:{},strict:{default:!1}}),Zh=class{get isHash(){return!0}constructor(e,r){r=oa(r);let o=!!r.strict;this.source=e.trim();let a=this.source.match(o?xvt:bvt);if(!a||o&&!hBe.some(u=>u===a[1]))return;this.algorithm=a[1],this.digest=a[2];let n=a[3];this.options=n?n.slice(1).split("?"):[]}hexDigest(){return this.digest&&Buffer.from(this.digest,"base64").toString("hex")}toJSON(){return this.toString()}toString(e){if(e=oa(e),e.strict&&!(hBe.some(o=>o===this.algorithm)&&this.digest.match(Svt)&&(this.options||[]).every(o=>o.match(kvt))))return"";let r=this.options&&this.options.length?`?${this.options.join("?")}`:"";return`${this.algorithm}-${this.digest}${r}`}},fd=class{get isIntegrity(){return!0}toJSON(){return this.toString()}toString(e){e=oa(e);let r=e.sep||" ";return e.strict&&(r=r.replace(/\S+/g," ")),Object.keys(this).map(o=>this[o].map(a=>Zh.prototype.toString.call(a,e)).filter(a=>a.length).join(r)).filter(o=>o.length).join(r)}concat(e,r){r=oa(r);let o=typeof e=="string"?e:fv(e,r);return wA(`${this.toString(r)} ${o}`,r)}hexDigest(){return wA(this,{single:!0}).hexDigest()}match(e,r){r=oa(r);let o=wA(e,r),a=o.pickAlgorithm(r);return this[a]&&o[a]&&this[a].find(n=>o[a].find(u=>n.digest===u.digest))||!1}pickAlgorithm(e){e=oa(e);let r=e.pickAlgorithm,o=Object.keys(this);if(!o.length)throw new Error(`No algorithms available for ${JSON.stringify(this.toString())}`);return o.reduce((a,n)=>r(a,n)||a)}};IA.exports.parse=wA;function wA(t,e){if(e=oa(e),typeof t=="string")return c5(t,e);if(t.algorithm&&t.digest){let r=new fd;return r[t.algorithm]=[t],c5(fv(r,e),e)}else return c5(fv(t,e),e)}function c5(t,e){return e.single?new Zh(t,e):t.trim().split(/\s+/).reduce((r,o)=>{let a=new Zh(o,e);if(a.algorithm&&a.digest){let n=a.algorithm;r[n]||(r[n]=[]),r[n].push(a)}return r},new fd)}IA.exports.stringify=fv;function fv(t,e){return e=oa(e),t.algorithm&&t.digest?Zh.prototype.toString.call(t,e):typeof t=="string"?fv(wA(t,e),e):fd.prototype.toString.call(t,e)}IA.exports.fromHex=Qvt;function Qvt(t,e,r){r=oa(r);let o=r.options&&r.options.length?`?${r.options.join("?")}`:"";return wA(`${e}-${Buffer.from(t,"hex").toString("base64")}${o}`,r)}IA.exports.fromData=Fvt;function Fvt(t,e){e=oa(e);let 
r=e.algorithms,o=e.options&&e.options.length?`?${e.options.join("?")}`:"";return r.reduce((a,n)=>{let u=pv.createHash(n).update(t).digest("base64"),A=new Zh(`${n}-${u}${o}`,e);if(A.algorithm&&A.digest){let p=A.algorithm;a[p]||(a[p]=[]),a[p].push(A)}return a},new fd)}IA.exports.fromStream=Rvt;function Rvt(t,e){e=oa(e);let r=e.Promise||Promise,o=u5(e);return new r((a,n)=>{t.pipe(o),t.on("error",n),o.on("error",n);let u;o.on("integrity",A=>{u=A}),o.on("end",()=>a(u)),o.on("data",()=>{})})}IA.exports.checkData=Tvt;function Tvt(t,e,r){if(r=oa(r),e=wA(e,r),!Object.keys(e).length){if(r.error)throw Object.assign(new Error("No valid integrity hashes to check against"),{code:"EINTEGRITY"});return!1}let o=e.pickAlgorithm(r),a=pv.createHash(o).update(t).digest("base64"),n=wA({algorithm:o,digest:a}),u=n.match(e,r);if(u||!r.error)return u;if(typeof r.size=="number"&&t.length!==r.size){let A=new Error(`data size mismatch when checking ${e}. + Wanted: ${r.size} + Found: ${t.length}`);throw A.code="EBADSIZE",A.found=t.length,A.expected=r.size,A.sri=e,A}else{let A=new Error(`Integrity checksum failed when using ${o}: Wanted ${e}, but got ${n}. (${t.length} bytes)`);throw A.code="EINTEGRITY",A.found=n,A.expected=e,A.algorithm=o,A.sri=e,A}}IA.exports.checkStream=Nvt;function Nvt(t,e,r){r=oa(r);let o=r.Promise||Promise,a=u5(r.concat({integrity:e}));return new o((n,u)=>{t.pipe(a),t.on("error",u),a.on("error",u);let A;a.on("verified",p=>{A=p}),a.on("end",()=>n(A)),a.on("data",()=>{})})}IA.exports.integrityStream=u5;function u5(t){t=oa(t);let e=t.integrity&&wA(t.integrity,t),r=e&&Object.keys(e).length,o=r&&e.pickAlgorithm(t),a=r&&e[o],n=Array.from(new Set(t.algorithms.concat(o?[o]:[]))),u=n.map(pv.createHash),A=0,p=new Pvt({transform(h,E,I){A+=h.length,u.forEach(v=>v.update(h,E)),I(null,h,E)}}).on("end",()=>{let h=t.options&&t.options.length?`?${t.options.join("?")}`:"",E=wA(u.map((v,x)=>`${n[x]}-${v.digest("base64")}${h}`).join(" "),t),I=r&&E.match(e,t);if(typeof t.size=="number"&&A!==t.size){let v=new Error(`stream size mismatch when checking ${e}. + Wanted: ${t.size} + Found: ${A}`);v.code="EBADSIZE",v.found=A,v.expected=t.size,v.sri=e,p.emit("error",v)}else if(t.integrity&&!I){let v=new Error(`${e} integrity checksum failed when using ${o}: wanted ${a} but got ${E}. 
(${A} bytes)`);v.code="EINTEGRITY",v.found=E,v.expected=a,v.algorithm=o,v.sri=e,p.emit("error",v)}else p.emit("size",A),p.emit("integrity",E),I&&p.emit("verified",I)});return p}IA.exports.create=Lvt;function Lvt(t){t=oa(t);let e=t.algorithms,r=t.options.length?`?${t.options.join("?")}`:"",o=e.map(pv.createHash);return{update:function(a,n){return o.forEach(u=>u.update(a,n)),this},digest:function(a){return e.reduce((u,A)=>{let p=o.shift().digest("base64"),h=new Zh(`${A}-${p}${r}`,t);if(h.algorithm&&h.digest){let E=h.algorithm;u[E]||(u[E]=[]),u[E].push(h)}return u},new fd)}}}var Mvt=new Set(pv.getHashes()),gBe=["md5","whirlpool","sha1","sha224","sha256","sha384","sha512","sha3","sha3-256","sha3-384","sha3-512","sha3_256","sha3_384","sha3_512"].filter(t=>Mvt.has(t));function Ovt(t,e){return gBe.indexOf(t.toLowerCase())>=gBe.indexOf(e.toLowerCase())?t:e}});var YBe=_((dir,GBe)=>{var ODt=$N();function UDt(t){return ODt(t)?void 0:t}GBe.exports=UDt});var KBe=_((mir,WBe)=>{var _Dt=kb(),HDt=B8(),qDt=S8(),jDt=Mg(),GDt=Ag(),YDt=YBe(),WDt=m_(),KDt=I8(),VDt=1,zDt=2,JDt=4,XDt=WDt(function(t,e){var r={};if(t==null)return r;var o=!1;e=_Dt(e,function(n){return n=jDt(n,t),o||(o=n.length>1),n}),GDt(t,KDt(t),r),o&&(r=HDt(r,VDt|zDt|JDt,YDt));for(var a=e.length;a--;)qDt(r,e[a]);return r});WBe.exports=XDt});Pt();Ge();Pt();var ZBe=ve("child_process"),$Be=Ze(X0());qt();var Uy=new Map([]);var W1={};Vt(W1,{BaseCommand:()=>ut,WorkspaceRequiredError:()=>sr,getCli:()=>ihe,getDynamicLibs:()=>nhe,getPluginConfiguration:()=>Hy,openWorkspace:()=>_y,pluginCommands:()=>Uy,runExit:()=>Wx});qt();var ut=class extends it{constructor(){super(...arguments);this.cwd=ge.String("--cwd",{hidden:!0})}validateAndExecute(){if(typeof this.cwd<"u")throw new st("The --cwd option is ambiguous when used anywhere else than the very first parameter provided in the command line, before even the command path");return super.validateAndExecute()}};Ge();Pt();qt();var sr=class extends st{constructor(e,r){let o=K.relative(e,r),a=K.join(e,Ut.fileName);super(`This command can only be run from within a workspace of your project (${o} isn't a workspace of ${a}).`)}};Ge();Pt();nA();Nl();g1();qt();var OAt=Ze(Jn());el();var nhe=()=>new Map([["@yarnpkg/cli",W1],["@yarnpkg/core",Y1],["@yarnpkg/fslib",kw],["@yarnpkg/libzip",p1],["@yarnpkg/parsers",Ow],["@yarnpkg/shell",E1],["clipanion",Jw],["semver",OAt],["typanion",Vo]]);Ge();async function _y(t,e){let{project:r,workspace:o}=await kt.find(t,e);if(!o)throw new sr(r.cwd,e);return o}Ge();Pt();nA();Nl();g1();qt();var oPt=Ze(Jn());el();var K8={};Vt(K8,{AddCommand:()=>Yy,BinCommand:()=>Wy,CacheCleanCommand:()=>Ky,ClipanionCommand:()=>$y,ConfigCommand:()=>Xy,ConfigGetCommand:()=>Vy,ConfigSetCommand:()=>zy,ConfigUnsetCommand:()=>Jy,DedupeCommand:()=>Zy,EntryCommand:()=>tE,ExecCommand:()=>nE,ExplainCommand:()=>oE,ExplainPeerRequirementsCommand:()=>iE,HelpCommand:()=>eE,InfoCommand:()=>aE,LinkCommand:()=>cE,NodeCommand:()=>uE,PluginCheckCommand:()=>AE,PluginImportCommand:()=>hE,PluginImportSourcesCommand:()=>gE,PluginListCommand:()=>fE,PluginRemoveCommand:()=>dE,PluginRuntimeCommand:()=>mE,RebuildCommand:()=>yE,RemoveCommand:()=>EE,RunCommand:()=>wE,RunIndexCommand:()=>CE,SetResolutionCommand:()=>IE,SetVersionCommand:()=>sE,SetVersionSourcesCommand:()=>pE,UnlinkCommand:()=>BE,UpCommand:()=>vE,VersionCommand:()=>rE,WhyCommand:()=>DE,WorkspaceCommand:()=>kE,WorkspacesListCommand:()=>xE,YarnCommand:()=>lE,dedupeUtils:()=>rk,default:()=>Fgt,suggestUtils:()=>Zc});var Nde=Ze(X0());Ge();Ge();Ge();qt();var Y0e=Ze(J1());el();var 
Zc={};Vt(Zc,{Modifier:()=>m8,Strategy:()=>$x,Target:()=>X1,WorkspaceModifier:()=>_0e,applyModifier:()=>ipt,extractDescriptorFromPath:()=>y8,extractRangeModifier:()=>H0e,fetchDescriptorFrom:()=>E8,findProjectDescriptors:()=>G0e,getModifier:()=>Z1,getSuggestedDescriptors:()=>$1,makeWorkspaceDescriptor:()=>j0e,toWorkspaceModifier:()=>q0e});Ge();Ge();Pt();var d8=Ze(Jn()),rpt="workspace:",X1=(o=>(o.REGULAR="dependencies",o.DEVELOPMENT="devDependencies",o.PEER="peerDependencies",o))(X1||{}),m8=(o=>(o.CARET="^",o.TILDE="~",o.EXACT="",o))(m8||{}),_0e=(o=>(o.CARET="^",o.TILDE="~",o.EXACT="*",o))(_0e||{}),$x=(n=>(n.KEEP="keep",n.REUSE="reuse",n.PROJECT="project",n.LATEST="latest",n.CACHE="cache",n))($x||{});function Z1(t,e){return t.exact?"":t.caret?"^":t.tilde?"~":e.configuration.get("defaultSemverRangePrefix")}var npt=/^([\^~]?)[0-9]+(?:\.[0-9]+){0,2}(?:-\S+)?$/;function H0e(t,{project:e}){let r=t.match(npt);return r?r[1]:e.configuration.get("defaultSemverRangePrefix")}function ipt(t,e){let{protocol:r,source:o,params:a,selector:n}=G.parseRange(t.range);return d8.default.valid(n)&&(n=`${e}${t.range}`),G.makeDescriptor(t,G.makeRange({protocol:r,source:o,params:a,selector:n}))}function q0e(t){switch(t){case"^":return"^";case"~":return"~";case"":return"*";default:throw new Error(`Assertion failed: Unknown modifier: "${t}"`)}}function j0e(t,e){return G.makeDescriptor(t.anchoredDescriptor,`${rpt}${q0e(e)}`)}async function G0e(t,{project:e,target:r}){let o=new Map,a=n=>{let u=o.get(n.descriptorHash);return u||o.set(n.descriptorHash,u={descriptor:n,locators:[]}),u};for(let n of e.workspaces)if(r==="peerDependencies"){let u=n.manifest.peerDependencies.get(t.identHash);u!==void 0&&a(u).locators.push(n.anchoredLocator)}else{let u=n.manifest.dependencies.get(t.identHash),A=n.manifest.devDependencies.get(t.identHash);r==="devDependencies"?A!==void 0?a(A).locators.push(n.anchoredLocator):u!==void 0&&a(u).locators.push(n.anchoredLocator):u!==void 0?a(u).locators.push(n.anchoredLocator):A!==void 0&&a(A).locators.push(n.anchoredLocator)}return o}async function y8(t,{cwd:e,workspace:r}){return await spt(async o=>{K.isAbsolute(t)||(t=K.relative(r.cwd,K.resolve(e,t)),t.match(/^\.{0,2}\//)||(t=`./${t}`));let{project:a}=r,n=await E8(G.makeIdent(null,"archive"),t,{project:r.project,cache:o,workspace:r});if(!n)throw new Error("Assertion failed: The descriptor should have been found");let u=new ki,A=a.configuration.makeResolver(),p=a.configuration.makeFetcher(),h={checksums:a.storedChecksums,project:a,cache:o,fetcher:p,report:u,resolver:A},E=A.bindDescriptor(n,r.anchoredLocator,h),I=G.convertDescriptorToLocator(E),v=await p.fetch(I,h),x=await Ut.find(v.prefixPath,{baseFs:v.packageFs});if(!x.name)throw new Error("Target path doesn't have a name");return G.makeDescriptor(x.name,t)})}async function $1(t,{project:e,workspace:r,cache:o,target:a,fixed:n,modifier:u,strategies:A,maxResults:p=1/0}){if(!(p>=0))throw new Error(`Invalid maxResults (${p})`);let[h,E]=t.range!=="unknown"?n||Lr.validRange(t.range)||!t.range.match(/^[a-z0-9._-]+$/i)?[t.range,"latest"]:["unknown",t.range]:["unknown","latest"];if(h!=="unknown")return{suggestions:[{descriptor:t,name:`Use ${G.prettyDescriptor(e.configuration,t)}`,reason:"(unambiguous explicit request)"}],rejections:[]};let I=typeof r<"u"&&r!==null&&r.manifest[a].get(t.identHash)||null,v=[],x=[],C=async R=>{try{await R()}catch(L){x.push(L)}};for(let R of A){if(v.length>=p)break;switch(R){case"keep":await C(async()=>{I&&v.push({descriptor:I,name:`Keep 
${G.prettyDescriptor(e.configuration,I)}`,reason:"(no changes)"})});break;case"reuse":await C(async()=>{for(let{descriptor:L,locators:U}of(await G0e(t,{project:e,target:a})).values()){if(U.length===1&&U[0].locatorHash===r.anchoredLocator.locatorHash&&A.includes("keep"))continue;let z=`(originally used by ${G.prettyLocator(e.configuration,U[0])}`;z+=U.length>1?` and ${U.length-1} other${U.length>2?"s":""})`:")",v.push({descriptor:L,name:`Reuse ${G.prettyDescriptor(e.configuration,L)}`,reason:z})}});break;case"cache":await C(async()=>{for(let L of e.storedDescriptors.values())L.identHash===t.identHash&&v.push({descriptor:L,name:`Reuse ${G.prettyDescriptor(e.configuration,L)}`,reason:"(already used somewhere in the lockfile)"})});break;case"project":await C(async()=>{if(r.manifest.name!==null&&t.identHash===r.manifest.name.identHash)return;let L=e.tryWorkspaceByIdent(t);if(L===null)return;let U=j0e(L,u);v.push({descriptor:U,name:`Attach ${G.prettyDescriptor(e.configuration,U)}`,reason:`(local workspace at ${pe.pretty(e.configuration,L.relativeCwd,pe.Type.PATH)})`})});break;case"latest":{let L=e.configuration.get("enableNetwork"),U=e.configuration.get("enableOfflineMode");await C(async()=>{if(a==="peerDependencies")v.push({descriptor:G.makeDescriptor(t,"*"),name:"Use *",reason:"(catch-all peer dependency pattern)"});else if(!L&&!U)v.push({descriptor:null,name:"Resolve from latest",reason:pe.pretty(e.configuration,"(unavailable because enableNetwork is toggled off)","grey")});else{let z=await E8(t,E,{project:e,cache:o,workspace:r,modifier:u});z&&v.push({descriptor:z,name:`Use ${G.prettyDescriptor(e.configuration,z)}`,reason:`(resolved from ${U?"the cache":"latest"})`})}})}break}}return{suggestions:v.slice(0,p),rejections:x.slice(0,p)}}async function E8(t,e,{project:r,cache:o,workspace:a,preserveModifier:n=!0,modifier:u}){let A=r.configuration.normalizeDependency(G.makeDescriptor(t,e)),p=new ki,h=r.configuration.makeFetcher(),E=r.configuration.makeResolver(),I={project:r,fetcher:h,cache:o,checksums:r.storedChecksums,report:p,cacheOptions:{skipIntegrityCheck:!0}},v={...I,resolver:E,fetchOptions:I},x=E.bindDescriptor(A,a.anchoredLocator,v),C=await E.getCandidates(x,{},v);if(C.length===0)return null;let R=C[0],{protocol:L,source:U,params:z,selector:te}=G.parseRange(G.convertToManifestRange(R.reference));if(L===r.configuration.get("defaultProtocol")&&(L=null),d8.default.valid(te)){let ae=te;if(typeof u<"u")te=u+te;else if(n!==!1){let Ce=typeof n=="string"?n:A.range;te=H0e(Ce,{project:r})+te}let le=G.makeDescriptor(R,G.makeRange({protocol:L,source:U,params:z,selector:te}));(await E.getCandidates(r.configuration.normalizeDependency(le),{},v)).length!==1&&(te=ae)}return G.makeDescriptor(R,G.makeRange({protocol:L,source:U,params:z,selector:te}))}async function spt(t){return await oe.mktempPromise(async e=>{let r=Ke.create(e);return r.useWithSource(e,{enableMirror:!1,compressionLevel:0},e,{overwrite:!0}),await t(new Gr(e,{configuration:r,check:!1,immutable:!1}))})}var Yy=class extends ut{constructor(){super(...arguments);this.json=ge.Boolean("--json",!1,{description:"Format the output as an NDJSON stream"});this.fixed=ge.Boolean("-F,--fixed",!1,{description:"Store dependency tags as-is instead of resolving them"});this.exact=ge.Boolean("-E,--exact",!1,{description:"Don't use any semver modifier on the resolved range"});this.tilde=ge.Boolean("-T,--tilde",!1,{description:"Use the `~` semver modifier on the resolved range"});this.caret=ge.Boolean("-C,--caret",!1,{description:"Use the `^` semver modifier on 
the resolved range"});this.dev=ge.Boolean("-D,--dev",!1,{description:"Add a package as a dev dependency"});this.peer=ge.Boolean("-P,--peer",!1,{description:"Add a package as a peer dependency"});this.optional=ge.Boolean("-O,--optional",!1,{description:"Add / upgrade a package to an optional regular / peer dependency"});this.preferDev=ge.Boolean("--prefer-dev",!1,{description:"Add / upgrade a package to a dev dependency"});this.interactive=ge.Boolean("-i,--interactive",{description:"Reuse the specified package from other workspaces in the project"});this.cached=ge.Boolean("--cached",!1,{description:"Reuse the highest version already used somewhere within the project"});this.mode=ge.String("--mode",{description:"Change what artifacts installs generate",validator:Js(hl)});this.silent=ge.Boolean("--silent",{hidden:!0});this.packages=ge.Rest()}static{this.paths=[["add"]]}static{this.usage=it.Usage({description:"add dependencies to the project",details:"\n This command adds a package to the package.json for the nearest workspace.\n\n - If it didn't exist before, the package will by default be added to the regular `dependencies` field, but this behavior can be overriden thanks to the `-D,--dev` flag (which will cause the dependency to be added to the `devDependencies` field instead) and the `-P,--peer` flag (which will do the same but for `peerDependencies`).\n\n - If the package was already listed in your dependencies, it will by default be upgraded whether it's part of your `dependencies` or `devDependencies` (it won't ever update `peerDependencies`, though).\n\n - If set, the `--prefer-dev` flag will operate as a more flexible `-D,--dev` in that it will add the package to your `devDependencies` if it isn't already listed in either `dependencies` or `devDependencies`, but it will also happily upgrade your `dependencies` if that's what you already use (whereas `-D,--dev` would throw an exception).\n\n - If set, the `-O,--optional` flag will add the package to the `optionalDependencies` field and, in combination with the `-P,--peer` flag, it will add the package as an optional peer dependency. If the package was already listed in your `dependencies`, it will be upgraded to `optionalDependencies`. If the package was already listed in your `peerDependencies`, in combination with the `-P,--peer` flag, it will be upgraded to an optional peer dependency: `\"peerDependenciesMeta\": { \"\": { \"optional\": true } }`\n\n - If the added package doesn't specify a range at all its `latest` tag will be resolved and the returned version will be used to generate a new semver range (using the `^` modifier by default unless otherwise configured via the `defaultSemverRangePrefix` configuration, or the `~` modifier if `-T,--tilde` is specified, or no modifier at all if `-E,--exact` is specified). 
Two exceptions to this rule: the first one is that if the package is a workspace then its local version will be used, and the second one is that if you use `-P,--peer` the default range will be `*` and won't be resolved at all.\n\n - If the added package specifies a range (such as `^1.0.0`, `latest`, or `rc`), Yarn will add this range as-is in the resulting package.json entry (in particular, tags such as `rc` will be encoded as-is rather than being converted into a semver range).\n\n If the `--cached` option is used, Yarn will preferably reuse the highest version already used somewhere within the project, even if through a transitive dependency.\n\n If the `-i,--interactive` option is used (or if the `preferInteractive` settings is toggled on) the command will first try to check whether other workspaces in the project use the specified package and, if so, will offer to reuse them.\n\n If the `--mode=` option is set, Yarn will change which artifacts are generated. The modes currently supported are:\n\n - `skip-build` will not run the build scripts at all. Note that this is different from setting `enableScripts` to false because the latter will disable build scripts, and thus affect the content of the artifacts generated on disk, whereas the former will just disable the build step - but not the scripts themselves, which just won't run.\n\n - `update-lockfile` will skip the link step altogether, and only fetch packages that are missing from the lockfile (or that have no associated checksums). This mode is typically used by tools like Renovate or Dependabot to keep a lockfile up-to-date without incurring the full install cost.\n\n For a compilation of all the supported protocols, please consult the dedicated page from our website: https://yarnpkg.com/protocols.\n ",examples:[["Add a regular package to the current workspace","$0 add lodash"],["Add a specific version for a package to the current workspace","$0 add lodash@1.2.3"],["Add a package from a GitHub repository (the master branch) to the current workspace using a URL","$0 add lodash@https://github.com/lodash/lodash"],["Add a package from a GitHub repository (the master branch) to the current workspace using the GitHub protocol","$0 add lodash@github:lodash/lodash"],["Add a package from a GitHub repository (the master branch) to the current workspace using the GitHub protocol (shorthand)","$0 add lodash@lodash/lodash"],["Add a package from a specific branch of a GitHub repository to the current workspace using the GitHub protocol (shorthand)","$0 add lodash-es@lodash/lodash#es"],["Add a local package (gzipped tarball format) to the current workspace","$0 add local-package-name@file:../path/to/local-package-name-v0.1.2.tgz"]]})}async execute(){let r=await Ke.find(this.context.cwd,this.context.plugins),{project:o,workspace:a}=await kt.find(r,this.context.cwd),n=await Gr.find(r);if(!a)throw new sr(o.cwd,this.context.cwd);await o.restoreInstallState({restoreResolutions:!1});let u=this.fixed,A=r.isInteractive({interactive:this.interactive,stdout:this.context.stdout}),p=A||r.get("preferReuse"),h=Z1(this,o),E=[p?"reuse":void 0,"project",this.cached?"cache":void 0,"latest"].filter(U=>typeof U<"u"),I=A?1/0:1,v=await Promise.all(this.packages.map(async U=>{let z=U.match(/^\.{0,2}\//)?await y8(U,{cwd:this.context.cwd,workspace:a}):G.tryParseDescriptor(U),te=U.match(/^(https?:|git@github)/);if(te)throw new st(`It seems you are trying to add a package using a ${pe.pretty(r,`${te[0]}...`,pe.Type.RANGE)} url; we now require package names to be 
explicitly specified. +Try running the command again with the package name prefixed: ${pe.pretty(r,"yarn add",pe.Type.CODE)} ${pe.pretty(r,G.makeDescriptor(G.makeIdent(null,"my-package"),`${te[0]}...`),pe.Type.DESCRIPTOR)}`);if(!z)throw new st(`The ${pe.pretty(r,U,pe.Type.CODE)} string didn't match the required format (package-name@range). Did you perhaps forget to explicitly reference the package name?`);let ae=opt(a,z,{dev:this.dev,peer:this.peer,preferDev:this.preferDev,optional:this.optional});return await Promise.all(ae.map(async ce=>{let Ce=await $1(z,{project:o,workspace:a,cache:n,fixed:u,target:ce,modifier:h,strategies:E,maxResults:I});return{request:z,suggestedDescriptors:Ce,target:ce}}))})).then(U=>U.flat()),x=await AA.start({configuration:r,stdout:this.context.stdout,suggestInstall:!1},async U=>{for(let{request:z,suggestedDescriptors:{suggestions:te,rejections:ae}}of v)if(te.filter(ce=>ce.descriptor!==null).length===0){let[ce]=ae;if(typeof ce>"u")throw new Error("Assertion failed: Expected an error to have been set");o.configuration.get("enableNetwork")?U.reportError(27,`${G.prettyDescriptor(r,z)} can't be resolved to a satisfying range`):U.reportError(27,`${G.prettyDescriptor(r,z)} can't be resolved to a satisfying range (note: network resolution has been disabled)`),U.reportSeparator(),U.reportExceptionOnce(ce)}});if(x.hasErrors())return x.exitCode();let C=!1,R=[],L=[];for(let{suggestedDescriptors:{suggestions:U},target:z}of v){let te,ae=U.filter(de=>de.descriptor!==null),le=ae[0].descriptor,ce=ae.every(de=>G.areDescriptorsEqual(de.descriptor,le));ae.length===1||ce?te=le:(C=!0,{answer:te}=await(0,Y0e.prompt)({type:"select",name:"answer",message:"Which range do you want to use?",choices:U.map(({descriptor:de,name:Be,reason:Ee})=>de?{name:Be,hint:Ee,descriptor:de}:{name:Be,hint:Ee,disabled:!0}),onCancel:()=>process.exit(130),result(de){return this.find(de,"descriptor")},stdin:this.context.stdin,stdout:this.context.stdout}));let Ce=a.manifest[z].get(te.identHash);(typeof Ce>"u"||Ce.descriptorHash!==te.descriptorHash)&&(a.manifest[z].set(te.identHash,te),this.optional&&(z==="dependencies"?a.manifest.ensureDependencyMeta({...te,range:"unknown"}).optional=!0:z==="peerDependencies"&&(a.manifest.ensurePeerDependencyMeta({...te,range:"unknown"}).optional=!0)),typeof Ce>"u"?R.push([a,z,te,E]):L.push([a,z,Ce,te]))}return await r.triggerMultipleHooks(U=>U.afterWorkspaceDependencyAddition,R),await r.triggerMultipleHooks(U=>U.afterWorkspaceDependencyReplacement,L),C&&this.context.stdout.write(` +`),await o.installWithNewReport({json:this.json,stdout:this.context.stdout,quiet:this.context.quiet},{cache:n,mode:this.mode})}};function opt(t,e,{dev:r,peer:o,preferDev:a,optional:n}){let u=t.manifest.dependencies.has(e.identHash),A=t.manifest.devDependencies.has(e.identHash),p=t.manifest.peerDependencies.has(e.identHash);if((r||o)&&u)throw new st(`Package "${G.prettyIdent(t.project.configuration,e)}" is already listed as a regular dependency - remove the -D,-P flags or remove it from your dependencies first`);if(!r&&!o&&p)throw new st(`Package "${G.prettyIdent(t.project.configuration,e)}" is already listed as a peer dependency - use either of -D or -P, or remove it from your peer dependencies first`);if(n&&A)throw new st(`Package "${G.prettyIdent(t.project.configuration,e)}" is already listed as a dev dependency - remove the -O flag or remove it from your dev dependencies first`);if(n&&!o&&p)throw new st(`Package "${G.prettyIdent(t.project.configuration,e)}" is already listed as a peer dependency 
- remove the -O flag or add the -P flag or remove it from your peer dependencies first`);if((r||a)&&n)throw new st(`Package "${G.prettyIdent(t.project.configuration,e)}" cannot simultaneously be a dev dependency and an optional dependency`);let h=[];return o&&h.push("peerDependencies"),(r||a)&&h.push("devDependencies"),n&&h.push("dependencies"),h.length>0?h:A?["devDependencies"]:p?["peerDependencies"]:["dependencies"]}Ge();Ge();qt();var Wy=class extends ut{constructor(){super(...arguments);this.verbose=ge.Boolean("-v,--verbose",!1,{description:"Print both the binary name and the locator of the package that provides the binary"});this.json=ge.Boolean("--json",!1,{description:"Format the output as an NDJSON stream"});this.name=ge.String({required:!1})}static{this.paths=[["bin"]]}static{this.usage=it.Usage({description:"get the path to a binary script",details:` + When used without arguments, this command will print the list of all the binaries available in the current workspace. Adding the \`-v,--verbose\` flag will cause the output to contain both the binary name and the locator of the package that provides the binary. + + When an argument is specified, this command will just print the path to the binary on the standard output and exit. Note that the reported path may be stored within a zip archive. + `,examples:[["List all the available binaries","$0 bin"],["Print the path to a specific binary","$0 bin eslint"]]})}async execute(){let r=await Ke.find(this.context.cwd,this.context.plugins),{project:o,locator:a}=await kt.find(r,this.context.cwd);if(await o.restoreInstallState(),this.name){let A=(await An.getPackageAccessibleBinaries(a,{project:o})).get(this.name);if(!A)throw new st(`Couldn't find a binary named "${this.name}" for package "${G.prettyLocator(r,a)}"`);let[,p]=A;return this.context.stdout.write(`${p} +`),0}return(await Rt.start({configuration:r,json:this.json,stdout:this.context.stdout},async u=>{let A=await An.getPackageAccessibleBinaries(a,{project:o}),h=Array.from(A.keys()).reduce((E,I)=>Math.max(E,I.length),0);for(let[E,[I,v]]of A)u.reportJson({name:E,source:G.stringifyIdent(I),path:v});if(this.verbose)for(let[E,[I]]of A)u.reportInfo(null,`${E.padEnd(h," ")} ${G.prettyLocator(r,I)}`);else for(let E of A.keys())u.reportInfo(null,E)})).exitCode()}};Ge();Pt();qt();var Ky=class extends ut{constructor(){super(...arguments);this.mirror=ge.Boolean("--mirror",!1,{description:"Remove the global cache files instead of the local cache files"});this.all=ge.Boolean("--all",!1,{description:"Remove both the global cache files and the local cache files of the current project"})}static{this.paths=[["cache","clean"],["cache","clear"]]}static{this.usage=it.Usage({description:"remove the shared cache files",details:` + This command will remove all the files from the cache. 
+ `,examples:[["Remove all the local archives","$0 cache clean"],["Remove all the archives stored in the ~/.yarn directory","$0 cache clean --mirror"]]})}async execute(){let r=await Ke.find(this.context.cwd,this.context.plugins),o=await Gr.find(r);return(await Rt.start({configuration:r,stdout:this.context.stdout},async()=>{let n=(this.all||this.mirror)&&o.mirrorCwd!==null,u=!this.mirror;n&&(await oe.removePromise(o.mirrorCwd),await r.triggerHook(A=>A.cleanGlobalArtifacts,r)),u&&await oe.removePromise(o.cwd)})).exitCode()}};Ge();qt();var K0e=Ze(e2()),C8=ve("util"),Vy=class extends ut{constructor(){super(...arguments);this.why=ge.Boolean("--why",!1,{description:"Print the explanation for why a setting has its value"});this.json=ge.Boolean("--json",!1,{description:"Format the output as an NDJSON stream"});this.unsafe=ge.Boolean("--no-redacted",!1,{description:"Don't redact secrets (such as tokens) from the output"});this.name=ge.String()}static{this.paths=[["config","get"]]}static{this.usage=it.Usage({description:"read a configuration settings",details:` + This command will print a configuration setting. + + Secrets (such as tokens) will be redacted from the output by default. If this behavior isn't desired, set the \`--no-redacted\` to get the untransformed value. + `,examples:[["Print a simple configuration setting","yarn config get yarnPath"],["Print a complex configuration setting","yarn config get packageExtensions"],["Print a nested field from the configuration",`yarn config get 'npmScopes["my-company"].npmRegistryServer'`],["Print a token from the configuration","yarn config get npmAuthToken --no-redacted"],["Print a configuration setting as JSON","yarn config get packageExtensions --json"]]})}async execute(){let r=await Ke.find(this.context.cwd,this.context.plugins),o=this.name.replace(/[.[].*$/,""),a=this.name.replace(/^[^.[]*/,"");if(typeof r.settings.get(o)>"u")throw new st(`Couldn't find a configuration settings named "${o}"`);let u=r.getSpecial(o,{hideSecrets:!this.unsafe,getNativePaths:!0}),A=He.convertMapsToIndexableObjects(u),p=a?(0,K0e.default)(A,a):A,h=await Rt.start({configuration:r,includeFooter:!1,json:this.json,stdout:this.context.stdout},async E=>{E.reportJson(p)});if(!this.json){if(typeof p=="string")return this.context.stdout.write(`${p} +`),h.exitCode();C8.inspect.styles.name="cyan",this.context.stdout.write(`${(0,C8.inspect)(p,{depth:1/0,colors:r.get("enableColors"),compact:!1})} +`)}return h.exitCode()}};Ge();qt();var Mge=Ze(v8()),Oge=Ze(e2()),Uge=Ze(D8()),P8=ve("util"),zy=class extends ut{constructor(){super(...arguments);this.json=ge.Boolean("--json",!1,{description:"Set complex configuration settings to JSON values"});this.home=ge.Boolean("-H,--home",!1,{description:"Update the home configuration instead of the project configuration"});this.name=ge.String();this.value=ge.String()}static{this.paths=[["config","set"]]}static{this.usage=it.Usage({description:"change a configuration settings",details:` + This command will set a configuration setting. + + When used without the \`--json\` flag, it can only set a simple configuration setting (a string, a number, or a boolean). + + When used with the \`--json\` flag, it can set both simple and complex configuration settings, including Arrays and Objects. 
+ `,examples:[["Set a simple configuration setting (a string, a number, or a boolean)","yarn config set initScope myScope"],["Set a simple configuration setting (a string, a number, or a boolean) using the `--json` flag",'yarn config set initScope --json \\"myScope\\"'],["Set a complex configuration setting (an Array) using the `--json` flag",`yarn config set unsafeHttpWhitelist --json '["*.example.com", "example.com"]'`],["Set a complex configuration setting (an Object) using the `--json` flag",`yarn config set packageExtensions --json '{ "@babel/parser@*": { "dependencies": { "@babel/types": "*" } } }'`],["Set a nested configuration setting",'yarn config set npmScopes.company.npmRegistryServer "https://npm.example.com"'],["Set a nested configuration setting using indexed access for non-simple keys",`yarn config set 'npmRegistries["//npm.example.com"].npmAuthToken' "ffffffff-ffff-ffff-ffff-ffffffffffff"`]]})}async execute(){let r=await Ke.find(this.context.cwd,this.context.plugins),o=()=>{if(!r.projectCwd)throw new st("This command must be run from within a project folder");return r.projectCwd},a=this.name.replace(/[.[].*$/,""),n=this.name.replace(/^[^.[]*\.?/,"");if(typeof r.settings.get(a)>"u")throw new st(`Couldn't find a configuration settings named "${a}"`);if(a==="enableStrictSettings")throw new st("This setting only affects the file it's in, and thus cannot be set from the CLI");let A=this.json?JSON.parse(this.value):this.value;await(this.home?C=>Ke.updateHomeConfiguration(C):C=>Ke.updateConfiguration(o(),C))(C=>{if(n){let R=(0,Mge.default)(C);return(0,Uge.default)(R,this.name,A),R}else return{...C,[a]:A}});let E=(await Ke.find(this.context.cwd,this.context.plugins)).getSpecial(a,{hideSecrets:!0,getNativePaths:!0}),I=He.convertMapsToIndexableObjects(E),v=n?(0,Oge.default)(I,n):I;return(await Rt.start({configuration:r,includeFooter:!1,stdout:this.context.stdout},async C=>{P8.inspect.styles.name="cyan",C.reportInfo(0,`Successfully set ${this.name} to ${(0,P8.inspect)(v,{depth:1/0,colors:r.get("enableColors"),compact:!1})}`)})).exitCode()}};Ge();qt();var Jge=Ze(v8()),Xge=Ze(jge()),Zge=Ze(b8()),Jy=class extends ut{constructor(){super(...arguments);this.home=ge.Boolean("-H,--home",!1,{description:"Update the home configuration instead of the project configuration"});this.name=ge.String()}static{this.paths=[["config","unset"]]}static{this.usage=it.Usage({description:"unset a configuration setting",details:` + This command will unset a configuration setting. 
+ `,examples:[["Unset a simple configuration setting","yarn config unset initScope"],["Unset a complex configuration setting","yarn config unset packageExtensions"],["Unset a nested configuration setting","yarn config unset npmScopes.company.npmRegistryServer"]]})}async execute(){let r=await Ke.find(this.context.cwd,this.context.plugins),o=()=>{if(!r.projectCwd)throw new st("This command must be run from within a project folder");return r.projectCwd},a=this.name.replace(/[.[].*$/,""),n=this.name.replace(/^[^.[]*\.?/,"");if(typeof r.settings.get(a)>"u")throw new st(`Couldn't find a configuration settings named "${a}"`);let A=this.home?h=>Ke.updateHomeConfiguration(h):h=>Ke.updateConfiguration(o(),h);return(await Rt.start({configuration:r,includeFooter:!1,stdout:this.context.stdout},async h=>{let E=!1;await A(I=>{if(!(0,Xge.default)(I,this.name))return h.reportWarning(0,`Configuration doesn't contain setting ${this.name}; there is nothing to unset`),E=!0,I;let v=n?(0,Jge.default)(I):{...I};return(0,Zge.default)(v,this.name),v}),E||h.reportInfo(0,`Successfully unset ${this.name}`)})).exitCode()}};Ge();Pt();qt();var tk=ve("util"),Xy=class extends ut{constructor(){super(...arguments);this.noDefaults=ge.Boolean("--no-defaults",!1,{description:"Omit the default values from the display"});this.json=ge.Boolean("--json",!1,{description:"Format the output as an NDJSON stream"});this.verbose=ge.Boolean("-v,--verbose",{hidden:!0});this.why=ge.Boolean("--why",{hidden:!0});this.names=ge.Rest()}static{this.paths=[["config"]]}static{this.usage=it.Usage({description:"display the current configuration",details:` + This command prints the current active configuration settings. + `,examples:[["Print the active configuration settings","$0 config"]]})}async execute(){let r=await Ke.find(this.context.cwd,this.context.plugins,{strict:!1}),o=await uy({configuration:r,stdout:this.context.stdout,forceError:this.json},[{option:this.verbose,message:"The --verbose option is deprecated, the settings' descriptions are now always displayed"},{option:this.why,message:"The --why option is deprecated, the settings' sources are now always displayed"}]);if(o!==null)return o;let a=this.names.length>0?[...new Set(this.names)].sort():[...r.settings.keys()].sort(),n,u=await Rt.start({configuration:r,json:this.json,stdout:this.context.stdout,includeFooter:!1},async A=>{if(r.invalid.size>0&&!this.json){for(let[p,h]of r.invalid)A.reportError(34,`Invalid configuration key "${p}" in ${h}`);A.reportSeparator()}if(this.json)for(let p of a){let h=r.settings.get(p);typeof h>"u"&&A.reportError(34,`No configuration key named "${p}"`);let E=r.getSpecial(p,{hideSecrets:!0,getNativePaths:!0}),I=r.sources.get(p)??"",v=I&&I[0]!=="<"?ue.fromPortablePath(I):I;A.reportJson({key:p,effective:E,source:v,...h})}else{let p={breakLength:1/0,colors:r.get("enableColors"),maxArrayLength:2},h={},E={children:h};for(let I of a){if(this.noDefaults&&!r.sources.has(I))continue;let v=r.settings.get(I),x=r.sources.get(I)??"",C=r.getSpecial(I,{hideSecrets:!0,getNativePaths:!0}),R={Description:{label:"Description",value:pe.tuple(pe.Type.MARKDOWN,{text:v.description,format:this.cli.format(),paragraphs:!1})},Source:{label:"Source",value:pe.tuple(x[0]==="<"?pe.Type.CODE:pe.Type.PATH,x)}};h[I]={value:pe.tuple(pe.Type.CODE,I),children:R};let L=(U,z)=>{for(let[te,ae]of z)if(ae instanceof Map){let le={};U[te]={children:le},L(le,ae)}else U[te]={label:te,value:pe.tuple(pe.Type.NO_HINT,(0,tk.inspect)(ae,p))}};C instanceof 
Map?L(R,C):R.Value={label:"Value",value:pe.tuple(pe.Type.NO_HINT,(0,tk.inspect)(C,p))}}a.length!==1&&(n=void 0),fs.emitTree(E,{configuration:r,json:this.json,stdout:this.context.stdout,separators:2})}});if(!this.json&&typeof n<"u"){let A=a[0],p=(0,tk.inspect)(r.getSpecial(A,{hideSecrets:!0,getNativePaths:!0}),{colors:r.get("enableColors")});this.context.stdout.write(` +`),this.context.stdout.write(`${p} +`)}return u.exitCode()}};Ge();qt();el();var rk={};Vt(rk,{Strategy:()=>t2,acceptedStrategies:()=>q0t,dedupe:()=>x8});Ge();Ge();var $ge=Ze($o()),t2=(e=>(e.HIGHEST="highest",e))(t2||{}),q0t=new Set(Object.values(t2)),j0t={highest:async(t,e,{resolver:r,fetcher:o,resolveOptions:a,fetchOptions:n})=>{let u=new Map;for(let[p,h]of t.storedResolutions){let E=t.storedDescriptors.get(p);if(typeof E>"u")throw new Error(`Assertion failed: The descriptor (${p}) should have been registered`);He.getSetWithDefault(u,E.identHash).add(h)}let A=new Map(He.mapAndFilter(t.storedDescriptors.values(),p=>G.isVirtualDescriptor(p)?He.mapAndFilter.skip:[p.descriptorHash,He.makeDeferred()]));for(let p of t.storedDescriptors.values()){let h=A.get(p.descriptorHash);if(typeof h>"u")throw new Error(`Assertion failed: The descriptor (${p.descriptorHash}) should have been registered`);let E=t.storedResolutions.get(p.descriptorHash);if(typeof E>"u")throw new Error(`Assertion failed: The resolution (${p.descriptorHash}) should have been registered`);let I=t.originalPackages.get(E);if(typeof I>"u")throw new Error(`Assertion failed: The package (${E}) should have been registered`);Promise.resolve().then(async()=>{let v=r.getResolutionDependencies(p,a),x=Object.fromEntries(await He.allSettledSafe(Object.entries(v).map(async([te,ae])=>{let le=A.get(ae.descriptorHash);if(typeof le>"u")throw new Error(`Assertion failed: The descriptor (${ae.descriptorHash}) should have been registered`);let ce=await le.promise;if(!ce)throw new Error("Assertion failed: Expected the dependency to have been through the dedupe process itself");return[te,ce.updatedPackage]})));if(e.length&&!$ge.default.isMatch(G.stringifyIdent(p),e)||!r.shouldPersistResolution(I,a))return I;let C=u.get(p.identHash);if(typeof C>"u")throw new Error(`Assertion failed: The resolutions (${p.identHash}) should have been registered`);if(C.size===1)return I;let R=[...C].map(te=>{let ae=t.originalPackages.get(te);if(typeof ae>"u")throw new Error(`Assertion failed: The package (${te}) should have been registered`);return ae}),L=await r.getSatisfying(p,x,R,a),U=L.locators?.[0];if(typeof U>"u"||!L.sorted)return I;let z=t.originalPackages.get(U.locatorHash);if(typeof z>"u")throw new Error(`Assertion failed: The package (${U.locatorHash}) should have been registered`);return z}).then(async v=>{let x=await t.preparePackage(v,{resolver:r,resolveOptions:a});h.resolve({descriptor:p,currentPackage:I,updatedPackage:v,resolvedPackage:x})}).catch(v=>{h.reject(v)})}return[...A.values()].map(p=>p.promise)}};async function x8(t,{strategy:e,patterns:r,cache:o,report:a}){let{configuration:n}=t,u=new ki,A=n.makeResolver(),p=n.makeFetcher(),h={cache:o,checksums:t.storedChecksums,fetcher:p,project:t,report:u,cacheOptions:{skipIntegrityCheck:!0}},E={project:t,resolver:A,report:u,fetchOptions:h};return await a.startTimerPromise("Deduplication step",async()=>{let I=j0t[e],v=await I(t,r,{resolver:A,resolveOptions:E,fetcher:p,fetchOptions:h}),x=Zs.progressViaCounter(v.length);await a.reportProgress(x);let C=0;await 
Promise.all(v.map(U=>U.then(z=>{if(z===null||z.currentPackage.locatorHash===z.updatedPackage.locatorHash)return;C++;let{descriptor:te,currentPackage:ae,updatedPackage:le}=z;a.reportInfo(0,`${G.prettyDescriptor(n,te)} can be deduped from ${G.prettyLocator(n,ae)} to ${G.prettyLocator(n,le)}`),a.reportJson({descriptor:G.stringifyDescriptor(te),currentResolution:G.stringifyLocator(ae),updatedResolution:G.stringifyLocator(le)}),t.storedResolutions.set(te.descriptorHash,le.locatorHash)}).finally(()=>x.tick())));let R;switch(C){case 0:R="No packages";break;case 1:R="One package";break;default:R=`${C} packages`}let L=pe.pretty(n,e,pe.Type.CODE);return a.reportInfo(0,`${R} can be deduped using the ${L} strategy`),C})}var Zy=class extends ut{constructor(){super(...arguments);this.strategy=ge.String("-s,--strategy","highest",{description:"The strategy to use when deduping dependencies",validator:Js(t2)});this.check=ge.Boolean("-c,--check",!1,{description:"Exit with exit code 1 when duplicates are found, without persisting the dependency tree"});this.json=ge.Boolean("--json",!1,{description:"Format the output as an NDJSON stream"});this.mode=ge.String("--mode",{description:"Change what artifacts installs generate",validator:Js(hl)});this.patterns=ge.Rest()}static{this.paths=[["dedupe"]]}static{this.usage=it.Usage({description:"deduplicate dependencies with overlapping ranges",details:"\n Duplicates are defined as descriptors with overlapping ranges being resolved and locked to different locators. They are a natural consequence of Yarn's deterministic installs, but they can sometimes pile up and unnecessarily increase the size of your project.\n\n This command dedupes dependencies in the current project using different strategies (only one is implemented at the moment):\n\n - `highest`: Reuses (where possible) the locators with the highest versions. This means that dependencies can only be upgraded, never downgraded. It's also guaranteed that it never takes more than a single pass to dedupe the entire dependency tree.\n\n **Note:** Even though it never produces a wrong dependency tree, this command should be used with caution, as it modifies the dependency tree, which can sometimes cause problems when packages don't strictly follow semver recommendations. Because of this, it is recommended to also review the changes manually.\n\n If set, the `-c,--check` flag will only report the found duplicates, without persisting the modified dependency tree. If changes are found, the command will exit with a non-zero exit code, making it suitable for CI purposes.\n\n If the `--mode=` option is set, Yarn will change which artifacts are generated. The modes currently supported are:\n\n - `skip-build` will not run the build scripts at all. Note that this is different from setting `enableScripts` to false because the latter will disable build scripts, and thus affect the content of the artifacts generated on disk, whereas the former will just disable the build step - but not the scripts themselves, which just won't run.\n\n - `update-lockfile` will skip the link step altogether, and only fetch packages that are missing from the lockfile (or that have no associated checksums). This mode is typically used by tools like Renovate or Dependabot to keep a lockfile up-to-date without incurring the full install cost.\n\n This command accepts glob patterns as arguments (if valid Idents and supported by [micromatch](https://github.com/micromatch/micromatch)). 
Make sure to escape the patterns, to prevent your own shell from trying to expand them.\n\n ### In-depth explanation:\n\n Yarn doesn't deduplicate dependencies by default, otherwise installs wouldn't be deterministic and the lockfile would be useless. What it actually does is that it tries to not duplicate dependencies in the first place.\n\n **Example:** If `foo@^2.3.4` (a dependency of a dependency) has already been resolved to `foo@2.3.4`, running `yarn add foo@*`will cause Yarn to reuse `foo@2.3.4`, even if the latest `foo` is actually `foo@2.10.14`, thus preventing unnecessary duplication.\n\n Duplication happens when Yarn can't unlock dependencies that have already been locked inside the lockfile.\n\n **Example:** If `foo@^2.3.4` (a dependency of a dependency) has already been resolved to `foo@2.3.4`, running `yarn add foo@2.10.14` will cause Yarn to install `foo@2.10.14` because the existing resolution doesn't satisfy the range `2.10.14`. This behavior can lead to (sometimes) unwanted duplication, since now the lockfile contains 2 separate resolutions for the 2 `foo` descriptors, even though they have overlapping ranges, which means that the lockfile can be simplified so that both descriptors resolve to `foo@2.10.14`.\n ",examples:[["Dedupe all packages","$0 dedupe"],["Dedupe all packages using a specific strategy","$0 dedupe --strategy highest"],["Dedupe a specific package","$0 dedupe lodash"],["Dedupe all packages with the `@babel/*` scope","$0 dedupe '@babel/*'"],["Check for duplicates (can be used as a CI step)","$0 dedupe --check"]]})}async execute(){let r=await Ke.find(this.context.cwd,this.context.plugins),{project:o}=await kt.find(r,this.context.cwd),a=await Gr.find(r);await o.restoreInstallState({restoreResolutions:!1});let n=0,u=await Rt.start({configuration:r,includeFooter:!1,stdout:this.context.stdout,json:this.json},async A=>{n=await x8(o,{strategy:this.strategy,patterns:this.patterns,cache:a,report:A})});return u.hasErrors()?u.exitCode():this.check?n?1:0:await o.installWithNewReport({json:this.json,stdout:this.context.stdout},{cache:a,mode:this.mode})}};Ge();qt();var $y=class extends ut{static{this.paths=[["--clipanion=definitions"]]}async execute(){let{plugins:e}=await Ke.find(this.context.cwd,this.context.plugins),r=[];for(let u of e){let{commands:A}=u[1];if(A){let h=Jo.from(A).definitions();r.push([u[0],h])}}let o=this.cli.definitions(),a=(u,A)=>u.split(" ").slice(1).join()===A.split(" ").slice(1).join(),n=ede()["@yarnpkg/builder"].bundles.standard;for(let u of r){let A=u[1];for(let p of A)o.find(h=>a(h.path,p.path)).plugin={name:u[0],isDefault:n.includes(u[0])}}this.context.stdout.write(`${JSON.stringify(o,null,2)} +`)}};var eE=class extends ut{static{this.paths=[["help"],["--help"],["-h"]]}async execute(){this.context.stdout.write(this.cli.usage(null))}};Ge();Pt();qt();var tE=class extends ut{constructor(){super(...arguments);this.leadingArgument=ge.String();this.args=ge.Proxy()}async execute(){if(this.leadingArgument.match(/[\\/]/)&&!G.tryParseIdent(this.leadingArgument)){let r=K.resolve(this.context.cwd,ue.toPortablePath(this.leadingArgument));return await this.cli.run(this.args,{cwd:r})}else return await this.cli.run(["run",this.leadingArgument,...this.args])}};Ge();var rE=class extends ut{static{this.paths=[["-v"],["--version"]]}async execute(){this.context.stdout.write(`${nn||""} +`)}};Ge();Ge();qt();var nE=class extends 
ut{constructor(){super(...arguments);this.commandName=ge.String();this.args=ge.Proxy()}static{this.paths=[["exec"]]}static{this.usage=it.Usage({description:"execute a shell script",details:` + This command simply executes a shell script within the context of the root directory of the active workspace using the portable shell. + + It also makes sure to call it in a way that's compatible with the current project (for example, on PnP projects the environment will be setup in such a way that PnP will be correctly injected into the environment). + `,examples:[["Execute a single shell command","$0 exec echo Hello World"],["Execute a shell script",'$0 exec "tsc & babel src --out-dir lib"']]})}async execute(){let r=await Ke.find(this.context.cwd,this.context.plugins),{project:o,locator:a}=await kt.find(r,this.context.cwd);return await o.restoreInstallState(),await An.executePackageShellcode(a,this.commandName,this.args,{cwd:this.context.cwd,stdin:this.context.stdin,stdout:this.context.stdout,stderr:this.context.stderr,project:o})}};Ge();qt();el();var iE=class extends ut{constructor(){super(...arguments);this.hash=ge.String({required:!1,validator:YD(om(),[qw(/^p[0-9a-f]{5}$/)])})}static{this.paths=[["explain","peer-requirements"]]}static{this.usage=it.Usage({description:"explain a set of peer requirements",details:` + A peer requirement represents all peer requests that a subject must satisfy when providing a requested package to requesters. + + When the hash argument is specified, this command prints a detailed explanation of the peer requirement corresponding to the hash and whether it is satisfied or not. + + When used without arguments, this command lists all peer requirements and the corresponding hash that can be used to get detailed information about a given requirement. + + **Note:** A hash is a six-letter p-prefixed code that can be obtained from peer dependency warnings or from the list of all peer requirements (\`yarn explain peer-requirements\`). 
+ `,examples:[["Explain the corresponding peer requirement for a hash","$0 explain peer-requirements p1a4ed"],["List all peer requirements","$0 explain peer-requirements"]]})}async execute(){let r=await Ke.find(this.context.cwd,this.context.plugins),{project:o}=await kt.find(r,this.context.cwd);return await o.restoreInstallState({restoreResolutions:!1}),await o.applyLightResolution(),typeof this.hash<"u"?await Y0t(this.hash,o,{stdout:this.context.stdout}):await W0t(o,{stdout:this.context.stdout})}};async function Y0t(t,e,r){let o=e.peerRequirementNodes.get(t);if(typeof o>"u")throw new Error(`No peerDependency requirements found for hash: "${t}"`);let a=new Set,n=p=>a.has(p.requester.locatorHash)?{value:pe.tuple(pe.Type.DEPENDENT,{locator:p.requester,descriptor:p.descriptor}),children:p.children.size>0?[{value:pe.tuple(pe.Type.NO_HINT,"...")}]:[]}:(a.add(p.requester.locatorHash),{value:pe.tuple(pe.Type.DEPENDENT,{locator:p.requester,descriptor:p.descriptor}),children:Object.fromEntries(Array.from(p.children.values(),h=>[G.stringifyLocator(h.requester),n(h)]))}),u=e.peerWarnings.find(p=>p.hash===t);return(await Rt.start({configuration:e.configuration,stdout:r.stdout,includeFooter:!1,includePrefix:!1},async p=>{let h=pe.mark(e.configuration),E=u?h.Cross:h.Check;if(p.reportInfo(0,`Package ${pe.pretty(e.configuration,o.subject,pe.Type.LOCATOR)} is requested to provide ${pe.pretty(e.configuration,o.ident,pe.Type.IDENT)} by its descendants`),p.reportSeparator(),p.reportInfo(0,pe.pretty(e.configuration,o.subject,pe.Type.LOCATOR)),fs.emitTree({children:Object.fromEntries(Array.from(o.requests.values(),I=>[G.stringifyLocator(I.requester),n(I)]))},{configuration:e.configuration,stdout:r.stdout,json:!1}),p.reportSeparator(),o.provided.range==="missing:"){let I=u?"":" , but all peer requests are optional";p.reportInfo(0,`${E} Package ${pe.pretty(e.configuration,o.subject,pe.Type.LOCATOR)} does not provide ${pe.pretty(e.configuration,o.ident,pe.Type.IDENT)}${I}.`)}else{let I=e.storedResolutions.get(o.provided.descriptorHash);if(!I)throw new Error("Assertion failed: Expected the descriptor to be registered");let v=e.storedPackages.get(I);if(!v)throw new Error("Assertion failed: Expected the package to be registered");p.reportInfo(0,`${E} Package ${pe.pretty(e.configuration,o.subject,pe.Type.LOCATOR)} provides ${pe.pretty(e.configuration,o.ident,pe.Type.IDENT)} with version ${G.prettyReference(e.configuration,v.version??"0.0.0")}, ${u?"which does not satisfy all requests.":"which satisfies all requests"}`),u?.type===3&&(u.range?p.reportInfo(0,` The combined requested range is ${pe.pretty(e.configuration,u.range,pe.Type.RANGE)}`):p.reportInfo(0," Unfortunately, the requested ranges have no overlap"))}})).exitCode()}async function W0t(t,e){return(await Rt.start({configuration:t.configuration,stdout:e.stdout,includeFooter:!1,includePrefix:!1},async o=>{let a=pe.mark(t.configuration),n=He.sortMap(t.peerRequirementNodes,[([,u])=>G.stringifyLocator(u.subject),([,u])=>G.stringifyIdent(u.ident)]);for(let[,u]of n.values()){if(!u.root)continue;let A=t.peerWarnings.find(E=>E.hash===u.hash),p=[...G.allPeerRequests(u)],h;if(p.length>2?h=` and ${p.length-1} other dependencies`:p.length===2?h=" and 1 other dependency":h="",u.provided.range!=="missing:"){let E=t.storedResolutions.get(u.provided.descriptorHash);if(!E)throw new Error("Assertion failed: Expected the resolution to have been registered");let I=t.storedPackages.get(E);if(!I)throw new Error("Assertion failed: Expected the provided package to have been 
registered");let v=`${pe.pretty(t.configuration,u.hash,pe.Type.CODE)} \u2192 ${A?a.Cross:a.Check} ${G.prettyLocator(t.configuration,u.subject)} provides ${G.prettyLocator(t.configuration,I)} to ${G.prettyLocator(t.configuration,p[0].requester)}${h}`;A?o.reportWarning(0,v):o.reportInfo(0,v)}else{let E=`${pe.pretty(t.configuration,u.hash,pe.Type.CODE)} \u2192 ${A?a.Cross:a.Check} ${G.prettyLocator(t.configuration,u.subject)} doesn't provide ${G.prettyIdent(t.configuration,u.ident)} to ${G.prettyLocator(t.configuration,p[0].requester)}${h}`;A?o.reportWarning(0,E):o.reportInfo(0,E)}}})).exitCode()}Ge();qt();el();Ge();Ge();Pt();qt();var tde=Ze(Jn()),sE=class extends ut{constructor(){super(...arguments);this.useYarnPath=ge.Boolean("--yarn-path",{description:"Set the yarnPath setting even if the version can be accessed by Corepack"});this.onlyIfNeeded=ge.Boolean("--only-if-needed",!1,{description:"Only lock the Yarn version if it isn't already locked"});this.version=ge.String()}static{this.paths=[["set","version"]]}static{this.usage=it.Usage({description:"lock the Yarn version used by the project",details:"\n This command will set a specific release of Yarn to be used by Corepack: https://nodejs.org/api/corepack.html.\n\n By default it only will set the `packageManager` field at the root of your project, but if the referenced release cannot be represented this way, if you already have `yarnPath` configured, or if you set the `--yarn-path` command line flag, then the release will also be downloaded from the Yarn GitHub repository, stored inside your project, and referenced via the `yarnPath` settings from your project `.yarnrc.yml` file.\n\n A very good use case for this command is to enforce the version of Yarn used by any single member of your team inside the same project - by doing this you ensure that you have control over Yarn upgrades and downgrades (including on your deployment servers), and get rid of most of the headaches related to someone using a slightly different version and getting different behavior.\n\n The version specifier can be:\n\n - a tag:\n - `latest` / `berry` / `stable` -> the most recent stable berry (`>=2.0.0`) release\n - `canary` -> the most recent canary (release candidate) berry (`>=2.0.0`) release\n - `classic` -> the most recent classic (`^0.x || ^1.x`) release\n\n - a semver range (e.g. `2.x`) -> the most recent version satisfying the range (limited to berry releases)\n\n - a semver version (e.g. 
`2.4.1`, `1.22.1`)\n\n - a local file referenced through either a relative or absolute path\n\n - `self` -> the version used to invoke the command\n ",examples:[["Download the latest release from the Yarn repository","$0 set version latest"],["Download the latest canary release from the Yarn repository","$0 set version canary"],["Download the latest classic release from the Yarn repository","$0 set version classic"],["Download the most recent Yarn 3 build","$0 set version 3.x"],["Download a specific Yarn 2 build","$0 set version 2.0.0-rc.30"],["Switch back to a specific Yarn 1 release","$0 set version 1.22.1"],["Use a release from the local filesystem","$0 set version ./yarn.cjs"],["Use a release from a URL","$0 set version https://repo.yarnpkg.com/3.1.0/packages/yarnpkg-cli/bin/yarn.js"],["Download the version used to invoke the command","$0 set version self"]]})}async execute(){let r=await Ke.find(this.context.cwd,this.context.plugins);if(this.onlyIfNeeded&&r.get("yarnPath")){let A=r.sources.get("yarnPath");if(!A)throw new Error("Assertion failed: Expected 'yarnPath' to have a source");let p=r.projectCwd??r.startingCwd;if(K.contains(p,A))return 0}let o=()=>{if(typeof nn>"u")throw new st("The --install flag can only be used without explicit version specifier from the Yarn CLI");return`file://${process.argv[1]}`},a,n=(A,p)=>({version:p,url:A.replace(/\{\}/g,p)});if(this.version==="self")a={url:o(),version:nn??"self"};else if(this.version==="latest"||this.version==="berry"||this.version==="stable")a=n("https://repo.yarnpkg.com/{}/packages/yarnpkg-cli/bin/yarn.js",await r2(r,"stable"));else if(this.version==="canary")a=n("https://repo.yarnpkg.com/{}/packages/yarnpkg-cli/bin/yarn.js",await r2(r,"canary"));else if(this.version==="classic")a={url:"https://classic.yarnpkg.com/latest.js",version:"classic"};else if(this.version.match(/^https?:/))a={url:this.version,version:"remote"};else if(this.version.match(/^\.{0,2}[\\/]/)||ue.isAbsolute(this.version))a={url:`file://${K.resolve(ue.toPortablePath(this.version))}`,version:"file"};else if(Lr.satisfiesWithPrereleases(this.version,">=2.0.0"))a=n("https://repo.yarnpkg.com/{}/packages/yarnpkg-cli/bin/yarn.js",this.version);else if(Lr.satisfiesWithPrereleases(this.version,"^0.x || ^1.x"))a=n("https://github.com/yarnpkg/yarn/releases/download/v{}/yarn-{}.js",this.version);else if(Lr.validRange(this.version))a=n("https://repo.yarnpkg.com/{}/packages/yarnpkg-cli/bin/yarn.js",await K0t(r,this.version));else throw new st(`Invalid version descriptor "${this.version}"`);return(await Rt.start({configuration:r,stdout:this.context.stdout,includeLogs:!this.context.quiet},async A=>{let p=async()=>{let h="file://";return a.url.startsWith(h)?(A.reportInfo(0,`Retrieving ${pe.pretty(r,a.url,pe.Type.PATH)}`),await oe.readFilePromise(a.url.slice(h.length))):(A.reportInfo(0,`Downloading ${pe.pretty(r,a.url,pe.Type.URL)}`),await sn.get(a.url,{configuration:r}))};await k8(r,a.version,p,{report:A,useYarnPath:this.useYarnPath})})).exitCode()}};async function K0t(t,e){let o=(await sn.get("https://repo.yarnpkg.com/tags",{configuration:t,jsonResponse:!0})).tags.filter(a=>Lr.satisfiesWithPrereleases(a,e));if(o.length===0)throw new st(`No matching release found for range ${pe.pretty(t,e,pe.Type.RANGE)}.`);return o[0]}async function r2(t,e){let r=await sn.get("https://repo.yarnpkg.com/tags",{configuration:t,jsonResponse:!0});if(!r.latest[e])throw new st(`Tag ${pe.pretty(t,e,pe.Type.RANGE)} not found`);return r.latest[e]}async function k8(t,e,r,{report:o,useYarnPath:a}){let 
n,u=async()=>(typeof n>"u"&&(n=await r()),n);if(e===null){let te=await u();await oe.mktempPromise(async ae=>{let le=K.join(ae,"yarn.cjs");await oe.writeFilePromise(le,te);let{stdout:ce}=await Ur.execvp(process.execPath,[ue.fromPortablePath(le),"--version"],{cwd:ae,env:{...t.env,YARN_IGNORE_PATH:"1"}});if(e=ce.trim(),!tde.default.valid(e))throw new Error(`Invalid semver version. ${pe.pretty(t,"yarn --version",pe.Type.CODE)} returned: +${e}`)})}let A=t.projectCwd??t.startingCwd,p=K.resolve(A,".yarn/releases"),h=K.resolve(p,`yarn-${e}.cjs`),E=K.relative(t.startingCwd,h),I=He.isTaggedYarnVersion(e),v=t.get("yarnPath"),x=!I,C=x||!!v||!!a;if(a===!1){if(x)throw new Jt(0,"You explicitly opted out of yarnPath usage in your command line, but the version you specified cannot be represented by Corepack");C=!1}else!C&&!process.env.COREPACK_ROOT&&(o.reportWarning(0,`You don't seem to have ${pe.applyHyperlink(t,"Corepack","https://nodejs.org/api/corepack.html")} enabled; we'll have to rely on ${pe.applyHyperlink(t,"yarnPath","https://yarnpkg.com/configuration/yarnrc#yarnPath")} instead`),C=!0);if(C){let te=await u();o.reportInfo(0,`Saving the new release in ${pe.pretty(t,E,"magenta")}`),await oe.removePromise(K.dirname(h)),await oe.mkdirPromise(K.dirname(h),{recursive:!0}),await oe.writeFilePromise(h,te,{mode:493}),await Ke.updateConfiguration(A,{yarnPath:K.relative(A,h)})}else await oe.removePromise(K.dirname(h)),await Ke.updateConfiguration(A,{yarnPath:Ke.deleteProperty});let R=await Ut.tryFind(A)||new Ut;R.packageManager=`yarn@${I?e:await r2(t,"stable")}`;let L={};R.exportTo(L);let U=K.join(A,Ut.fileName),z=`${JSON.stringify(L,null,R.indent)} +`;return await oe.changeFilePromise(U,z,{automaticNewlines:!0}),{bundleVersion:e}}function rde(t){return wr[ZD(t)]}var V0t=/## (?YN[0-9]{4}) - `(?[A-Z_]+)`\n\n(?
      (?:.(?!##))+)/gs;async function z0t(t){let r=`https://repo.yarnpkg.com/${He.isTaggedYarnVersion(nn)?nn:await r2(t,"canary")}/packages/docusaurus/docs/advanced/01-general-reference/error-codes.mdx`,o=await sn.get(r,{configuration:t});return new Map(Array.from(o.toString().matchAll(V0t),({groups:a})=>{if(!a)throw new Error("Assertion failed: Expected the match to have been successful");let n=rde(a.code);if(a.name!==n)throw new Error(`Assertion failed: Invalid error code data: Expected "${a.name}" to be named "${n}"`);return[a.code,a.details]}))}var oE=class extends ut{constructor(){super(...arguments);this.code=ge.String({required:!1,validator:jw(om(),[qw(/^YN[0-9]{4}$/)])});this.json=ge.Boolean("--json",!1,{description:"Format the output as an NDJSON stream"})}static{this.paths=[["explain"]]}static{this.usage=it.Usage({description:"explain an error code",details:` + When the code argument is specified, this command prints its name and its details. + + When used without arguments, this command lists all error codes and their names. + `,examples:[["Explain an error code","$0 explain YN0006"],["List all error codes","$0 explain"]]})}async execute(){let r=await Ke.find(this.context.cwd,this.context.plugins);if(typeof this.code<"u"){let o=rde(this.code),a=pe.pretty(r,o,pe.Type.CODE),n=this.cli.format().header(`${this.code} - ${a}`),A=(await z0t(r)).get(this.code),p=typeof A<"u"?pe.jsonOrPretty(this.json,r,pe.tuple(pe.Type.MARKDOWN,{text:A,format:this.cli.format(),paragraphs:!0})):`This error code does not have a description. + +You can help us by editing this page on GitHub \u{1F642}: +${pe.jsonOrPretty(this.json,r,pe.tuple(pe.Type.URL,"https://github.com/yarnpkg/berry/blob/master/packages/docusaurus/docs/advanced/01-general-reference/error-codes.mdx"))} +`;this.json?this.context.stdout.write(`${JSON.stringify({code:this.code,name:o,details:p})} +`):this.context.stdout.write(`${n} + +${p} +`)}else{let o={children:He.mapAndFilter(Object.entries(wr),([a,n])=>Number.isNaN(Number(a))?He.mapAndFilter.skip:{label:Ku(Number(a)),value:pe.tuple(pe.Type.CODE,n)})};fs.emitTree(o,{configuration:r,stdout:this.context.stdout,json:this.json})}}};Ge();Pt();qt();var nde=Ze($o()),aE=class extends ut{constructor(){super(...arguments);this.all=ge.Boolean("-A,--all",!1,{description:"Print versions of a package from the whole project"});this.recursive=ge.Boolean("-R,--recursive",!1,{description:"Print information for all packages, including transitive dependencies"});this.extra=ge.Array("-X,--extra",[],{description:"An array of requests of extra data provided by plugins"});this.cache=ge.Boolean("--cache",!1,{description:"Print information about the cache entry of a package (path, size, checksum)"});this.dependents=ge.Boolean("--dependents",!1,{description:"Print all dependents for each matching package"});this.manifest=ge.Boolean("--manifest",!1,{description:"Print data obtained by looking at the package archive (license, homepage, ...)"});this.nameOnly=ge.Boolean("--name-only",!1,{description:"Only print the name for the matching packages"});this.virtuals=ge.Boolean("--virtuals",!1,{description:"Print each instance of the virtual packages"});this.json=ge.Boolean("--json",!1,{description:"Format the output as an NDJSON stream"});this.patterns=ge.Rest()}static{this.paths=[["info"]]}static{this.usage=it.Usage({description:"see information related to packages",details:"\n This command prints various information related to the specified packages, accepting glob patterns.\n\n By default, if the locator reference is 
missing, Yarn will default to print the information about all the matching direct dependencies of the package for the active workspace. To instead print all versions of the package that are direct dependencies of any of your workspaces, use the `-A,--all` flag. Adding the `-R,--recursive` flag will also report transitive dependencies.\n\n Some fields will be hidden by default in order to keep the output readable, but can be selectively displayed by using additional options (`--dependents`, `--manifest`, `--virtuals`, ...) described in the option descriptions.\n\n Note that this command will only print the information directly related to the selected packages - if you wish to know why the package is there in the first place, use `yarn why` which will do just that (it also provides a `-R,--recursive` flag that may be of some help).\n ",examples:[["Show information about Lodash","$0 info lodash"]]})}async execute(){let r=await Ke.find(this.context.cwd,this.context.plugins),{project:o,workspace:a}=await kt.find(r,this.context.cwd),n=await Gr.find(r);if(!a&&!this.all)throw new sr(o.cwd,this.context.cwd);await o.restoreInstallState();let u=new Set(this.extra);this.cache&&u.add("cache"),this.dependents&&u.add("dependents"),this.manifest&&u.add("manifest");let A=(ae,{recursive:le})=>{let ce=ae.anchoredLocator.locatorHash,Ce=new Map,de=[ce];for(;de.length>0;){let Be=de.shift();if(Ce.has(Be))continue;let Ee=o.storedPackages.get(Be);if(typeof Ee>"u")throw new Error("Assertion failed: Expected the package to be registered");if(Ce.set(Be,Ee),G.isVirtualLocator(Ee)&&de.push(G.devirtualizeLocator(Ee).locatorHash),!(!le&&Be!==ce))for(let g of Ee.dependencies.values()){let me=o.storedResolutions.get(g.descriptorHash);if(typeof me>"u")throw new Error("Assertion failed: Expected the resolution to be registered");de.push(me)}}return Ce.values()},p=({recursive:ae})=>{let le=new Map;for(let ce of o.workspaces)for(let Ce of A(ce,{recursive:ae}))le.set(Ce.locatorHash,Ce);return le.values()},h=({all:ae,recursive:le})=>ae&&le?o.storedPackages.values():ae?p({recursive:le}):A(a,{recursive:le}),E=({all:ae,recursive:le})=>{let ce=h({all:ae,recursive:le}),Ce=this.patterns.map(Ee=>{let g=G.parseLocator(Ee),me=nde.default.makeRe(G.stringifyIdent(g)),we=G.isVirtualLocator(g),Ae=we?G.devirtualizeLocator(g):g;return ne=>{let Z=G.stringifyIdent(ne);if(!me.test(Z))return!1;if(g.reference==="unknown")return!0;let xe=G.isVirtualLocator(ne),Ne=xe?G.devirtualizeLocator(ne):ne;return!(we&&xe&&g.reference!==ne.reference||Ae.reference!==Ne.reference)}}),de=He.sortMap([...ce],Ee=>G.stringifyLocator(Ee));return{selection:de.filter(Ee=>Ce.length===0||Ce.some(g=>g(Ee))),sortedLookup:de}},{selection:I,sortedLookup:v}=E({all:this.all,recursive:this.recursive});if(I.length===0)throw new st("No package matched your request");let x=new Map;if(this.dependents)for(let ae of v)for(let le of ae.dependencies.values()){let ce=o.storedResolutions.get(le.descriptorHash);if(typeof ce>"u")throw new Error("Assertion failed: Expected the resolution to be registered");He.getArrayWithDefault(x,ce).push(ae)}let C=new Map;for(let ae of v){if(!G.isVirtualLocator(ae))continue;let le=G.devirtualizeLocator(ae);He.getArrayWithDefault(C,le.locatorHash).push(ae)}let R={},L={children:R},U=r.makeFetcher(),z={project:o,fetcher:U,cache:n,checksums:o.storedChecksums,report:new ki,cacheOptions:{skipIntegrityCheck:!0}},te=[async(ae,le,ce)=>{if(!le.has("manifest"))return;let Ce=await U.fetch(ae,z),de;try{de=await 
Ut.find(Ce.prefixPath,{baseFs:Ce.packageFs})}finally{Ce.releaseFs?.()}ce("Manifest",{License:pe.tuple(pe.Type.NO_HINT,de.license),Homepage:pe.tuple(pe.Type.URL,de.raw.homepage??null)})},async(ae,le,ce)=>{if(!le.has("cache"))return;let Ce=o.storedChecksums.get(ae.locatorHash)??null,de=n.getLocatorPath(ae,Ce),Be;if(de!==null)try{Be=await oe.statPromise(de)}catch{}let Ee=typeof Be<"u"?[Be.size,pe.Type.SIZE]:void 0;ce("Cache",{Checksum:pe.tuple(pe.Type.NO_HINT,Ce),Path:pe.tuple(pe.Type.PATH,de),Size:Ee})}];for(let ae of I){let le=G.isVirtualLocator(ae);if(!this.virtuals&&le)continue;let ce={},Ce={value:[ae,pe.Type.LOCATOR],children:ce};if(R[G.stringifyLocator(ae)]=Ce,this.nameOnly){delete Ce.children;continue}let de=C.get(ae.locatorHash);typeof de<"u"&&(ce.Instances={label:"Instances",value:pe.tuple(pe.Type.NUMBER,de.length)}),ce.Version={label:"Version",value:pe.tuple(pe.Type.NO_HINT,ae.version)};let Be=(g,me)=>{let we={};if(ce[g]=we,Array.isArray(me))we.children=me.map(Ae=>({value:Ae}));else{let Ae={};we.children=Ae;for(let[ne,Z]of Object.entries(me))typeof Z>"u"||(Ae[ne]={label:ne,value:Z})}};if(!le){for(let g of te)await g(ae,u,Be);await r.triggerHook(g=>g.fetchPackageInfo,ae,u,Be)}ae.bin.size>0&&!le&&Be("Exported Binaries",[...ae.bin.keys()].map(g=>pe.tuple(pe.Type.PATH,g)));let Ee=x.get(ae.locatorHash);typeof Ee<"u"&&Ee.length>0&&Be("Dependents",Ee.map(g=>pe.tuple(pe.Type.LOCATOR,g))),ae.dependencies.size>0&&!le&&Be("Dependencies",[...ae.dependencies.values()].map(g=>{let me=o.storedResolutions.get(g.descriptorHash),we=typeof me<"u"?o.storedPackages.get(me)??null:null;return pe.tuple(pe.Type.RESOLUTION,{descriptor:g,locator:we})})),ae.peerDependencies.size>0&&le&&Be("Peer dependencies",[...ae.peerDependencies.values()].map(g=>{let me=ae.dependencies.get(g.identHash),we=typeof me<"u"?o.storedResolutions.get(me.descriptorHash)??null:null,Ae=we!==null?o.storedPackages.get(we)??null:null;return pe.tuple(pe.Type.RESOLUTION,{descriptor:g,locator:Ae})}))}fs.emitTree(L,{configuration:r,json:this.json,stdout:this.context.stdout,separators:this.nameOnly?0:2})}};Ge();Pt();Nl();var nk=Ze(X0());qt();var Q8=Ze(Jn());el();var J0t=[{selector:t=>t===-1,name:"nodeLinker",value:"node-modules"},{selector:t=>t!==-1&&t<8,name:"enableGlobalCache",value:!1},{selector:t=>t!==-1&&t<8,name:"compressionLevel",value:"mixed"}],lE=class extends ut{constructor(){super(...arguments);this.json=ge.Boolean("--json",!1,{description:"Format the output as an NDJSON stream"});this.immutable=ge.Boolean("--immutable",{description:"Abort with an error exit code if the lockfile was to be modified"});this.immutableCache=ge.Boolean("--immutable-cache",{description:"Abort with an error exit code if the cache folder was to be modified"});this.refreshLockfile=ge.Boolean("--refresh-lockfile",{description:"Refresh the package metadata stored in the lockfile"});this.checkCache=ge.Boolean("--check-cache",{description:"Always refetch the packages and ensure that their checksums are consistent"});this.checkResolutions=ge.Boolean("--check-resolutions",{description:"Validates that the package resolutions are coherent"});this.inlineBuilds=ge.Boolean("--inline-builds",{description:"Verbosely print the output of the build steps of dependencies"});this.mode=ge.String("--mode",{description:"Change what artifacts installs 
generate",validator:Js(hl)});this.cacheFolder=ge.String("--cache-folder",{hidden:!0});this.frozenLockfile=ge.Boolean("--frozen-lockfile",{hidden:!0});this.ignoreEngines=ge.Boolean("--ignore-engines",{hidden:!0});this.nonInteractive=ge.Boolean("--non-interactive",{hidden:!0});this.preferOffline=ge.Boolean("--prefer-offline",{hidden:!0});this.production=ge.Boolean("--production",{hidden:!0});this.registry=ge.String("--registry",{hidden:!0});this.silent=ge.Boolean("--silent",{hidden:!0});this.networkTimeout=ge.String("--network-timeout",{hidden:!0})}static{this.paths=[["install"],it.Default]}static{this.usage=it.Usage({description:"install the project dependencies",details:"\n This command sets up your project if needed. The installation is split into four different steps that each have their own characteristics:\n\n - **Resolution:** First the package manager will resolve your dependencies. The exact way a dependency version is privileged over another isn't standardized outside of the regular semver guarantees. If a package doesn't resolve to what you would expect, check that all dependencies are correctly declared (also check our website for more information: ).\n\n - **Fetch:** Then we download all the dependencies if needed, and make sure that they're all stored within our cache (check the value of `cacheFolder` in `yarn config` to see where the cache files are stored).\n\n - **Link:** Then we send the dependency tree information to internal plugins tasked with writing them on the disk in some form (for example by generating the `.pnp.cjs` file you might know).\n\n - **Build:** Once the dependency tree has been written on the disk, the package manager will now be free to run the build scripts for all packages that might need it, in a topological order compatible with the way they depend on one another. See https://yarnpkg.com/advanced/lifecycle-scripts for detail.\n\n Note that running this command is not part of the recommended workflow. Yarn supports zero-installs, which means that as long as you store your cache and your `.pnp.cjs` file inside your repository, everything will work without requiring any install right after cloning your repository or switching branches.\n\n If the `--immutable` option is set (defaults to true on CI), Yarn will abort with an error exit code if the lockfile was to be modified (other paths can be added using the `immutablePatterns` configuration setting). For backward compatibility we offer an alias under the name of `--frozen-lockfile`, but it will be removed in a later release.\n\n If the `--immutable-cache` option is set, Yarn will abort with an error exit code if the cache folder was to be modified (either because files would be added, or because they'd be removed).\n\n If the `--refresh-lockfile` option is set, Yarn will keep the same resolution for the packages currently in the lockfile but will refresh their metadata. If used together with `--immutable`, it can validate that the lockfile information are consistent. This flag is enabled by default when Yarn detects it runs within a pull request context.\n\n If the `--check-cache` option is set, Yarn will always refetch the packages and will ensure that their checksum matches what's 1/ described in the lockfile 2/ inside the existing cache files (if present). 
This is recommended as part of your CI workflow if you're both following the Zero-Installs model and accepting PRs from third-parties, as they'd otherwise have the ability to alter the checked-in packages before submitting them.\n\n If the `--inline-builds` option is set, Yarn will verbosely print the output of the build steps of your dependencies (instead of writing them into individual files). This is likely useful mostly for debug purposes only when using Docker-like environments.\n\n If the `--mode=` option is set, Yarn will change which artifacts are generated. The modes currently supported are:\n\n - `skip-build` will not run the build scripts at all. Note that this is different from setting `enableScripts` to false because the latter will disable build scripts, and thus affect the content of the artifacts generated on disk, whereas the former will just disable the build step - but not the scripts themselves, which just won't run.\n\n - `update-lockfile` will skip the link step altogether, and only fetch packages that are missing from the lockfile (or that have no associated checksums). This mode is typically used by tools like Renovate or Dependabot to keep a lockfile up-to-date without incurring the full install cost.\n ",examples:[["Install the project","$0 install"],["Validate a project when using Zero-Installs","$0 install --immutable --immutable-cache"],["Validate a project when using Zero-Installs (slightly safer if you accept external PRs)","$0 install --immutable --immutable-cache --check-cache"]]})}async execute(){let r=await Ke.find(this.context.cwd,this.context.plugins);typeof this.inlineBuilds<"u"&&r.useWithSource("",{enableInlineBuilds:this.inlineBuilds},r.startingCwd,{overwrite:!0});let o=!!process.env.FUNCTION_TARGET||!!process.env.GOOGLE_RUNTIME,a=await uy({configuration:r,stdout:this.context.stdout},[{option:this.ignoreEngines,message:"The --ignore-engines option is deprecated; engine checking isn't a core feature anymore",error:!nk.default.VERCEL},{option:this.registry,message:"The --registry option is deprecated; prefer setting npmRegistryServer in your .yarnrc.yml file"},{option:this.preferOffline,message:"The --prefer-offline flag is deprecated; use the --cached flag with 'yarn add' instead",error:!nk.default.VERCEL},{option:this.production,message:"The --production option is deprecated on 'install'; use 'yarn workspaces focus' instead",error:!0},{option:this.nonInteractive,message:"The --non-interactive option is deprecated",error:!o},{option:this.frozenLockfile,message:"The --frozen-lockfile option is deprecated; use --immutable and/or --immutable-cache instead",callback:()=>this.immutable=this.frozenLockfile},{option:this.cacheFolder,message:"The cache-folder option has been deprecated; use rc settings instead",error:!nk.default.NETLIFY}]);if(a!==null)return a;let n=this.mode==="update-lockfile";if(n&&(this.immutable||this.immutableCache))throw new st(`${pe.pretty(r,"--immutable",pe.Type.CODE)} and ${pe.pretty(r,"--immutable-cache",pe.Type.CODE)} cannot be used with ${pe.pretty(r,"--mode=update-lockfile",pe.Type.CODE)}`);let u=(this.immutable??r.get("enableImmutableInstalls"))&&!n,A=this.immutableCache&&!n;if(r.projectCwd!==null){let R=await Rt.start({configuration:r,json:this.json,stdout:this.context.stdout,includeFooter:!1},async L=>{let U=!1;await $0t(r,u)&&(L.reportInfo(48,"Automatically removed core plugins that are now builtins \u{1F44D}"),U=!0),await Z0t(r,u)&&(L.reportInfo(48,"Automatically fixed merge conflicts 
\u{1F44D}"),U=!0),U&&L.reportSeparator()});if(R.hasErrors())return R.exitCode()}if(r.projectCwd!==null){let R=await Rt.start({configuration:r,json:this.json,stdout:this.context.stdout,includeFooter:!1},async L=>{if(Ke.telemetry?.isNew)Ke.telemetry.commitTips(),L.reportInfo(65,"Yarn will periodically gather anonymous telemetry: https://yarnpkg.com/advanced/telemetry"),L.reportInfo(65,`Run ${pe.pretty(r,"yarn config set --home enableTelemetry 0",pe.Type.CODE)} to disable`),L.reportSeparator();else if(Ke.telemetry?.shouldShowTips){let U=await sn.get("https://repo.yarnpkg.com/tags",{configuration:r,jsonResponse:!0}).catch(()=>null);if(U!==null){let z=null;if(nn!==null){let ae=Q8.default.prerelease(nn)?"canary":"stable",le=U.latest[ae];Q8.default.gt(le,nn)&&(z=[ae,le])}if(z)Ke.telemetry.commitTips(),L.reportInfo(88,`${pe.applyStyle(r,`A new ${z[0]} version of Yarn is available:`,pe.Style.BOLD)} ${G.prettyReference(r,z[1])}!`),L.reportInfo(88,`Upgrade now by running ${pe.pretty(r,`yarn set version ${z[1]}`,pe.Type.CODE)}`),L.reportSeparator();else{let te=Ke.telemetry.selectTip(U.tips);te&&(L.reportInfo(89,pe.pretty(r,te.message,pe.Type.MARKDOWN_INLINE)),te.url&&L.reportInfo(89,`Learn more at ${te.url}`),L.reportSeparator())}}}});if(R.hasErrors())return R.exitCode()}let{project:p,workspace:h}=await kt.find(r,this.context.cwd),E=p.lockfileLastVersion;if(E!==null){let R=await Rt.start({configuration:r,json:this.json,stdout:this.context.stdout,includeFooter:!1},async L=>{let U={};for(let z of J0t)z.selector(E)&&typeof r.sources.get(z.name)>"u"&&(r.use("",{[z.name]:z.value},p.cwd,{overwrite:!0}),U[z.name]=z.value);Object.keys(U).length>0&&(await Ke.updateConfiguration(p.cwd,U),L.reportInfo(87,"Migrated your project to the latest Yarn version \u{1F680}"),L.reportSeparator())});if(R.hasErrors())return R.exitCode()}let I=await Gr.find(r,{immutable:A,check:this.checkCache});if(!h)throw new sr(p.cwd,this.context.cwd);await p.restoreInstallState({restoreResolutions:!1});let v=r.get("enableHardenedMode");v&&typeof r.sources.get("enableHardenedMode")>"u"&&await Rt.start({configuration:r,json:this.json,stdout:this.context.stdout,includeFooter:!1},async R=>{R.reportWarning(0,"Yarn detected that the current workflow is executed from a public pull request. For safety the hardened mode has been enabled."),R.reportWarning(0,`It will prevent malicious lockfile manipulations, in exchange for a slower install time. 
You can opt-out if necessary; check our ${pe.applyHyperlink(r,"documentation","https://yarnpkg.com/features/security#hardened-mode")} for more details.`),R.reportSeparator()}),(this.refreshLockfile??v)&&(p.lockfileNeedsRefresh=!0);let x=this.checkResolutions??v;return(await Rt.start({configuration:r,json:this.json,stdout:this.context.stdout,forceSectionAlignment:!0,includeLogs:!0,includeVersion:!0},async R=>{await p.install({cache:I,report:R,immutable:u,checkResolutions:x,mode:this.mode})})).exitCode()}},X0t="<<<<<<<";async function Z0t(t,e){if(!t.projectCwd)return!1;let r=K.join(t.projectCwd,dr.lockfile);if(!await oe.existsPromise(r)||!(await oe.readFilePromise(r,"utf8")).includes(X0t))return!1;if(e)throw new Jt(47,"Cannot autofix a lockfile when running an immutable install");let a=await Ur.execvp("git",["rev-parse","MERGE_HEAD","HEAD"],{cwd:t.projectCwd});if(a.code!==0&&(a=await Ur.execvp("git",["rev-parse","REBASE_HEAD","HEAD"],{cwd:t.projectCwd})),a.code!==0&&(a=await Ur.execvp("git",["rev-parse","CHERRY_PICK_HEAD","HEAD"],{cwd:t.projectCwd})),a.code!==0)throw new Jt(83,"Git returned an error when trying to find the commits pertaining to the conflict");let n=await Promise.all(a.stdout.trim().split(/\n/).map(async A=>{let p=await Ur.execvp("git",["show",`${A}:./${dr.lockfile}`],{cwd:t.projectCwd});if(p.code!==0)throw new Jt(83,`Git returned an error when trying to access the lockfile content in ${A}`);try{return Ki(p.stdout)}catch{throw new Jt(46,"A variant of the conflicting lockfile failed to parse")}}));n=n.filter(A=>!!A.__metadata);for(let A of n){if(A.__metadata.version<7)for(let p of Object.keys(A)){if(p==="__metadata")continue;let h=G.parseDescriptor(p,!0),E=t.normalizeDependency(h),I=G.stringifyDescriptor(E);I!==p&&(A[I]=A[p],delete A[p])}for(let p of Object.keys(A)){if(p==="__metadata")continue;let h=A[p].checksum;typeof h=="string"&&h.includes("/")||(A[p].checksum=`${A.__metadata.cacheKey}/${h}`)}}let u=Object.assign({},...n);u.__metadata.version=`${Math.min(...n.map(A=>parseInt(A.__metadata.version??0)))}`,u.__metadata.cacheKey="merged";for(let[A,p]of Object.entries(u))typeof p=="string"&&delete u[A];return await oe.changeFilePromise(r,Da(u),{automaticNewlines:!0}),!0}async function $0t(t,e){if(!t.projectCwd)return!1;let r=[],o=K.join(t.projectCwd,".yarn/plugins/@yarnpkg");return await Ke.updateConfiguration(t.projectCwd,{plugins:n=>{if(!Array.isArray(n))return n;let u=n.filter(A=>{if(!A.path)return!0;let p=K.resolve(t.projectCwd,A.path),h=l1.has(A.spec)&&K.contains(o,p);return h&&r.push(p),!h});return u.length===0?Ke.deleteProperty:u.length===n.length?n:u}},{immutable:e})?(await Promise.all(r.map(async n=>{await oe.removePromise(n)})),!0):!1}Ge();Pt();qt();var cE=class extends ut{constructor(){super(...arguments);this.all=ge.Boolean("-A,--all",!1,{description:"Link all workspaces belonging to the target projects to the current one"});this.private=ge.Boolean("-p,--private",!1,{description:"Also link private workspaces belonging to the target projects to the current one"});this.relative=ge.Boolean("-r,--relative",!1,{description:"Link workspaces using relative paths instead of absolute paths"});this.destinations=ge.Rest()}static{this.paths=[["link"]]}static{this.usage=it.Usage({description:"connect the local project to another one",details:"\n This command will set a new `resolutions` field in the project-level manifest and point it to the workspace at the specified location (even if part of another project).\n ",examples:[["Register one or more remote workspaces for use in 
the current project","$0 link ~/ts-loader ~/jest"],["Register all workspaces from a remote project for use in the current project","$0 link ~/jest --all"]]})}async execute(){let r=await Ke.find(this.context.cwd,this.context.plugins),{project:o,workspace:a}=await kt.find(r,this.context.cwd),n=await Gr.find(r);if(!a)throw new sr(o.cwd,this.context.cwd);await o.restoreInstallState({restoreResolutions:!1});let u=o.topLevelWorkspace,A=[];for(let p of this.destinations){let h=K.resolve(this.context.cwd,ue.toPortablePath(p)),E=await Ke.find(h,this.context.plugins,{useRc:!1,strict:!1}),{project:I,workspace:v}=await kt.find(E,h);if(o.cwd===I.cwd)throw new st(`Invalid destination '${p}'; Can't link the project to itself`);if(!v)throw new sr(I.cwd,h);if(this.all){let x=!1;for(let C of I.workspaces)C.manifest.name&&(!C.manifest.private||this.private)&&(A.push(C),x=!0);if(!x)throw new st(`No workspace found to be linked in the target project: ${p}`)}else{if(!v.manifest.name)throw new st(`The target workspace at '${p}' doesn't have a name and thus cannot be linked`);if(v.manifest.private&&!this.private)throw new st(`The target workspace at '${p}' is marked private - use the --private flag to link it anyway`);A.push(v)}}for(let p of A){let h=G.stringifyIdent(p.anchoredLocator),E=this.relative?K.relative(o.cwd,p.cwd):p.cwd;u.manifest.resolutions.push({pattern:{descriptor:{fullName:h}},reference:`portal:${E}`})}return await o.installWithNewReport({stdout:this.context.stdout},{cache:n})}};qt();var uE=class extends ut{constructor(){super(...arguments);this.args=ge.Proxy()}static{this.paths=[["node"]]}static{this.usage=it.Usage({description:"run node with the hook already setup",details:` + This command simply runs Node. It also makes sure to call it in a way that's compatible with the current project (for example, on PnP projects the environment will be setup in such a way that PnP will be correctly injected into the environment). + + The Node process will use the exact same version of Node as the one used to run Yarn itself, which might be a good way to ensure that your commands always use a consistent Node version. + `,examples:[["Run a Node script","$0 node ./my-script.js"]]})}async execute(){return this.cli.run(["exec","node",...this.args])}};Ge();qt();var AE=class extends ut{constructor(){super(...arguments);this.json=ge.Boolean("--json",!1,{description:"Format the output as an NDJSON stream"})}static{this.paths=[["plugin","check"]]}static{this.usage=it.Usage({category:"Plugin-related commands",description:"find all third-party plugins that differ from their own spec",details:` + Check only the plugins from https. + + If this command detects any plugin differences in the CI environment, it will throw an error. 
+ `,examples:[["find all third-party plugins that differ from their own spec","$0 plugin check"]]})}async execute(){let r=await Ke.find(this.context.cwd,this.context.plugins),o=await Ke.findRcFiles(this.context.cwd);return(await Rt.start({configuration:r,json:this.json,stdout:this.context.stdout},async n=>{for(let u of o)if(u.data?.plugins)for(let A of u.data.plugins){if(!A.checksum||!A.spec.match(/^https?:/))continue;let p=await sn.get(A.spec,{configuration:r}),h=wn.makeHash(p);if(A.checksum===h)continue;let E=pe.pretty(r,A.path,pe.Type.PATH),I=pe.pretty(r,A.spec,pe.Type.URL),v=`${E} is different from the file provided by ${I}`;n.reportJson({...A,newChecksum:h}),n.reportError(0,v)}})).exitCode()}};Ge();Ge();Pt();qt();var lde=ve("os");Ge();Pt();qt();var ide=ve("os");Ge();Nl();qt();var egt="https://raw.githubusercontent.com/yarnpkg/berry/master/plugins.yml";async function Hg(t,e){let r=await sn.get(egt,{configuration:t}),o=Ki(r.toString());return Object.fromEntries(Object.entries(o).filter(([a,n])=>!e||Lr.satisfiesWithPrereleases(e,n.range??"<4.0.0-rc.1")))}var fE=class extends ut{constructor(){super(...arguments);this.json=ge.Boolean("--json",!1,{description:"Format the output as an NDJSON stream"})}static{this.paths=[["plugin","list"]]}static{this.usage=it.Usage({category:"Plugin-related commands",description:"list the available official plugins",details:"\n This command prints the plugins available directly from the Yarn repository. Only those plugins can be referenced by name in `yarn plugin import`.\n ",examples:[["List the official plugins","$0 plugin list"]]})}async execute(){let r=await Ke.find(this.context.cwd,this.context.plugins);return(await Rt.start({configuration:r,json:this.json,stdout:this.context.stdout},async a=>{let n=await Hg(r,nn);for(let[u,{experimental:A,...p}]of Object.entries(n)){let h=u;A&&(h+=" [experimental]"),a.reportJson({name:u,experimental:A,...p}),a.reportInfo(null,h)}})).exitCode()}};var tgt=/^[0-9]+$/,rgt=process.platform==="win32";function sde(t){return tgt.test(t)?`pull/${t}/head`:t}var ngt=({repository:t,branch:e},r)=>[["git","init",ue.fromPortablePath(r)],["git","remote","add","origin",t],["git","fetch","origin","--depth=1",sde(e)],["git","reset","--hard","FETCH_HEAD"]],igt=({branch:t})=>[["git","fetch","origin","--depth=1",sde(t),"--force"],["git","reset","--hard","FETCH_HEAD"],["git","clean","-dfx","-e","packages/yarnpkg-cli/bundles"]],sgt=({plugins:t,noMinify:e},r,o)=>[["yarn","build:cli",...new Array().concat(...t.map(a=>["--plugin",K.resolve(o,a)])),...e?["--no-minify"]:[],"|"],[rgt?"move":"mv","packages/yarnpkg-cli/bundles/yarn.js",ue.fromPortablePath(r),"|"]],pE=class extends ut{constructor(){super(...arguments);this.installPath=ge.String("--path",{description:"The path where the repository should be cloned to"});this.repository=ge.String("--repository","https://github.com/yarnpkg/berry.git",{description:"The repository that should be cloned"});this.branch=ge.String("--branch","master",{description:"The branch of the repository that should be cloned"});this.plugins=ge.Array("--plugin",[],{description:"An array of additional plugins that should be included in the bundle"});this.dryRun=ge.Boolean("-n,--dry-run",!1,{description:"If set, the bundle will be built but not added to the project"});this.noMinify=ge.Boolean("--no-minify",!1,{description:"Build a bundle for development (debugging) - non-minified and non-mangled"});this.force=ge.Boolean("-f,--force",!1,{description:"Always clone the repository instead of trying to fetch the latest 
commits"});this.skipPlugins=ge.Boolean("--skip-plugins",!1,{description:"Skip updating the contrib plugins"})}static{this.paths=[["set","version","from","sources"]]}static{this.usage=it.Usage({description:"build Yarn from master",details:` + This command will clone the Yarn repository into a temporary folder, then build it. The resulting bundle will then be copied into the local project. + + By default, it also updates all contrib plugins to the same commit the bundle is built from. This behavior can be disabled by using the \`--skip-plugins\` flag. + `,examples:[["Build Yarn from master","$0 set version from sources"]]})}async execute(){let r=await Ke.find(this.context.cwd,this.context.plugins),{project:o}=await kt.find(r,this.context.cwd),a=typeof this.installPath<"u"?K.resolve(this.context.cwd,ue.toPortablePath(this.installPath)):K.resolve(ue.toPortablePath((0,ide.tmpdir)()),"yarnpkg-sources",wn.makeHash(this.repository).slice(0,6));return(await Rt.start({configuration:r,stdout:this.context.stdout},async u=>{await F8(this,{configuration:r,report:u,target:a}),u.reportSeparator(),u.reportInfo(0,"Building a fresh bundle"),u.reportSeparator();let A=await Ur.execvp("git",["rev-parse","--short","HEAD"],{cwd:a,strict:!0}),p=K.join(a,`packages/yarnpkg-cli/bundles/yarn-${A.stdout.trim()}.js`);oe.existsSync(p)||(await n2(sgt(this,p,a),{configuration:r,context:this.context,target:a}),u.reportSeparator());let h=await oe.readFilePromise(p);if(!this.dryRun){let{bundleVersion:E}=await k8(r,null,async()=>h,{report:u});this.skipPlugins||await ogt(this,E,{project:o,report:u,target:a})}})).exitCode()}};async function n2(t,{configuration:e,context:r,target:o}){for(let[a,...n]of t){let u=n[n.length-1]==="|";if(u&&n.pop(),u)await Ur.pipevp(a,n,{cwd:o,stdin:r.stdin,stdout:r.stdout,stderr:r.stderr,strict:!0});else{r.stdout.write(`${pe.pretty(e,` $ ${[a,...n].join(" ")}`,"grey")} +`);try{await Ur.execvp(a,n,{cwd:o,strict:!0})}catch(A){throw r.stdout.write(A.stdout||A.stack),A}}}}async function F8(t,{configuration:e,report:r,target:o}){let a=!1;if(!t.force&&oe.existsSync(K.join(o,".git"))){r.reportInfo(0,"Fetching the latest commits"),r.reportSeparator();try{await n2(igt(t),{configuration:e,context:t.context,target:o}),a=!0}catch{r.reportSeparator(),r.reportWarning(0,"Repository update failed; we'll try to regenerate it")}}a||(r.reportInfo(0,"Cloning the remote repository"),r.reportSeparator(),await oe.removePromise(o),await oe.mkdirPromise(o,{recursive:!0}),await n2(ngt(t,o),{configuration:e,context:t.context,target:o}))}async function ogt(t,e,{project:r,report:o,target:a}){let n=await Hg(r.configuration,e),u=new Set(Object.keys(n));for(let A of r.configuration.plugins.keys())u.has(A)&&await R8(A,t,{project:r,report:o,target:a})}Ge();Ge();Pt();qt();var ode=Ze(Jn()),ade=ve("vm");var hE=class extends ut{constructor(){super(...arguments);this.name=ge.String();this.checksum=ge.Boolean("--checksum",!0,{description:"Whether to care if this plugin is modified"})}static{this.paths=[["plugin","import"]]}static{this.usage=it.Usage({category:"Plugin-related commands",description:"download a plugin",details:` + This command downloads the specified plugin from its remote location and updates the configuration to reference it in further CLI invocations. + + Three types of plugin references are accepted: + + - If the plugin is stored within the Yarn repository, it can be referenced by name. + - Third-party plugins can be referenced directly through their public urls. 
+ - Local plugins can be referenced by their path on the disk. + + If the \`--no-checksum\` option is set, Yarn will no longer care if the plugin is modified. + + Plugins cannot be downloaded from the npm registry, and aren't allowed to have dependencies (they need to be bundled into a single file, possibly thanks to the \`@yarnpkg/builder\` package). + `,examples:[['Download and activate the "@yarnpkg/plugin-exec" plugin',"$0 plugin import @yarnpkg/plugin-exec"],['Download and activate the "@yarnpkg/plugin-exec" plugin (shorthand)',"$0 plugin import exec"],["Download and activate a community plugin","$0 plugin import https://example.org/path/to/plugin.js"],["Activate a local plugin","$0 plugin import ./path/to/plugin.js"]]})}async execute(){let r=await Ke.find(this.context.cwd,this.context.plugins);return(await Rt.start({configuration:r,stdout:this.context.stdout},async a=>{let{project:n}=await kt.find(r,this.context.cwd),u,A;if(this.name.match(/^\.{0,2}[\\/]/)||ue.isAbsolute(this.name)){let p=K.resolve(this.context.cwd,ue.toPortablePath(this.name));a.reportInfo(0,`Reading ${pe.pretty(r,p,pe.Type.PATH)}`),u=K.relative(n.cwd,p),A=await oe.readFilePromise(p)}else{let p;if(this.name.match(/^https?:/)){try{new URL(this.name)}catch{throw new Jt(52,`Plugin specifier "${this.name}" is neither a plugin name nor a valid url`)}u=this.name,p=this.name}else{let h=G.parseLocator(this.name.replace(/^((@yarnpkg\/)?plugin-)?/,"@yarnpkg/plugin-"));if(h.reference!=="unknown"&&!ode.default.valid(h.reference))throw new Jt(0,"Official plugins only accept strict version references. Use an explicit URL if you wish to download them from another location.");let E=G.stringifyIdent(h),I=await Hg(r,nn);if(!Object.hasOwn(I,E)){let v=`Couldn't find a plugin named ${G.prettyIdent(r,h)} on the remote registry. 
+`;throw r.plugins.has(E)?v+=`A plugin named ${G.prettyIdent(r,h)} is already installed; possibly attempting to import a built-in plugin.`:v+=`Note that only the plugins referenced on our website (${pe.pretty(r,"https://github.com/yarnpkg/berry/blob/master/plugins.yml",pe.Type.URL)}) can be referenced by their name; any other plugin will have to be referenced through its public url (for example ${pe.pretty(r,"https://github.com/yarnpkg/berry/raw/master/packages/plugin-typescript/bin/%40yarnpkg/plugin-typescript.js",pe.Type.URL)}).`,new Jt(51,v)}u=E,p=I[E].url,h.reference!=="unknown"?p=p.replace(/\/master\//,`/${E}/${h.reference}/`):nn!==null&&(p=p.replace(/\/master\//,`/@yarnpkg/cli/${nn}/`))}a.reportInfo(0,`Downloading ${pe.pretty(r,p,"green")}`),A=await sn.get(p,{configuration:r})}await T8(u,A,{checksum:this.checksum,project:n,report:a})})).exitCode()}};async function T8(t,e,{checksum:r=!0,project:o,report:a}){let{configuration:n}=o,u={},A={exports:u};(0,ade.runInNewContext)(e.toString(),{module:A,exports:u});let h=`.yarn/plugins/${A.exports.name}.cjs`,E=K.resolve(o.cwd,h);a.reportInfo(0,`Saving the new plugin in ${pe.pretty(n,h,"magenta")}`),await oe.mkdirPromise(K.dirname(E),{recursive:!0}),await oe.writeFilePromise(E,e);let I={path:h,spec:t};r&&(I.checksum=wn.makeHash(e)),await Ke.addPlugin(o.cwd,[I])}var agt=({pluginName:t,noMinify:e},r)=>[["yarn",`build:${t}`,...e?["--no-minify"]:[],"|"]],gE=class extends ut{constructor(){super(...arguments);this.installPath=ge.String("--path",{description:"The path where the repository should be cloned to"});this.repository=ge.String("--repository","https://github.com/yarnpkg/berry.git",{description:"The repository that should be cloned"});this.branch=ge.String("--branch","master",{description:"The branch of the repository that should be cloned"});this.noMinify=ge.Boolean("--no-minify",!1,{description:"Build a plugin for development (debugging) - non-minified and non-mangled"});this.force=ge.Boolean("-f,--force",!1,{description:"Always clone the repository instead of trying to fetch the latest commits"});this.name=ge.String()}static{this.paths=[["plugin","import","from","sources"]]}static{this.usage=it.Usage({category:"Plugin-related commands",description:"build a plugin from sources",details:` + This command clones the Yarn repository into a temporary folder, builds the specified contrib plugin and updates the configuration to reference it in further CLI invocations. + + The plugins can be referenced by their short name if sourced from the official Yarn repository. + `,examples:[['Build and activate the "@yarnpkg/plugin-exec" plugin',"$0 plugin import from sources @yarnpkg/plugin-exec"],['Build and activate the "@yarnpkg/plugin-exec" plugin (shorthand)',"$0 plugin import from sources exec"]]})}async execute(){let r=await Ke.find(this.context.cwd,this.context.plugins),o=typeof this.installPath<"u"?K.resolve(this.context.cwd,ue.toPortablePath(this.installPath)):K.resolve(ue.toPortablePath((0,lde.tmpdir)()),"yarnpkg-sources",wn.makeHash(this.repository).slice(0,6));return(await Rt.start({configuration:r,stdout:this.context.stdout},async n=>{let{project:u}=await kt.find(r,this.context.cwd),A=G.parseIdent(this.name.replace(/^((@yarnpkg\/)?plugin-)?/,"@yarnpkg/plugin-")),p=G.stringifyIdent(A),h=await Hg(r,nn);if(!Object.hasOwn(h,p))throw new Jt(51,`Couldn't find a plugin named "${p}" on the remote registry. 
Note that only the plugins referenced on our website (https://github.com/yarnpkg/berry/blob/master/plugins.yml) can be built and imported from sources.`);let E=p;await F8(this,{configuration:r,report:n,target:o}),await R8(E,this,{project:u,report:n,target:o})})).exitCode()}};async function R8(t,{context:e,noMinify:r},{project:o,report:a,target:n}){let u=t.replace(/@yarnpkg\//,""),{configuration:A}=o;a.reportSeparator(),a.reportInfo(0,`Building a fresh ${u}`),a.reportSeparator(),await n2(agt({pluginName:u,noMinify:r},n),{configuration:A,context:e,target:n}),a.reportSeparator();let p=K.resolve(n,`packages/${u}/bundles/${t}.js`),h=await oe.readFilePromise(p);await T8(t,h,{project:o,report:a})}Ge();Pt();qt();var dE=class extends ut{constructor(){super(...arguments);this.name=ge.String()}static{this.paths=[["plugin","remove"]]}static{this.usage=it.Usage({category:"Plugin-related commands",description:"remove a plugin",details:` + This command deletes the specified plugin from the .yarn/plugins folder and removes it from the configuration. + + **Note:** The plugins have to be referenced by their name property, which can be obtained using the \`yarn plugin runtime\` command. Shorthands are not allowed. + `,examples:[["Remove a plugin imported from the Yarn repository","$0 plugin remove @yarnpkg/plugin-typescript"],["Remove a plugin imported from a local file","$0 plugin remove my-local-plugin"]]})}async execute(){let r=await Ke.find(this.context.cwd,this.context.plugins),{project:o}=await kt.find(r,this.context.cwd);return(await Rt.start({configuration:r,stdout:this.context.stdout},async n=>{let u=this.name,A=G.parseIdent(u);if(!r.plugins.has(u))throw new st(`${G.prettyIdent(r,A)} isn't referenced by the current configuration`);let p=`.yarn/plugins/${u}.cjs`,h=K.resolve(o.cwd,p);oe.existsSync(h)&&(n.reportInfo(0,`Removing ${pe.pretty(r,p,pe.Type.PATH)}...`),await oe.removePromise(h)),n.reportInfo(0,"Updating the configuration..."),await Ke.updateConfiguration(o.cwd,{plugins:E=>{if(!Array.isArray(E))return E;let I=E.filter(v=>v.path!==p);return I.length===0?Ke.deleteProperty:I.length===E.length?E:I}})})).exitCode()}};Ge();qt();var mE=class extends ut{constructor(){super(...arguments);this.json=ge.Boolean("--json",!1,{description:"Format the output as an NDJSON stream"})}static{this.paths=[["plugin","runtime"]]}static{this.usage=it.Usage({category:"Plugin-related commands",description:"list the active plugins",details:` + This command prints the currently active plugins. Will be displayed both builtin plugins and external plugins. + `,examples:[["List the currently active plugins","$0 plugin runtime"]]})}async execute(){let r=await Ke.find(this.context.cwd,this.context.plugins);return(await Rt.start({configuration:r,json:this.json,stdout:this.context.stdout},async a=>{for(let n of r.plugins.keys()){let u=this.context.plugins.plugins.has(n),A=n;u&&(A+=" [builtin]"),a.reportJson({name:n,builtin:u}),a.reportInfo(null,`${A}`)}})).exitCode()}};Ge();Ge();qt();var yE=class extends ut{constructor(){super(...arguments);this.idents=ge.Rest()}static{this.paths=[["rebuild"]]}static{this.usage=it.Usage({description:"rebuild the project's native packages",details:` + This command will automatically cause Yarn to forget about previous compilations of the given packages and to run them again. + + Note that while Yarn forgets the compilation, the previous artifacts aren't erased from the filesystem and may affect the next builds (in good or bad). 
To avoid this, you may remove the .yarn/unplugged folder, or any other relevant location where packages might have been stored (Yarn may offer a way to do that automatically in the future). + + By default all packages will be rebuilt, but you can filter the list by specifying the names of the packages you want to clear from memory. + `,examples:[["Rebuild all packages","$0 rebuild"],["Rebuild fsevents only","$0 rebuild fsevents"]]})}async execute(){let r=await Ke.find(this.context.cwd,this.context.plugins),{project:o,workspace:a}=await kt.find(r,this.context.cwd),n=await Gr.find(r);if(!a)throw new sr(o.cwd,this.context.cwd);let u=new Set;for(let A of this.idents)u.add(G.parseIdent(A).identHash);if(await o.restoreInstallState({restoreResolutions:!1}),await o.resolveEverything({cache:n,report:new ki}),u.size>0)for(let A of o.storedPackages.values())u.has(A.identHash)&&(o.storedBuildState.delete(A.locatorHash),o.skippedBuilds.delete(A.locatorHash));else o.storedBuildState.clear(),o.skippedBuilds.clear();return await o.installWithNewReport({stdout:this.context.stdout,quiet:this.context.quiet},{cache:n})}};Ge();Ge();Ge();qt();var N8=Ze($o());el();var EE=class extends ut{constructor(){super(...arguments);this.all=ge.Boolean("-A,--all",!1,{description:"Apply the operation to all workspaces from the current project"});this.mode=ge.String("--mode",{description:"Change what artifacts installs generate",validator:Js(hl)});this.patterns=ge.Rest()}static{this.paths=[["remove"]]}static{this.usage=it.Usage({description:"remove dependencies from the project",details:` + This command will remove the packages matching the specified patterns from the current workspace. + + If the \`--mode=\` option is set, Yarn will change which artifacts are generated. The modes currently supported are: + + - \`skip-build\` will not run the build scripts at all. Note that this is different from setting \`enableScripts\` to false because the latter will disable build scripts, and thus affect the content of the artifacts generated on disk, whereas the former will just disable the build step - but not the scripts themselves, which just won't run. + + - \`update-lockfile\` will skip the link step altogether, and only fetch packages that are missing from the lockfile (or that have no associated checksums). This mode is typically used by tools like Renovate or Dependabot to keep a lockfile up-to-date without incurring the full install cost. + + This command accepts glob patterns as arguments (if valid Idents and supported by [micromatch](https://github.com/micromatch/micromatch)). Make sure to escape the patterns, to prevent your own shell from trying to expand them. 
+ `,examples:[["Remove a dependency from the current project","$0 remove lodash"],["Remove a dependency from all workspaces at once","$0 remove lodash --all"],["Remove all dependencies starting with `eslint-`","$0 remove 'eslint-*'"],["Remove all dependencies with the `@babel` scope","$0 remove '@babel/*'"],["Remove all dependencies matching `react-dom` or `react-helmet`","$0 remove 'react-{dom,helmet}'"]]})}async execute(){let r=await Ke.find(this.context.cwd,this.context.plugins),{project:o,workspace:a}=await kt.find(r,this.context.cwd),n=await Gr.find(r);if(!a)throw new sr(o.cwd,this.context.cwd);await o.restoreInstallState({restoreResolutions:!1});let u=this.all?o.workspaces:[a],A=["dependencies","devDependencies","peerDependencies"],p=[],h=!1,E=[];for(let C of this.patterns){let R=!1,L=G.parseIdent(C);for(let U of u){let z=[...U.manifest.peerDependenciesMeta.keys()];for(let te of(0,N8.default)(z,C))U.manifest.peerDependenciesMeta.delete(te),h=!0,R=!0;for(let te of A){let ae=U.manifest.getForScope(te),le=[...ae.values()].map(ce=>G.stringifyIdent(ce));for(let ce of(0,N8.default)(le,G.stringifyIdent(L))){let{identHash:Ce}=G.parseIdent(ce),de=ae.get(Ce);if(typeof de>"u")throw new Error("Assertion failed: Expected the descriptor to be registered");U.manifest[te].delete(Ce),E.push([U,te,de]),h=!0,R=!0}}}R||p.push(C)}let I=p.length>1?"Patterns":"Pattern",v=p.length>1?"don't":"doesn't",x=this.all?"any":"this";if(p.length>0)throw new st(`${I} ${pe.prettyList(r,p,pe.Type.CODE)} ${v} match any packages referenced by ${x} workspace`);return h?(await r.triggerMultipleHooks(C=>C.afterWorkspaceDependencyRemoval,E),await o.installWithNewReport({stdout:this.context.stdout},{cache:n,mode:this.mode})):0}};Ge();Ge();qt();var cde=ve("util"),CE=class extends ut{constructor(){super(...arguments);this.json=ge.Boolean("--json",!1,{description:"Format the output as an NDJSON stream"})}static{this.paths=[["run"]]}async execute(){let r=await Ke.find(this.context.cwd,this.context.plugins),{project:o,workspace:a}=await kt.find(r,this.context.cwd);if(!a)throw new sr(o.cwd,this.context.cwd);return(await Rt.start({configuration:r,stdout:this.context.stdout,json:this.json},async u=>{let A=a.manifest.scripts,p=He.sortMap(A.keys(),I=>I),h={breakLength:1/0,colors:r.get("enableColors"),maxArrayLength:2},E=p.reduce((I,v)=>Math.max(I,v.length),0);for(let[I,v]of A.entries())u.reportInfo(null,`${I.padEnd(E," ")} ${(0,cde.inspect)(v,h)}`),u.reportJson({name:I,script:v})})).exitCode()}};Ge();Ge();qt();var wE=class extends ut{constructor(){super(...arguments);this.inspect=ge.String("--inspect",!1,{tolerateBoolean:!0,description:"Forwarded to the underlying Node process when executing a binary"});this.inspectBrk=ge.String("--inspect-brk",!1,{tolerateBoolean:!0,description:"Forwarded to the underlying Node process when executing a binary"});this.topLevel=ge.Boolean("-T,--top-level",!1,{description:"Check the root workspace for scripts and/or binaries instead of the current one"});this.binariesOnly=ge.Boolean("-B,--binaries-only",!1,{description:"Ignore any user defined scripts and only check for binaries"});this.require=ge.String("--require",{description:"Forwarded to the underlying Node process when executing a binary"});this.silent=ge.Boolean("--silent",{hidden:!0});this.scriptName=ge.String();this.args=ge.Proxy()}static{this.paths=[["run"]]}static{this.usage=it.Usage({description:"run a script defined in the package.json",details:` + This command will run a tool. 
The exact tool that will be executed will depend on the current state of your workspace: + + - If the \`scripts\` field from your local package.json contains a matching script name, its definition will get executed. + + - Otherwise, if one of the local workspace's dependencies exposes a binary with a matching name, this binary will get executed. + + - Otherwise, if the specified name contains a colon character and if one of the workspaces in the project contains exactly one script with a matching name, then this script will get executed. + + Whatever happens, the cwd of the spawned process will be the workspace that declares the script (which makes it possible to call commands cross-workspaces using the third syntax). + `,examples:[["Run the tests from the local workspace","$0 run test"],['Same thing, but without the "run" keyword',"$0 test"],["Inspect Webpack while running","$0 run --inspect-brk webpack"]]})}async execute(){let r=await Ke.find(this.context.cwd,this.context.plugins),{project:o,workspace:a,locator:n}=await kt.find(r,this.context.cwd);await o.restoreInstallState();let u=this.topLevel?o.topLevelWorkspace.anchoredLocator:n;if(!this.binariesOnly&&await An.hasPackageScript(u,this.scriptName,{project:o}))return await An.executePackageScript(u,this.scriptName,this.args,{project:o,stdin:this.context.stdin,stdout:this.context.stdout,stderr:this.context.stderr});let A=await An.getPackageAccessibleBinaries(u,{project:o});if(A.get(this.scriptName)){let h=[];return this.inspect&&(typeof this.inspect=="string"?h.push(`--inspect=${this.inspect}`):h.push("--inspect")),this.inspectBrk&&(typeof this.inspectBrk=="string"?h.push(`--inspect-brk=${this.inspectBrk}`):h.push("--inspect-brk")),this.require&&h.push(`--require=${this.require}`),await An.executePackageAccessibleBinary(u,this.scriptName,this.args,{cwd:this.context.cwd,project:o,stdin:this.context.stdin,stdout:this.context.stdout,stderr:this.context.stderr,nodeArgs:h,packageAccessibleBinaries:A})}if(!this.topLevel&&!this.binariesOnly&&a&&this.scriptName.includes(":")){let E=(await Promise.all(o.workspaces.map(async I=>I.manifest.scripts.has(this.scriptName)?I:null))).filter(I=>I!==null);if(E.length===1)return await An.executeWorkspaceScript(E[0],this.scriptName,this.args,{stdin:this.context.stdin,stdout:this.context.stdout,stderr:this.context.stderr})}if(this.topLevel)throw this.scriptName==="node-gyp"?new st(`Couldn't find a script name "${this.scriptName}" in the top-level (used by ${G.prettyLocator(r,n)}). This typically happens because some package depends on "node-gyp" to build itself, but didn't list it in their dependencies. To fix that, please run "yarn add node-gyp" into your top-level workspace. You also can open an issue on the repository of the specified package to suggest them to use an optional peer dependency.`):new st(`Couldn't find a script name "${this.scriptName}" in the top-level (used by ${G.prettyLocator(r,n)}).`);{if(this.scriptName==="global")throw new st("The 'yarn global' commands have been removed in 2.x - consider using 'yarn dlx' or a third-party plugin instead");let h=[this.scriptName].concat(this.args);for(let[E,I]of Uy)for(let v of I)if(h.length>=v.length&&JSON.stringify(h.slice(0,v.length))===JSON.stringify(v))throw new st(`Couldn't find a script named "${this.scriptName}", but a matching command can be found in the ${E} plugin. 
You can install it with "yarn plugin import ${E}".`);throw new st(`Couldn't find a script named "${this.scriptName}".`)}}};Ge();Ge();qt();var IE=class extends ut{constructor(){super(...arguments);this.descriptor=ge.String();this.resolution=ge.String()}static{this.paths=[["set","resolution"]]}static{this.usage=it.Usage({description:"enforce a package resolution",details:'\n This command updates the resolution table so that `descriptor` is resolved by `resolution`.\n\n Note that by default this command only affect the current resolution table - meaning that this "manual override" will disappear if you remove the lockfile, or if the package disappear from the table. If you wish to make the enforced resolution persist whatever happens, edit the `resolutions` field in your top-level manifest.\n\n Note that no attempt is made at validating that `resolution` is a valid resolution entry for `descriptor`.\n ',examples:[["Force all instances of lodash@npm:^1.2.3 to resolve to 1.5.0","$0 set resolution lodash@npm:^1.2.3 1.5.0"]]})}async execute(){let r=await Ke.find(this.context.cwd,this.context.plugins),{project:o,workspace:a}=await kt.find(r,this.context.cwd),n=await Gr.find(r);if(await o.restoreInstallState({restoreResolutions:!1}),!a)throw new sr(o.cwd,this.context.cwd);let u=G.parseDescriptor(this.descriptor,!0),A=G.makeDescriptor(u,this.resolution);return o.storedDescriptors.set(u.descriptorHash,u),o.storedDescriptors.set(A.descriptorHash,A),o.resolutionAliases.set(u.descriptorHash,A.descriptorHash),await o.installWithNewReport({stdout:this.context.stdout},{cache:n})}};Ge();Pt();qt();var ude=Ze($o()),BE=class extends ut{constructor(){super(...arguments);this.all=ge.Boolean("-A,--all",!1,{description:"Unlink all workspaces belonging to the target project from the current one"});this.leadingArguments=ge.Rest()}static{this.paths=[["unlink"]]}static{this.usage=it.Usage({description:"disconnect the local project from another one",details:` + This command will remove any resolutions in the project-level manifest that would have been added via a yarn link with similar arguments. 
+ `,examples:[["Unregister a remote workspace in the current project","$0 unlink ~/ts-loader"],["Unregister all workspaces from a remote project in the current project","$0 unlink ~/jest --all"],["Unregister all previously linked workspaces","$0 unlink --all"],["Unregister all workspaces matching a glob","$0 unlink '@babel/*' 'pkg-{a,b}'"]]})}async execute(){let r=await Ke.find(this.context.cwd,this.context.plugins),{project:o,workspace:a}=await kt.find(r,this.context.cwd),n=await Gr.find(r);if(!a)throw new sr(o.cwd,this.context.cwd);let u=o.topLevelWorkspace,A=new Set;if(this.leadingArguments.length===0&&this.all)for(let{pattern:p,reference:h}of u.manifest.resolutions)h.startsWith("portal:")&&A.add(p.descriptor.fullName);if(this.leadingArguments.length>0)for(let p of this.leadingArguments){let h=K.resolve(this.context.cwd,ue.toPortablePath(p));if(He.isPathLike(p)){let E=await Ke.find(h,this.context.plugins,{useRc:!1,strict:!1}),{project:I,workspace:v}=await kt.find(E,h);if(!v)throw new sr(I.cwd,h);if(this.all){for(let x of I.workspaces)x.manifest.name&&A.add(G.stringifyIdent(x.anchoredLocator));if(A.size===0)throw new st("No workspace found to be unlinked in the target project")}else{if(!v.manifest.name)throw new st("The target workspace doesn't have a name and thus cannot be unlinked");A.add(G.stringifyIdent(v.anchoredLocator))}}else{let E=[...u.manifest.resolutions.map(({pattern:I})=>I.descriptor.fullName)];for(let I of(0,ude.default)(E,p))A.add(I)}}return u.manifest.resolutions=u.manifest.resolutions.filter(({pattern:p})=>!A.has(p.descriptor.fullName)),await o.installWithNewReport({stdout:this.context.stdout,quiet:this.context.quiet},{cache:n})}};Ge();Ge();Ge();qt();var Ade=Ze(J1()),L8=Ze($o());el();var vE=class extends ut{constructor(){super(...arguments);this.interactive=ge.Boolean("-i,--interactive",{description:"Offer various choices, depending on the detected upgrade paths"});this.fixed=ge.Boolean("-F,--fixed",!1,{description:"Store dependency tags as-is instead of resolving them"});this.exact=ge.Boolean("-E,--exact",!1,{description:"Don't use any semver modifier on the resolved range"});this.tilde=ge.Boolean("-T,--tilde",!1,{description:"Use the `~` semver modifier on the resolved range"});this.caret=ge.Boolean("-C,--caret",!1,{description:"Use the `^` semver modifier on the resolved range"});this.recursive=ge.Boolean("-R,--recursive",!1,{description:"Resolve again ALL resolutions for those packages"});this.mode=ge.String("--mode",{description:"Change what artifacts installs generate",validator:Js(hl)});this.patterns=ge.Rest()}static{this.paths=[["up"]]}static{this.usage=it.Usage({description:"upgrade dependencies across the project",details:"\n This command upgrades the packages matching the list of specified patterns to their latest available version across the whole project (regardless of whether they're part of `dependencies` or `devDependencies` - `peerDependencies` won't be affected). This is a project-wide command: all workspaces will be upgraded in the process.\n\n If `-R,--recursive` is set the command will change behavior and no other switch will be allowed. When operating under this mode `yarn up` will force all ranges matching the selected packages to be resolved again (often to the highest available versions) before being stored in the lockfile. 
It however won't touch your manifests anymore, so depending on your needs you might want to run both `yarn up` and `yarn up -R` to cover all bases.\n\n If `-i,--interactive` is set (or if the `preferInteractive` settings is toggled on) the command will offer various choices, depending on the detected upgrade paths. Some upgrades require this flag in order to resolve ambiguities.\n\n The, `-C,--caret`, `-E,--exact` and `-T,--tilde` options have the same meaning as in the `add` command (they change the modifier used when the range is missing or a tag, and are ignored when the range is explicitly set).\n\n If the `--mode=` option is set, Yarn will change which artifacts are generated. The modes currently supported are:\n\n - `skip-build` will not run the build scripts at all. Note that this is different from setting `enableScripts` to false because the latter will disable build scripts, and thus affect the content of the artifacts generated on disk, whereas the former will just disable the build step - but not the scripts themselves, which just won't run.\n\n - `update-lockfile` will skip the link step altogether, and only fetch packages that are missing from the lockfile (or that have no associated checksums). This mode is typically used by tools like Renovate or Dependabot to keep a lockfile up-to-date without incurring the full install cost.\n\n Generally you can see `yarn up` as a counterpart to what was `yarn upgrade --latest` in Yarn 1 (ie it ignores the ranges previously listed in your manifests), but unlike `yarn upgrade` which only upgraded dependencies in the current workspace, `yarn up` will upgrade all workspaces at the same time.\n\n This command accepts glob patterns as arguments (if valid Descriptors and supported by [micromatch](https://github.com/micromatch/micromatch)). 
Make sure to escape the patterns, to prevent your own shell from trying to expand them.\n\n **Note:** The ranges have to be static, only the package scopes and names can contain glob patterns.\n ",examples:[["Upgrade all instances of lodash to the latest release","$0 up lodash"],["Upgrade all instances of lodash to the latest release, but ask confirmation for each","$0 up lodash -i"],["Upgrade all instances of lodash to 1.2.3","$0 up lodash@1.2.3"],["Upgrade all instances of packages with the `@babel` scope to the latest release","$0 up '@babel/*'"],["Upgrade all instances of packages containing the word `jest` to the latest release","$0 up '*jest*'"],["Upgrade all instances of packages with the `@babel` scope to 7.0.0","$0 up '@babel/*@7.0.0'"]]})}static{this.schema=[Yw("recursive",Yu.Forbids,["interactive","exact","tilde","caret"],{ignore:[void 0,!1]})]}async execute(){return this.recursive?await this.executeUpRecursive():await this.executeUpClassic()}async executeUpRecursive(){let r=await Ke.find(this.context.cwd,this.context.plugins),{project:o,workspace:a}=await kt.find(r,this.context.cwd),n=await Gr.find(r);if(!a)throw new sr(o.cwd,this.context.cwd);await o.restoreInstallState({restoreResolutions:!1});let u=[...o.storedDescriptors.values()],A=u.map(E=>G.stringifyIdent(E)),p=new Set;for(let E of this.patterns){if(G.parseDescriptor(E).range!=="unknown")throw new st("Ranges aren't allowed when using --recursive");for(let I of(0,L8.default)(A,E)){let v=G.parseIdent(I);p.add(v.identHash)}}let h=u.filter(E=>p.has(E.identHash));for(let E of h)o.storedDescriptors.delete(E.descriptorHash),o.storedResolutions.delete(E.descriptorHash);return await o.installWithNewReport({stdout:this.context.stdout},{cache:n,mode:this.mode})}async executeUpClassic(){let r=await Ke.find(this.context.cwd,this.context.plugins),{project:o,workspace:a}=await kt.find(r,this.context.cwd),n=await Gr.find(r);if(!a)throw new sr(o.cwd,this.context.cwd);await o.restoreInstallState({restoreResolutions:!1});let u=this.fixed,A=r.isInteractive({interactive:this.interactive,stdout:this.context.stdout}),p=Z1(this,o),h=A?["keep","reuse","project","latest"]:["project","latest"],E=[],I=[];for(let L of this.patterns){let U=!1,z=G.parseDescriptor(L),te=G.stringifyIdent(z);for(let ae of o.workspaces)for(let le of["dependencies","devDependencies"]){let Ce=[...ae.manifest.getForScope(le).values()].map(Be=>G.stringifyIdent(Be)),de=te==="*"?Ce:(0,L8.default)(Ce,te);for(let Be of de){let Ee=G.parseIdent(Be),g=ae.manifest[le].get(Ee.identHash);if(typeof g>"u")throw new Error("Assertion failed: Expected the descriptor to be registered");let me=G.makeDescriptor(Ee,z.range);E.push(Promise.resolve().then(async()=>[ae,le,g,await $1(me,{project:o,workspace:ae,cache:n,target:le,fixed:u,modifier:p,strategies:h})])),U=!0}}U||I.push(L)}if(I.length>1)throw new st(`Patterns ${pe.prettyList(r,I,pe.Type.CODE)} don't match any packages referenced by any workspace`);if(I.length>0)throw new st(`Pattern ${pe.prettyList(r,I,pe.Type.CODE)} doesn't match any packages referenced by any workspace`);let v=await Promise.all(E),x=await AA.start({configuration:r,stdout:this.context.stdout,suggestInstall:!1},async L=>{for(let[,,U,{suggestions:z,rejections:te}]of v){let ae=z.filter(le=>le.descriptor!==null);if(ae.length===0){let[le]=te;if(typeof le>"u")throw new Error("Assertion failed: Expected an error to have been set");let ce=this.cli.error(le);o.configuration.get("enableNetwork")?L.reportError(27,`${G.prettyDescriptor(r,U)} can't be resolved to a satisfying range + 
+${ce}`):L.reportError(27,`${G.prettyDescriptor(r,U)} can't be resolved to a satisfying range (note: network resolution has been disabled) + +${ce}`)}else ae.length>1&&!A&&L.reportError(27,`${G.prettyDescriptor(r,U)} has multiple possible upgrade strategies; use -i to disambiguate manually`)}});if(x.hasErrors())return x.exitCode();let C=!1,R=[];for(let[L,U,,{suggestions:z}]of v){let te,ae=z.filter(de=>de.descriptor!==null),le=ae[0].descriptor,ce=ae.every(de=>G.areDescriptorsEqual(de.descriptor,le));ae.length===1||ce?te=le:(C=!0,{answer:te}=await(0,Ade.prompt)({type:"select",name:"answer",message:`Which range do you want to use in ${G.prettyWorkspace(r,L)} \u276F ${U}?`,choices:z.map(({descriptor:de,name:Be,reason:Ee})=>de?{name:Be,hint:Ee,descriptor:de}:{name:Be,hint:Ee,disabled:!0}),onCancel:()=>process.exit(130),result(de){return this.find(de,"descriptor")},stdin:this.context.stdin,stdout:this.context.stdout}));let Ce=L.manifest[U].get(te.identHash);if(typeof Ce>"u")throw new Error("Assertion failed: This descriptor should have a matching entry");if(Ce.descriptorHash!==te.descriptorHash)L.manifest[U].set(te.identHash,te),R.push([L,U,Ce,te]);else{let de=r.makeResolver(),Be={project:o,resolver:de},Ee=r.normalizeDependency(Ce),g=de.bindDescriptor(Ee,L.anchoredLocator,Be);o.forgetResolution(g)}}return await r.triggerMultipleHooks(L=>L.afterWorkspaceDependencyReplacement,R),C&&this.context.stdout.write(` +`),await o.installWithNewReport({stdout:this.context.stdout},{cache:n,mode:this.mode})}};Ge();Ge();Ge();qt();var DE=class extends ut{constructor(){super(...arguments);this.recursive=ge.Boolean("-R,--recursive",!1,{description:"List, for each workspace, what are all the paths that lead to the dependency"});this.json=ge.Boolean("--json",!1,{description:"Format the output as an NDJSON stream"});this.peers=ge.Boolean("--peers",!1,{description:"Also print the peer dependencies that match the specified name"});this.package=ge.String()}static{this.paths=[["why"]]}static{this.usage=it.Usage({description:"display the reason why a package is needed",details:` + This command prints the exact reasons why a package appears in the dependency tree. + + If \`-R,--recursive\` is set, the listing will go in depth and will list, for each workspaces, what are all the paths that lead to the dependency. Note that the display is somewhat optimized in that it will not print the package listing twice for a single package, so if you see a leaf named "Foo" when looking for "Bar", it means that "Foo" already got printed higher in the tree. 
+ `,examples:[["Explain why lodash is used in your project","$0 why lodash"]]})}async execute(){let r=await Ke.find(this.context.cwd,this.context.plugins),{project:o,workspace:a}=await kt.find(r,this.context.cwd);if(!a)throw new sr(o.cwd,this.context.cwd);await o.restoreInstallState();let n=G.parseIdent(this.package).identHash,u=this.recursive?cgt(o,n,{configuration:r,peers:this.peers}):lgt(o,n,{configuration:r,peers:this.peers});fs.emitTree(u,{configuration:r,stdout:this.context.stdout,json:this.json,separators:1})}};function lgt(t,e,{configuration:r,peers:o}){let a=He.sortMap(t.storedPackages.values(),A=>G.stringifyLocator(A)),n={},u={children:n};for(let A of a){let p={};for(let E of A.dependencies.values()){if(!o&&A.peerDependencies.has(E.identHash))continue;let I=t.storedResolutions.get(E.descriptorHash);if(!I)throw new Error("Assertion failed: The resolution should have been registered");let v=t.storedPackages.get(I);if(!v)throw new Error("Assertion failed: The package should have been registered");if(v.identHash!==e)continue;{let C=G.stringifyLocator(A);n[C]={value:[A,pe.Type.LOCATOR],children:p}}let x=G.stringifyLocator(v);p[x]={value:[{descriptor:E,locator:v},pe.Type.DEPENDENT]}}}return u}function cgt(t,e,{configuration:r,peers:o}){let a=He.sortMap(t.workspaces,v=>G.stringifyLocator(v.anchoredLocator)),n=new Set,u=new Set,A=v=>{if(n.has(v.locatorHash))return u.has(v.locatorHash);if(n.add(v.locatorHash),v.identHash===e)return u.add(v.locatorHash),!0;let x=!1;v.identHash===e&&(x=!0);for(let C of v.dependencies.values()){if(!o&&v.peerDependencies.has(C.identHash))continue;let R=t.storedResolutions.get(C.descriptorHash);if(!R)throw new Error("Assertion failed: The resolution should have been registered");let L=t.storedPackages.get(R);if(!L)throw new Error("Assertion failed: The package should have been registered");A(L)&&(x=!0)}return x&&u.add(v.locatorHash),x};for(let v of a)A(v.anchoredPackage);let p=new Set,h={},E={children:h},I=(v,x,C)=>{if(!u.has(v.locatorHash))return;let R=C!==null?pe.tuple(pe.Type.DEPENDENT,{locator:v,descriptor:C}):pe.tuple(pe.Type.LOCATOR,v),L={},U={value:R,children:L},z=G.stringifyLocator(v);if(x[z]=U,!(C!==null&&t.tryWorkspaceByLocator(v))&&!p.has(v.locatorHash)){p.add(v.locatorHash);for(let te of v.dependencies.values()){if(!o&&v.peerDependencies.has(te.identHash))continue;let ae=t.storedResolutions.get(te.descriptorHash);if(!ae)throw new Error("Assertion failed: The resolution should have been registered");let le=t.storedPackages.get(ae);if(!le)throw new Error("Assertion failed: The package should have been registered");I(le,L,te)}}};for(let v of a)I(v.anchoredPackage,h,null);return E}Ge();var W8={};Vt(W8,{GitFetcher:()=>s2,GitResolver:()=>o2,default:()=>kgt,gitUtils:()=>ia});Ge();Pt();var ia={};Vt(ia,{TreeishProtocols:()=>i2,clone:()=>Y8,fetchBase:()=>Rde,fetchChangedFiles:()=>Tde,fetchChangedWorkspaces:()=>bgt,fetchRoot:()=>Fde,isGitUrl:()=>bE,lsRemote:()=>Qde,normalizeLocator:()=>Sgt,normalizeRepoUrl:()=>PE,resolveUrl:()=>G8,splitRepoUrl:()=>bh,validateRepoUrl:()=>j8});Ge();Pt();qt();var bde=Ze(Dde()),xde=Ze(uU()),SE=Ze(ve("querystring")),H8=Ze(Jn());function _8(t,e,r){let o=t.indexOf(r);return t.lastIndexOf(e,o>-1?o:1/0)}function Pde(t){try{return new URL(t)}catch{return}}function Dgt(t){let e=_8(t,"@","#"),r=_8(t,":","#");return r>e&&(t=`${t.slice(0,r)}/${t.slice(r+1)}`),_8(t,":","#")===-1&&t.indexOf("//")===-1&&(t=`ssh://${t}`),t}function Sde(t){return Pde(t)||Pde(Dgt(t))}function 
PE(t,{git:e=!1}={}){if(t=t.replace(/^git\+https:/,"https:"),t=t.replace(/^(?:github:|https:\/\/github\.com\/|git:\/\/github\.com\/)?(?!\.{1,2}\/)([a-zA-Z0-9._-]+)\/(?!\.{1,2}(?:#|$))([a-zA-Z0-9._-]+?)(?:\.git)?(#.*)?$/,"https://github.com/$1/$2.git$3"),t=t.replace(/^https:\/\/github\.com\/(?!\.{1,2}\/)([a-zA-Z0-9._-]+)\/(?!\.{1,2}(?:#|$))([a-zA-Z0-9._-]+?)\/tarball\/(.+)?$/,"https://github.com/$1/$2.git#$3"),e){let r=Sde(t);r&&(t=r.href),t=t.replace(/^git\+([^:]+):/,"$1:")}return t}function kde(){return{...process.env,GIT_SSH_COMMAND:process.env.GIT_SSH_COMMAND||`${process.env.GIT_SSH||"ssh"} -o BatchMode=yes`}}var Pgt=[/^ssh:/,/^git(?:\+[^:]+)?:/,/^(?:git\+)?https?:[^#]+\/[^#]+(?:\.git)(?:#.*)?$/,/^git@[^#]+\/[^#]+\.git(?:#.*)?$/,/^(?:github:|https:\/\/github\.com\/)?(?!\.{1,2}\/)([a-zA-Z._0-9-]+)\/(?!\.{1,2}(?:#|$))([a-zA-Z._0-9-]+?)(?:\.git)?(?:#.*)?$/,/^https:\/\/github\.com\/(?!\.{1,2}\/)([a-zA-Z0-9._-]+)\/(?!\.{1,2}(?:#|$))([a-zA-Z0-9._-]+?)\/tarball\/(.+)?$/],i2=(a=>(a.Commit="commit",a.Head="head",a.Tag="tag",a.Semver="semver",a))(i2||{});function bE(t){return t?Pgt.some(e=>!!t.match(e)):!1}function bh(t){t=PE(t);let e=t.indexOf("#");if(e===-1)return{repo:t,treeish:{protocol:"head",request:"HEAD"},extra:{}};let r=t.slice(0,e),o=t.slice(e+1);if(o.match(/^[a-z]+=/)){let a=SE.default.parse(o);for(let[p,h]of Object.entries(a))if(typeof h!="string")throw new Error(`Assertion failed: The ${p} parameter must be a literal string`);let n=Object.values(i2).find(p=>Object.hasOwn(a,p)),[u,A]=typeof n<"u"?[n,a[n]]:["head","HEAD"];for(let p of Object.values(i2))delete a[p];return{repo:r,treeish:{protocol:u,request:A},extra:a}}else{let a=o.indexOf(":"),[n,u]=a===-1?[null,o]:[o.slice(0,a),o.slice(a+1)];return{repo:r,treeish:{protocol:n,request:u},extra:{}}}}function Sgt(t){return G.makeLocator(t,PE(t.reference))}function j8(t,{configuration:e}){let r=PE(t,{git:!0});if(!sn.getNetworkSettings(`https://${(0,bde.default)(r).resource}`,{configuration:e}).enableNetwork)throw new Jt(80,`Request to '${r}' has been blocked because of your configuration settings`);return r}async function Qde(t,e){let r=j8(t,{configuration:e}),o=await q8("listing refs",["ls-remote",r],{cwd:e.startingCwd,env:kde()},{configuration:e,normalizedRepoUrl:r}),a=new Map,n=/^([a-f0-9]{40})\t([^\n]+)/gm,u;for(;(u=n.exec(o.stdout))!==null;)a.set(u[2],u[1]);return a}async function G8(t,e){let{repo:r,treeish:{protocol:o,request:a},extra:n}=bh(t),u=await Qde(r,e),A=(h,E)=>{switch(h){case"commit":{if(!E.match(/^[a-f0-9]{40}$/))throw new Error("Invalid commit hash");return SE.default.stringify({...n,commit:E})}case"head":{let I=u.get(E==="HEAD"?E:`refs/heads/${E}`);if(typeof I>"u")throw new Error(`Unknown head ("${E}")`);return SE.default.stringify({...n,commit:I})}case"tag":{let I=u.get(`refs/tags/${E}`);if(typeof I>"u")throw new Error(`Unknown tag ("${E}")`);return SE.default.stringify({...n,commit:I})}case"semver":{let I=Lr.validRange(E);if(!I)throw new Error(`Invalid range ("${E}")`);let v=new Map([...u.entries()].filter(([C])=>C.startsWith("refs/tags/")).map(([C,R])=>[H8.default.parse(C.slice(10)),R]).filter(C=>C[0]!==null)),x=H8.default.maxSatisfying([...v.keys()],I);if(x===null)throw new Error(`No matching range ("${E}")`);return SE.default.stringify({...n,commit:v.get(x)})}case null:{let I;if((I=p("commit",E))!==null||(I=p("tag",E))!==null||(I=p("head",E))!==null)return I;throw E.match(/^[a-f0-9]+$/)?new Error(`Couldn't resolve "${E}" as either a commit, a tag, or a head - if a commit, use the 40-characters commit hash`):new 
Error(`Couldn't resolve "${E}" as either a commit, a tag, or a head`)}default:throw new Error(`Invalid Git resolution protocol ("${h}")`)}},p=(h,E)=>{try{return A(h,E)}catch{return null}};return PE(`${r}#${A(o,a)}`)}async function Y8(t,e){return await e.getLimit("cloneConcurrency")(async()=>{let{repo:r,treeish:{protocol:o,request:a}}=bh(t);if(o!=="commit")throw new Error("Invalid treeish protocol when cloning");let n=j8(r,{configuration:e}),u=await oe.mktempPromise(),A={cwd:u,env:kde()};return await q8("cloning the repository",["clone","-c core.autocrlf=false",n,ue.fromPortablePath(u)],A,{configuration:e,normalizedRepoUrl:n}),await q8("switching branch",["checkout",`${a}`],A,{configuration:e,normalizedRepoUrl:n}),u})}async function Fde(t){let e,r=t;do{if(e=r,await oe.existsPromise(K.join(e,".git")))return e;r=K.dirname(e)}while(r!==e);return null}async function Rde(t,{baseRefs:e}){if(e.length===0)throw new st("Can't run this command with zero base refs specified.");let r=[];for(let A of e){let{code:p}=await Ur.execvp("git",["merge-base",A,"HEAD"],{cwd:t});p===0&&r.push(A)}if(r.length===0)throw new st(`No ancestor could be found between any of HEAD and ${e.join(", ")}`);let{stdout:o}=await Ur.execvp("git",["merge-base","HEAD",...r],{cwd:t,strict:!0}),a=o.trim(),{stdout:n}=await Ur.execvp("git",["show","--quiet","--pretty=format:%s",a],{cwd:t,strict:!0}),u=n.trim();return{hash:a,title:u}}async function Tde(t,{base:e,project:r}){let o=He.buildIgnorePattern(r.configuration.get("changesetIgnorePatterns")),{stdout:a}=await Ur.execvp("git",["diff","--name-only",`${e}`],{cwd:t,strict:!0}),n=a.split(/\r\n|\r|\n/).filter(h=>h.length>0).map(h=>K.resolve(t,ue.toPortablePath(h))),{stdout:u}=await Ur.execvp("git",["ls-files","--others","--exclude-standard"],{cwd:t,strict:!0}),A=u.split(/\r\n|\r|\n/).filter(h=>h.length>0).map(h=>K.resolve(t,ue.toPortablePath(h))),p=[...new Set([...n,...A].sort())];return o?p.filter(h=>!K.relative(r.cwd,h).match(o)):p}async function bgt({ref:t,project:e}){if(e.configuration.projectCwd===null)throw new st("This command can only be run from within a Yarn project");let r=[K.resolve(e.cwd,dr.lockfile),K.resolve(e.cwd,e.configuration.get("cacheFolder")),K.resolve(e.cwd,e.configuration.get("installStatePath")),K.resolve(e.cwd,e.configuration.get("virtualFolder"))];await e.configuration.triggerHook(u=>u.populateYarnPaths,e,u=>{u!=null&&r.push(u)});let o=await Fde(e.configuration.projectCwd);if(o==null)throw new st("This command can only be run on Git repositories");let a=await Rde(o,{baseRefs:typeof t=="string"?[t]:e.configuration.get("changesetBaseRefs")}),n=await Tde(o,{base:a.hash,project:e});return new Set(He.mapAndFilter(n,u=>{let A=e.tryWorkspaceByFilePath(u);return A===null?He.mapAndFilter.skip:r.some(p=>u.startsWith(p))?He.mapAndFilter.skip:A}))}async function q8(t,e,r,{configuration:o,normalizedRepoUrl:a}){try{return await Ur.execvp("git",e,{...r,strict:!0})}catch(n){if(!(n instanceof Ur.ExecError))throw n;let u=n.reportExtra,A=n.stderr.toString();throw new Jt(1,`Failed ${t}`,p=>{p.reportError(1,` ${pe.prettyField(o,{label:"Repository URL",value:pe.tuple(pe.Type.URL,a)})}`);for(let h of A.matchAll(/^(.+?): (.*)$/gm)){let[,E,I]=h;E=E.toLowerCase();let v=E==="error"?"Error":`${(0,xde.default)(E)} Error`;p.reportError(1,` ${pe.prettyField(o,{label:v,value:pe.tuple(pe.Type.NO_HINT,I)})}`)}u?.(p)})}}var s2=class{supports(e,r){return bE(e.reference)}getLocalPath(e,r){return null}async fetch(e,r){let o=r.checksums.get(e.locatorHash)||null,a=new 
Map(r.checksums);a.set(e.locatorHash,o);let n={...r,checksums:a},u=await this.downloadHosted(e,n);if(u!==null)return u;let[A,p,h]=await r.cache.fetchPackageFromCache(e,o,{onHit:()=>r.report.reportCacheHit(e),onMiss:()=>r.report.reportCacheMiss(e,`${G.prettyLocator(r.project.configuration,e)} can't be found in the cache and will be fetched from the remote repository`),loader:()=>this.cloneFromRemote(e,n),...r.cacheOptions});return{packageFs:A,releaseFs:p,prefixPath:G.getIdentVendorPath(e),checksum:h}}async downloadHosted(e,r){return r.project.configuration.reduceHook(o=>o.fetchHostedRepository,null,e,r)}async cloneFromRemote(e,r){let o=bh(e.reference),a=await Y8(e.reference,r.project.configuration),n=K.resolve(a,o.extra.cwd??It.dot),u=K.join(n,"package.tgz");await An.prepareExternalProject(n,u,{configuration:r.project.configuration,report:r.report,workspace:o.extra.workspace,locator:e});let A=await oe.readFilePromise(u);return await He.releaseAfterUseAsync(async()=>await $i.convertToZip(A,{configuration:r.project.configuration,prefixPath:G.getIdentVendorPath(e),stripComponents:1}))}};Ge();Ge();var o2=class{supportsDescriptor(e,r){return bE(e.range)}supportsLocator(e,r){return bE(e.reference)}shouldPersistResolution(e,r){return!0}bindDescriptor(e,r,o){return e}getResolutionDependencies(e,r){return{}}async getCandidates(e,r,o){let a=await G8(e.range,o.project.configuration);return[G.makeLocator(e,a)]}async getSatisfying(e,r,o,a){let n=bh(e.range);return{locators:o.filter(A=>{if(A.identHash!==e.identHash)return!1;let p=bh(A.reference);return!(n.repo!==p.repo||n.treeish.protocol==="commit"&&n.treeish.request!==p.treeish.request)}),sorted:!1}}async resolve(e,r){if(!r.fetchOptions)throw new Error("Assertion failed: This resolver cannot be used unless a fetcher is configured");let o=await r.fetchOptions.fetcher.fetch(e,r.fetchOptions),a=await He.releaseAfterUseAsync(async()=>await Ut.find(o.prefixPath,{baseFs:o.packageFs}),o.releaseFs);return{...e,version:a.version||"0.0.0",languageName:a.languageName||r.project.configuration.get("defaultLanguageName"),linkType:"HARD",conditions:a.getConditions(),dependencies:r.project.configuration.normalizeDependencyMap(a.dependencies),peerDependencies:a.peerDependencies,dependenciesMeta:a.dependenciesMeta,peerDependenciesMeta:a.peerDependenciesMeta,bin:a.bin}}};var xgt={configuration:{changesetBaseRefs:{description:"The base git refs that the current HEAD is compared against when detecting changes. 
Supports git branches, tags, and commits.",type:"STRING",isArray:!0,isNullable:!1,default:["master","origin/master","upstream/master","main","origin/main","upstream/main"]},changesetIgnorePatterns:{description:"Array of glob patterns; files matching them will be ignored when fetching the changed files",type:"STRING",default:[],isArray:!0},cloneConcurrency:{description:"Maximal number of concurrent clones",type:"NUMBER",default:2}},fetchers:[s2],resolvers:[o2]};var kgt=xgt;qt();var xE=class extends ut{constructor(){super(...arguments);this.since=ge.String("--since",{description:"Only include workspaces that have been changed since the specified ref.",tolerateBoolean:!0});this.recursive=ge.Boolean("-R,--recursive",!1,{description:"Find packages via dependencies/devDependencies instead of using the workspaces field"});this.noPrivate=ge.Boolean("--no-private",{description:"Exclude workspaces that have the private field set to true"});this.verbose=ge.Boolean("-v,--verbose",!1,{description:"Also return the cross-dependencies between workspaces"});this.json=ge.Boolean("--json",!1,{description:"Format the output as an NDJSON stream"})}static{this.paths=[["workspaces","list"]]}static{this.usage=it.Usage({category:"Workspace-related commands",description:"list all available workspaces",details:"\n This command will print the list of all workspaces in the project.\n\n - If `--since` is set, Yarn will only list workspaces that have been modified since the specified ref. By default Yarn will use the refs specified by the `changesetBaseRefs` configuration option.\n\n - If `-R,--recursive` is set, Yarn will find workspaces to run the command on by recursively evaluating `dependencies` and `devDependencies` fields, instead of looking at the `workspaces` fields.\n\n - If `--no-private` is set, Yarn will not list any workspaces that have the `private` field set to `true`.\n\n - If both the `-v,--verbose` and `--json` options are set, Yarn will also return the cross-dependencies between each workspaces (useful when you wish to automatically generate Buck / Bazel rules).\n "})}async execute(){let r=await Ke.find(this.context.cwd,this.context.plugins),{project:o}=await kt.find(r,this.context.cwd);return(await Rt.start({configuration:r,json:this.json,stdout:this.context.stdout},async n=>{let u=this.since?await ia.fetchChangedWorkspaces({ref:this.since,project:o}):o.workspaces,A=new Set(u);if(this.recursive)for(let p of[...u].map(h=>h.getRecursiveWorkspaceDependents()))for(let h of p)A.add(h);for(let p of A){let{manifest:h}=p;if(h.private&&this.noPrivate)continue;let E;if(this.verbose){let I=new Set,v=new Set;for(let x of Ut.hardDependencies)for(let[C,R]of h.getForScope(x)){let L=o.tryWorkspaceByDescriptor(R);L===null?o.workspacesByIdent.has(C)&&v.add(R):I.add(L)}E={workspaceDependencies:Array.from(I).map(x=>x.relativeCwd),mismatchedWorkspaceDependencies:Array.from(v).map(x=>G.stringifyDescriptor(x))}}n.reportInfo(null,`${p.relativeCwd}`),n.reportJson({location:p.relativeCwd,name:h.name?G.stringifyIdent(h.name):null,...E})}})).exitCode()}};Ge();Ge();qt();var kE=class extends ut{constructor(){super(...arguments);this.workspaceName=ge.String();this.commandName=ge.String();this.args=ge.Proxy()}static{this.paths=[["workspace"]]}static{this.usage=it.Usage({category:"Workspace-related commands",description:"run a command within the specified workspace",details:` + This command will run a given sub-command on a single workspace. 
+ `,examples:[["Add a package to a single workspace","yarn workspace components add -D react"],["Run build script on a single workspace","yarn workspace components run build"]]})}async execute(){let r=await Ke.find(this.context.cwd,this.context.plugins),{project:o,workspace:a}=await kt.find(r,this.context.cwd);if(!a)throw new sr(o.cwd,this.context.cwd);let n=o.workspaces,u=new Map(n.map(p=>[G.stringifyIdent(p.anchoredLocator),p])),A=u.get(this.workspaceName);if(A===void 0){let p=Array.from(u.keys()).sort();throw new st(`Workspace '${this.workspaceName}' not found. Did you mean any of the following: + - ${p.join(` + - `)}?`)}return this.cli.run([this.commandName,...this.args],{cwd:A.cwd})}};var Qgt={configuration:{enableImmutableInstalls:{description:"If true (the default on CI), prevents the install command from modifying the lockfile",type:"BOOLEAN",default:Nde.isCI},defaultSemverRangePrefix:{description:"The default save prefix: '^', '~' or ''",type:"STRING",values:["^","~",""],default:"^"},preferReuse:{description:"If true, `yarn add` will attempt to reuse the most common dependency range in other workspaces.",type:"BOOLEAN",default:!1}},commands:[Ky,Vy,zy,Jy,IE,pE,sE,xE,$y,eE,tE,rE,Yy,Wy,Xy,Zy,nE,iE,oE,aE,lE,cE,BE,uE,AE,gE,hE,dE,fE,mE,yE,EE,CE,wE,vE,DE,kE]},Fgt=Qgt;var Z8={};Vt(Z8,{default:()=>Tgt});Ge();var xt={optional:!0},V8=[["@tailwindcss/aspect-ratio@<0.2.1",{peerDependencies:{tailwindcss:"^2.0.2"}}],["@tailwindcss/line-clamp@<0.2.1",{peerDependencies:{tailwindcss:"^2.0.2"}}],["@fullhuman/postcss-purgecss@3.1.3 || 3.1.3-alpha.0",{peerDependencies:{postcss:"^8.0.0"}}],["@samverschueren/stream-to-observable@<0.3.1",{peerDependenciesMeta:{rxjs:xt,zenObservable:xt}}],["any-observable@<0.5.1",{peerDependenciesMeta:{rxjs:xt,zenObservable:xt}}],["@pm2/agent@<1.0.4",{dependencies:{debug:"*"}}],["debug@<4.2.0",{peerDependenciesMeta:{"supports-color":xt}}],["got@<11",{dependencies:{"@types/responselike":"^1.0.0","@types/keyv":"^3.1.1"}}],["cacheable-lookup@<4.1.2",{dependencies:{"@types/keyv":"^3.1.1"}}],["http-link-dataloader@*",{peerDependencies:{graphql:"^0.13.1 || ^14.0.0"}}],["typescript-language-server@*",{dependencies:{"vscode-jsonrpc":"^5.0.1","vscode-languageserver-protocol":"^3.15.0"}}],["postcss-syntax@*",{peerDependenciesMeta:{"postcss-html":xt,"postcss-jsx":xt,"postcss-less":xt,"postcss-markdown":xt,"postcss-scss":xt}}],["jss-plugin-rule-value-function@<=10.1.1",{dependencies:{"tiny-warning":"^1.0.2"}}],["ink-select-input@<4.1.0",{peerDependencies:{react:"^16.8.2"}}],["license-webpack-plugin@<2.3.18",{peerDependenciesMeta:{webpack:xt}}],["snowpack@>=3.3.0",{dependencies:{"node-gyp":"^7.1.0"}}],["promise-inflight@*",{peerDependenciesMeta:{bluebird:xt}}],["reactcss@*",{peerDependencies:{react:"*"}}],["react-color@<=2.19.0",{peerDependencies:{react:"*"}}],["gatsby-plugin-i18n@*",{dependencies:{ramda:"^0.24.1"}}],["useragent@^2.0.0",{dependencies:{request:"^2.88.0",yamlparser:"0.0.x",semver:"5.5.x"}}],["@apollographql/apollo-tools@<=0.5.2",{peerDependencies:{graphql:"^14.2.1 || ^15.0.0"}}],["material-table@^2.0.0",{dependencies:{"@babel/runtime":"^7.11.2"}}],["@babel/parser@*",{dependencies:{"@babel/types":"^7.8.3"}}],["fork-ts-checker-webpack-plugin@<=6.3.4",{peerDependencies:{eslint:">= 6",typescript:">= 2.7",webpack:">= 
4","vue-template-compiler":"*"},peerDependenciesMeta:{eslint:xt,"vue-template-compiler":xt}}],["rc-animate@<=3.1.1",{peerDependencies:{react:">=16.9.0","react-dom":">=16.9.0"}}],["react-bootstrap-table2-paginator@*",{dependencies:{classnames:"^2.2.6"}}],["react-draggable@<=4.4.3",{peerDependencies:{react:">= 16.3.0","react-dom":">= 16.3.0"}}],["apollo-upload-client@<14",{peerDependencies:{graphql:"14 - 15"}}],["react-instantsearch-core@<=6.7.0",{peerDependencies:{algoliasearch:">= 3.1 < 5"}}],["react-instantsearch-dom@<=6.7.0",{dependencies:{"react-fast-compare":"^3.0.0"}}],["ws@<7.2.1",{peerDependencies:{bufferutil:"^4.0.1","utf-8-validate":"^5.0.2"},peerDependenciesMeta:{bufferutil:xt,"utf-8-validate":xt}}],["react-portal@<4.2.2",{peerDependencies:{"react-dom":"^15.0.0-0 || ^16.0.0-0 || ^17.0.0-0"}}],["react-scripts@<=4.0.1",{peerDependencies:{react:"*"}}],["testcafe@<=1.10.1",{dependencies:{"@babel/plugin-transform-for-of":"^7.12.1","@babel/runtime":"^7.12.5"}}],["testcafe-legacy-api@<=4.2.0",{dependencies:{"testcafe-hammerhead":"^17.0.1","read-file-relative":"^1.2.0"}}],["@google-cloud/firestore@<=4.9.3",{dependencies:{protobufjs:"^6.8.6"}}],["gatsby-source-apiserver@*",{dependencies:{"babel-polyfill":"^6.26.0"}}],["@webpack-cli/package-utils@<=1.0.1-alpha.4",{dependencies:{"cross-spawn":"^7.0.3"}}],["gatsby-remark-prismjs@<3.3.28",{dependencies:{lodash:"^4"}}],["gatsby-plugin-favicon@*",{peerDependencies:{webpack:"*"}}],["gatsby-plugin-sharp@<=4.6.0-next.3",{dependencies:{debug:"^4.3.1"}}],["gatsby-react-router-scroll@<=5.6.0-next.0",{dependencies:{"prop-types":"^15.7.2"}}],["@rebass/forms@*",{dependencies:{"@styled-system/should-forward-prop":"^5.0.0"},peerDependencies:{react:"^16.8.6"}}],["rebass@*",{peerDependencies:{react:"^16.8.6"}}],["@ant-design/react-slick@<=0.28.3",{peerDependencies:{react:">=16.0.0"}}],["mqtt@<4.2.7",{dependencies:{duplexify:"^4.1.1"}}],["vue-cli-plugin-vuetify@<=2.0.3",{dependencies:{semver:"^6.3.0"},peerDependenciesMeta:{"sass-loader":xt,"vuetify-loader":xt}}],["vue-cli-plugin-vuetify@<=2.0.4",{dependencies:{"null-loader":"^3.0.0"}}],["vue-cli-plugin-vuetify@>=2.4.3",{peerDependencies:{vue:"*"}}],["@vuetify/cli-plugin-utils@<=0.0.4",{dependencies:{semver:"^6.3.0"},peerDependenciesMeta:{"sass-loader":xt}}],["@vue/cli-plugin-typescript@<=5.0.0-alpha.0",{dependencies:{"babel-loader":"^8.1.0"}}],["@vue/cli-plugin-typescript@<=5.0.0-beta.0",{dependencies:{"@babel/core":"^7.12.16"},peerDependencies:{"vue-template-compiler":"^2.0.0"},peerDependenciesMeta:{"vue-template-compiler":xt}}],["cordova-ios@<=6.3.0",{dependencies:{underscore:"^1.9.2"}}],["cordova-lib@<=10.0.1",{dependencies:{underscore:"^1.9.2"}}],["git-node-fs@*",{peerDependencies:{"js-git":"^0.7.8"},peerDependenciesMeta:{"js-git":xt}}],["consolidate@<0.16.0",{peerDependencies:{mustache:"^3.0.0"},peerDependenciesMeta:{mustache:xt}}],["consolidate@<=0.16.0",{peerDependencies:{velocityjs:"^2.0.1",tinyliquid:"^0.2.34","liquid-node":"^3.0.1",jade:"^1.11.0","then-jade":"*",dust:"^0.3.0","dustjs-helpers":"^1.7.4","dustjs-linkedin":"^2.7.5",swig:"^1.4.2","swig-templates":"^2.0.3","razor-tmpl":"^1.3.1",atpl:">=0.7.6",liquor:"^0.0.5",twig:"^1.15.2",ejs:"^3.1.5",eco:"^1.1.0-rc-3",jazz:"^0.0.18",jqtpl:"~1.1.0",hamljs:"^0.6.2",hamlet:"^0.3.3",whiskers:"^0.4.0","haml-coffee":"^1.14.1","hogan.js":"^3.0.2",templayed:">=0.2.3",handlebars:"^4.7.6",underscore:"^1.11.0",lodash:"^4.17.20",pug:"^3.0.0","then-pug":"*",qejs:"^3.0.5",walrus:"^0.10.1",mustache:"^4.0.1",just:"^0.1.8",ect:"^0.5.9",mote:"^0.2.0",toffee:"^0.3.6",dot:
"^1.1.3","bracket-template":"^1.1.5",ractive:"^1.3.12",nunjucks:"^3.2.2",htmling:"^0.0.8","babel-core":"^6.26.3",plates:"~0.4.11","react-dom":"^16.13.1",react:"^16.13.1","arc-templates":"^0.5.3",vash:"^0.13.0",slm:"^2.0.0",marko:"^3.14.4",teacup:"^2.0.0","coffee-script":"^1.12.7",squirrelly:"^5.1.0",twing:"^5.0.2"},peerDependenciesMeta:{velocityjs:xt,tinyliquid:xt,"liquid-node":xt,jade:xt,"then-jade":xt,dust:xt,"dustjs-helpers":xt,"dustjs-linkedin":xt,swig:xt,"swig-templates":xt,"razor-tmpl":xt,atpl:xt,liquor:xt,twig:xt,ejs:xt,eco:xt,jazz:xt,jqtpl:xt,hamljs:xt,hamlet:xt,whiskers:xt,"haml-coffee":xt,"hogan.js":xt,templayed:xt,handlebars:xt,underscore:xt,lodash:xt,pug:xt,"then-pug":xt,qejs:xt,walrus:xt,mustache:xt,just:xt,ect:xt,mote:xt,toffee:xt,dot:xt,"bracket-template":xt,ractive:xt,nunjucks:xt,htmling:xt,"babel-core":xt,plates:xt,"react-dom":xt,react:xt,"arc-templates":xt,vash:xt,slm:xt,marko:xt,teacup:xt,"coffee-script":xt,squirrelly:xt,twing:xt}}],["vue-loader@<=16.3.3",{peerDependencies:{"@vue/compiler-sfc":"^3.0.8",webpack:"^4.1.0 || ^5.0.0-0"},peerDependenciesMeta:{"@vue/compiler-sfc":xt}}],["vue-loader@^16.7.0",{peerDependencies:{"@vue/compiler-sfc":"^3.0.8",vue:"^3.2.13"},peerDependenciesMeta:{"@vue/compiler-sfc":xt,vue:xt}}],["scss-parser@<=1.0.5",{dependencies:{lodash:"^4.17.21"}}],["query-ast@<1.0.5",{dependencies:{lodash:"^4.17.21"}}],["redux-thunk@<=2.3.0",{peerDependencies:{redux:"^4.0.0"}}],["skypack@<=0.3.2",{dependencies:{tar:"^6.1.0"}}],["@npmcli/metavuln-calculator@<2.0.0",{dependencies:{"json-parse-even-better-errors":"^2.3.1"}}],["bin-links@<2.3.0",{dependencies:{"mkdirp-infer-owner":"^1.0.2"}}],["rollup-plugin-polyfill-node@<=0.8.0",{peerDependencies:{rollup:"^1.20.0 || ^2.0.0"}}],["snowpack@<3.8.6",{dependencies:{"magic-string":"^0.25.7"}}],["elm-webpack-loader@*",{dependencies:{temp:"^0.9.4"}}],["winston-transport@<=4.4.0",{dependencies:{logform:"^2.2.0"}}],["jest-vue-preprocessor@*",{dependencies:{"@babel/core":"7.8.7","@babel/template":"7.8.6"},peerDependencies:{pug:"^2.0.4"},peerDependenciesMeta:{pug:xt}}],["redux-persist@*",{peerDependencies:{react:">=16"},peerDependenciesMeta:{react:xt}}],["sodium@>=3",{dependencies:{"node-gyp":"^3.8.0"}}],["babel-plugin-graphql-tag@<=3.1.0",{peerDependencies:{graphql:"^14.0.0 || 
^15.0.0"}}],["@playwright/test@<=1.14.1",{dependencies:{"jest-matcher-utils":"^26.4.2"}}],...["babel-plugin-remove-graphql-queries@<3.14.0-next.1","babel-preset-gatsby-package@<1.14.0-next.1","create-gatsby@<1.14.0-next.1","gatsby-admin@<0.24.0-next.1","gatsby-cli@<3.14.0-next.1","gatsby-core-utils@<2.14.0-next.1","gatsby-design-tokens@<3.14.0-next.1","gatsby-legacy-polyfills@<1.14.0-next.1","gatsby-plugin-benchmark-reporting@<1.14.0-next.1","gatsby-plugin-graphql-config@<0.23.0-next.1","gatsby-plugin-image@<1.14.0-next.1","gatsby-plugin-mdx@<2.14.0-next.1","gatsby-plugin-netlify-cms@<5.14.0-next.1","gatsby-plugin-no-sourcemaps@<3.14.0-next.1","gatsby-plugin-page-creator@<3.14.0-next.1","gatsby-plugin-preact@<5.14.0-next.1","gatsby-plugin-preload-fonts@<2.14.0-next.1","gatsby-plugin-schema-snapshot@<2.14.0-next.1","gatsby-plugin-styletron@<6.14.0-next.1","gatsby-plugin-subfont@<3.14.0-next.1","gatsby-plugin-utils@<1.14.0-next.1","gatsby-recipes@<0.25.0-next.1","gatsby-source-shopify@<5.6.0-next.1","gatsby-source-wikipedia@<3.14.0-next.1","gatsby-transformer-screenshot@<3.14.0-next.1","gatsby-worker@<0.5.0-next.1"].map(t=>[t,{dependencies:{"@babel/runtime":"^7.14.8"}}]),["gatsby-core-utils@<2.14.0-next.1",{dependencies:{got:"8.3.2"}}],["gatsby-plugin-gatsby-cloud@<=3.1.0-next.0",{dependencies:{"gatsby-core-utils":"^2.13.0-next.0"}}],["gatsby-plugin-gatsby-cloud@<=3.2.0-next.1",{peerDependencies:{webpack:"*"}}],["babel-plugin-remove-graphql-queries@<=3.14.0-next.1",{dependencies:{"gatsby-core-utils":"^2.8.0-next.1"}}],["gatsby-plugin-netlify@3.13.0-next.1",{dependencies:{"gatsby-core-utils":"^2.13.0-next.0"}}],["clipanion-v3-codemod@<=0.2.0",{peerDependencies:{jscodeshift:"^0.11.0"}}],["react-live@*",{peerDependencies:{"react-dom":"*",react:"*"}}],["webpack@<4.44.1",{peerDependenciesMeta:{"webpack-cli":xt,"webpack-command":xt}}],["webpack@<5.0.0-beta.23",{peerDependenciesMeta:{"webpack-cli":xt}}],["webpack-dev-server@<3.10.2",{peerDependenciesMeta:{"webpack-cli":xt}}],["@docusaurus/responsive-loader@<1.5.0",{peerDependenciesMeta:{sharp:xt,jimp:xt}}],["eslint-module-utils@*",{peerDependenciesMeta:{"eslint-import-resolver-node":xt,"eslint-import-resolver-typescript":xt,"eslint-import-resolver-webpack":xt,"@typescript-eslint/parser":xt}}],["eslint-plugin-import@*",{peerDependenciesMeta:{"@typescript-eslint/parser":xt}}],["critters-webpack-plugin@<3.0.2",{peerDependenciesMeta:{"html-webpack-plugin":xt}}],["terser@<=5.10.0",{dependencies:{acorn:"^8.5.0"}}],["babel-preset-react-app@10.0.x 
<10.0.2",{dependencies:{"@babel/plugin-proposal-private-property-in-object":"^7.16.7"}}],["eslint-config-react-app@*",{peerDependenciesMeta:{typescript:xt}}],["@vue/eslint-config-typescript@<11.0.0",{peerDependenciesMeta:{typescript:xt}}],["unplugin-vue2-script-setup@<0.9.1",{peerDependencies:{"@vue/composition-api":"^1.4.3","@vue/runtime-dom":"^3.2.26"}}],["@cypress/snapshot@*",{dependencies:{debug:"^3.2.7"}}],["auto-relay@<=0.14.0",{peerDependencies:{"reflect-metadata":"^0.1.13"}}],["vue-template-babel-compiler@<1.2.0",{peerDependencies:{"vue-template-compiler":"^2.6.0"}}],["@parcel/transformer-image@<2.5.0",{peerDependencies:{"@parcel/core":"*"}}],["@parcel/transformer-js@<2.5.0",{peerDependencies:{"@parcel/core":"*"}}],["parcel@*",{peerDependenciesMeta:{"@parcel/core":xt}}],["react-scripts@*",{peerDependencies:{eslint:"*"}}],["focus-trap-react@^8.0.0",{dependencies:{tabbable:"^5.3.2"}}],["react-rnd@<10.3.7",{peerDependencies:{react:">=16.3.0","react-dom":">=16.3.0"}}],["connect-mongo@<5.0.0",{peerDependencies:{"express-session":"^1.17.1"}}],["vue-i18n@<9",{peerDependencies:{vue:"^2"}}],["vue-router@<4",{peerDependencies:{vue:"^2"}}],["unified@<10",{dependencies:{"@types/unist":"^2.0.0"}}],["react-github-btn@<=1.3.0",{peerDependencies:{react:">=16.3.0"}}],["react-dev-utils@*",{peerDependencies:{typescript:">=2.7",webpack:">=4"},peerDependenciesMeta:{typescript:xt}}],["@asyncapi/react-component@<=1.0.0-next.39",{peerDependencies:{react:">=16.8.0","react-dom":">=16.8.0"}}],["xo@*",{peerDependencies:{webpack:">=1.11.0"},peerDependenciesMeta:{webpack:xt}}],["babel-plugin-remove-graphql-queries@<=4.20.0-next.0",{dependencies:{"@babel/types":"^7.15.4"}}],["gatsby-plugin-page-creator@<=4.20.0-next.1",{dependencies:{"fs-extra":"^10.1.0"}}],["gatsby-plugin-utils@<=3.14.0-next.1",{dependencies:{fastq:"^1.13.0"},peerDependencies:{graphql:"^15.0.0"}}],["gatsby-plugin-mdx@<3.1.0-next.1",{dependencies:{mkdirp:"^1.0.4"}}],["gatsby-plugin-mdx@^2",{peerDependencies:{gatsby:"^3.0.0-next"}}],["fdir@<=5.2.0",{peerDependencies:{picomatch:"2.x"},peerDependenciesMeta:{picomatch:xt}}],["babel-plugin-transform-typescript-metadata@<=0.3.2",{peerDependencies:{"@babel/core":"^7","@babel/traverse":"^7"},peerDependenciesMeta:{"@babel/traverse":xt}}],["graphql-compose@>=9.0.10",{peerDependencies:{graphql:"^14.2.0 || ^15.0.0 || ^16.0.0"}}],["vite-plugin-vuetify@<=1.0.2",{peerDependencies:{vue:"^3.0.0"}}],["webpack-plugin-vuetify@<=2.0.1",{peerDependencies:{vue:"^3.2.6"}}],["eslint-import-resolver-vite@<2.0.1",{dependencies:{debug:"^4.3.4",resolve:"^1.22.8"}}]];var z8;function Lde(){return typeof 
z8>"u"&&(z8=ve("zlib").brotliDecompressSync(Buffer.from("G7weAByFTVk3Vs7UfHhq4yykgEM7pbW7TI43SG2S5tvGrwHBAzdz+s/npQ6tgEvobvxisrPIadkXeUAJotBn5bDZ5kAhcRqsIHe3F75Walet5hNalwgFDtxb0BiDUjiUQkjG0yW2hto9HPgiCkm316d6bC0kST72YN7D7rfkhCE9x4J0XwB0yavalxpUu2t9xszHrmtwalOxT7VslsxWcB1qpqZwERUra4psWhTV8BgwWeizurec82Caf1ABL11YMfbf8FJ9JBceZOkgmvrQPbC9DUldX/yMbmX06UQluCEjSwUoyO+EZPIjofr+/oAZUck2enraRD+oWLlnlYnj8xB+gwSo9lmmks4fXv574qSqcWA6z21uYkzMu3EWj+K23RxeQlLqiE35/rC8GcS4CGkKHKKq+zAIQwD9iRDNfiAqueLLpicFFrNsAI4zeTD/eO9MHcnRa5m8UT+M2+V+AkFST4BlKneiAQRSdST8KEAIyFlULt6wa9EBd0Ds28VmpaxquJdVt+nwdEs5xUskI13OVtFyY0UrQIRAlCuvvWivvlSKQfTO+2Q8OyUR1W5RvetaPz4jD27hdtwHFFA1Ptx6Ee/t2cY2rg2G46M1pNDRf2pWhvpy8pqMnuI3++4OF3+7OFIWXGjh+o7Nr2jNvbiYcQdQS1h903/jVFgOpA0yJ78z+x759bFA0rq+6aY5qPB4FzS3oYoLupDUhD9nDz6F6H7hpnlMf18KNKDu4IKjTWwrAnY6MFQw1W6ymOALHlFyCZmQhldg1MQHaMVVQTVgDC60TfaBqG++Y8PEoFhN/PBTZT175KNP/BlHDYGOOBmnBdzqJKplZ/ljiVG0ZBzfqeBRrrUkn6rA54462SgiliKoYVnbeptMdXNfAuaupIEi0bApF10TlgHfmEJAPUVidRVFyDupSem5po5vErPqWKhKbUIp0LozpYsIKK57dM/HKr+nguF+7924IIWMICkQ8JUigs9D+W+c4LnNoRtPPKNRUiCYmP+Jfo2lfKCKw8qpraEeWU3uiNRO6zcyKQoXPR5htmzzLznke7b4YbXW3I1lIRzmgG02Udb58U+7TpwyN7XymCgH+wuPDthZVQvRZuEP+SnLtMicz9m5zASWOBiAcLmkuFlTKuHspSIhCBD0yUPKcxu81A+4YD78rA2vtwsUEday9WNyrShyrl60rWmA+SmbYZkQOwFJWArxRYYc5jGhA5ikxYw1rx3ei4NmeX/lKiwpZ9Ln1tV2Ae7sArvxuVLbJjqJRjW1vFXAyHpvLG+8MJ6T2Ubx5M2KDa2SN6vuIGxJ9WQM9Mk3Q7aCNiZONXllhqq24DmoLbQfW2rYWsOgHWjtOmIQMyMKdiHZDjoyIq5+U700nZ6odJAoYXPQBvFNiQ78d5jaXliBqLTJEqUCwi+LiH2mx92EmNKDsJL74Z613+3lf20pxkV1+erOrjj8pW00vsPaahKUM+05ssd5uwM7K482KWEf3TCwlg/o3e5ngto7qSMz7YteIgCsF1UOcsLk7F7MxWbvrPMY473ew0G+noVL8EPbkmEMftMSeL6HFub/zy+2JQ==","base64")).toString()),z8}var J8;function Mde(){return typeof J8>"u"&&(J8=ve("zlib").brotliDecompressSync(Buffer.from("G8MSIIzURnVBnObTcvb3XE6v2S9Qgc2K801Oa5otNKEtK8BINZNcaQHy+9/vf/WXBimwutXC33P2DPc64pps5rz7NGGWaOKNSPL4Y2KRE8twut2lFOIN+OXPtRmPMRhMTILib2bEQx43az2I5d3YS8Roa5UZpF/ujHb3Djd3GDvYUfvFYSUQ39vb2cmifp/rgB4J/65JK3wRBTvMBoNBmn3mbXC63/gbBkW/2IRPri0O8bcsRBsmarF328pAln04nyJFkwUAvNu934supAqLtyerZZpJ8I8suJHhf/ocMV+scKwa8NOiDKIPXw6Ex/EEZD6TEGaW8N5zvNHYF10l6Lfooj7D5W2k3dgvQSbp2Wv8TGOayS978gxlOLVjTGXs66ozewbrjwElLtyrYNnWTfzzdEutgROUFPVMhnMoy8EjJLLlWwIEoySxliim9kYW30JUHiPVyjt0iAw/ZpPmCbUCltYPnq6ZNblIKhTNhqS/oqC9iya5sGKZTOVsTEg34n92uZTf2iPpcZih8rPW8CzA+adIGmyCPcKdLMsBLShd+zuEbTrqpwuh+DLmracZcjPC5Sdf5odDAhKpFuOsQS67RT+1VgWWygSv3YwxDnylc04/PYuaMeIzhBkLrvs7e/OUzRTF56MmfY6rI63QtEjEQzq637zQqJ39nNhu3NmoRRhW/086bHGBUtx0PE0j3aEGvkdh9WJC8y8j8mqqke9/dQ5la+Q3ba4RlhvTbnfQhPDDab3tUifkjKuOsp13mXEmO00Mu88F/M67R7LXfoFDFLNtgCSWjWX+3Jn1371pJTK9xPBiMJafvDjtFyAzu8rxeQ0TKMQXNPs5xxiBOd+BRJP8KP88XPtJIbZKh/cdW8KvBUkpqKpGoiIaA32c3/JnQr4efXt85mXvidOvn/eU3Pase1typLYBalJ14mCso9h79nuMOuCa/kZAOkJHmTjP5RM2WNoPasZUAnT1TAE/NH25hUxcQv6hQWR/m1PKk4ooXMcM4SR1iYU3fUohvqk4RY2hbmTVVIXv6TvqO+0doOjgeVFAcom+RlwJQmOVH7pr1Q9LoJT6n1DeQEB+NHygsATbIwTcOKZlJsY8G4+suX1uQLjUWwLjjs0mvSvZcLTpIGAekeR7GCgl8eo3ndAqEe2XCav4huliHjdbIPBsGJuPX7lrO9HX1UbXRH5opOe1x6JsOSgHZR+EaxuXVhpLLxm6jk1LJtZfHSc6BKPun3CpYYVMJGwEUyk8MTGG0XL5MfEwaXpnc9TKnBmlGn6nHiGREc3ysn47XIBDzA+YvFdjZzVIEDcKGpS6PbUJehFRjEne8D0lVU1XuRtlgszq6pTNlQ/3MzNOEgCWPyTct22V2mEi2krizn5VDo9B19/X2DB3hCGRMM7ONbtnAcIx/OWB1u5uPbW1gsH8irXxT/IzG0PoXWYjhbMsH3KTuoOl5o17PulcgvsfTSnKFM354GWI8luqZnrswWjiXy3G+Vbyo1KMopFmmvBwNELgaS8z8dNZchx/Cl/xjddxhMcyqtzFyONb2Zdu90NkI8pAeufe7YlXrp53v8Dj/l8vWeVspRKBGXScBBPI/HinSTGmLDOGGOCIyH0JFdOZx0gWsacNlQLJMIrBhqRxXxHF/5pseWwejlAAvZ3klZSDSYY8mkToaWejXhgNomeGtx1DTLEUFMRkgF5yFB22WYdJnaWN14r1YJj81hGi45+jrADS5nYRhCiSlCJJ1nL8pYX+HDSMhdTEWyRcgHVp/IsUIZYMfT+YYncUQPgcxNGCHfZ88
vDdrcUuaGIl6zhAsiaq7R5dfqrqXH/JcBhfjT8D0azayIyEz75Nxp6YkcyDxlJq3EXnJUpqDohJJOysL1t1uNiHESlvsxPb5cpbW0+ICZqJmUZus1BMW0F5IVBODLIo2zHHjA0=","base64")).toString()),J8}var X8;function Ode(){return typeof X8>"u"&&(X8=ve("zlib").brotliDecompressSync(Buffer.from("m1C+PAVy82TZOdqKhkjfscdNwVRkofPqJdKVqBRs/f0HeyOaY/euQpHsGl7zjxkr1ekEVL0VN8bAASvrSk6lGK5bWNUW2gmbVg4kLJkFlQunKWNeuIXLTU8QdnGNGz6QdOLgn+qRbL1Am15hp9EwXaM9MUVQyvGWJKkY/qfYkjCFLNiw8QsbYw714mb9cVQPTdOIpxbG3qQJ/DXVQWB6DaDjpnsFECKYFlmiaXmJjVnHFHFM0lfv0xquDTVEKx7gsQ6XwCBPWuk/V0BaLl9n/5+fr41fYcUJkZhcg9ecVplAXsswQlhrR4ctPLviclz585faf36+Hsolg/FDoKa20lfc7nn1xlkhcaTkAI+Rl5nFoufb5Qm9iT6nWmenS+ga6GvUyRVCzWT4KTddWJi9m2rH1SUhHpAEiEnVPu8avp/KplVcRJBOgtOQ0ekqJz2oeZBvhkDlJilbjSHcDNBVVfKT8i2ZOVusqpS98HMbkB/gZPV3VIe+vq0vjQY72EL6BbOK/f/7S6vUlmEYpMGexag3yHzc9gJxtNly1X33vuspUq9Uks5IlhthkO69777/S1Uldct2b9tugiXgJNok5Xw5i7+kko/dMNiz2D3E0RIEQbL/v2r+++1dcz2SvExA8uPetKyz8QKh6xCh1TskTeXuhSo1xaPfAcThyCFkUQ535FjZlUYzBBzlFEsvlyH/SBVN9X9XcwYAKd8LiC+cA/6vORvwuoMD/nDDD63/XFYvddJ7e1Wr1z9Mz49zTgFpATt0ddyI3rqknp8C+0ZbhxwyoD+N2jFQA2TArodjFzfj4z9Sy+E9Ccyo22nWzgTZ77O9n+lbkr6B9U16SqRoM2+QAUUGDhLg/L/puVPrt0QQ+kz17aoS7/kyIGcUOkjTnpGRIwbruf892C5IGEjiiYWJbT8NOOCO39/MUAny7l6QuN6jaDHGvJ6mYEQLpWvtplCGlsVEfi1HVm32WttlEiIIwZT315gqrelOcxOQIOJ+WYq8CjJVtDdpR9Mvx41KGeEYa9Z6dG97EhEBEYg7OMWWCZSZKmk7v7xUK3RqmKUfcvr8sdu/yJSTHJLgWg9kDKaX9SdiVAw6ws/9GPNVm5jOn06VsTwQT8QvvhR6GdrbPO/5/Es2pVTAQgiRAuH7qnhndyx3Duf1qyxiESIIhSig0/t4QlbaSgSP72Zhcohog72E/o392U+qCuK4TgIzfMpq6Ko0nwrsZ3MP6+8L1K97L/f9jr++qmceRP6bN4jMkT7E7Wdi6Hrw5xjSDl6/7xeeV6Ov/hjEAww3sxdFb7D8JtstxCl6J1BPdO1Z8hOC8Ljc1WT6scrPny/mytROraCA7tDM65YEiBkAGzuSEyxS5OWnNlKUu59CeH27yrxnfWgrh0MsLVtgsEeAfxv7JrzzCzhulLY5A1BNulikOmMI0BQsiaind0+z8NHRov60a38N6SqoMpjZeUAKGJHa8xXcuGy+iGVqPsn4n4lxZLHp8NWJ9tJR/dcmubfZw8oRLeP+YwJ5HN77lxdB5eQyg24dcMDO448eToKbOZ2AE6S7m2ZEKEw9cc8vpm29DPbBzEmPCjhene2q0GOI+G2TJk3KRmaza58vl2Qa2w5QSH7SPH4tp73jbY4VzNXfFCSJTy5eW9myvQvMtVOPv2piQR8sypLT5uBgMIuNX2nfO9cFIE/cbNEFtBb6SNXvescVYG8paLi41q25jer7DREwzBDiin+gU3cUPcUykEAGR+CbPExUcsCh1K3vj5FsIZwhbLDV0J59/iQebYA2Xg9ngCQFn6YmyLzarvvMV40FkpvX3ZsoyAKSLbbezn62YdHdtL70LPs+ENHvsisvBr2HzmTOgPXwxDzAuesLHEekwRH4gvikAM4H9FoisKNMh3Vqr7V3IabpTrcH7NqlKgBTARmz+axANF3AwclQMpMHzLyYwQJzW4A66l7HFr7qTBq22ylB9Too6j9kyErLFjkNav8SniLzx1QsZmBZCVGfLuSMKLbOv5ak5ghUG7nSp67Wsl+ITx2BOanBDFDJF6XCTEVKsOCLSwv4/h2Hz4TJCEa9ECIuHFAyiXJIKWDnKGMzEM5HOcBB1KQ2ot+adDK/sY1AmfQ6tjpQkIB28WzGLt/x3MYt6TyOXYcKqAbV5g1IVsaM5PJUJkgrdWQiIFBsse+lYTpfZyegzNx5dAgI3YgBmsxDf4NVJHwrJBxx5iHnT/9y+MzugCRZPkblM87h9vjQ5n//ttO9ckUb/XCMziwGyKvMc5neMrNgfrF/zOfgHBSpEPou1POFo9aGx70OvcNU4ZRcmOt1YEGGp24L+2WTH5WgNppi5YdjinF18p1fROEdYpdKHSC2nGbBYguTnN/KlTmNq+NGUijFjD1L4uO8Tf4SNpZYiXLatoXkli1zPsu22QohMWMFSSkERIj1PG/Wb/DSYeDMT6ns9eetiM33I3aTlfTKzCmMmxNK/o7kFwqUyVRMuBA43O+F/JibH2uL9qAH3UmrqLpXNPP6S1IDMNQeRz1oBEKOnmmHyXPMMUO8r9HtrtJlYbba6hqI7Bo+3Lg0uMTdTwIZkL2XFlPrm4L70H4fBAU2oqCgo3q76cXT5Y35KnddqFl+OdIxS5+2oXQc0zoYgrDOGrP9Ocxv7bGVp3gKF1m1yGlGAcC3O7TvynEzIotd74LYhMjFviQwmq/brez7cDL1BXGg3klslvGMpHQfnyvEEGnTcqZ2aecExkWe4nZtr0Mgj4vLowe0uRF36B+4GKJoSw/tt5gddLhHc+ucO7ynBQizZdGDPe8lbrd2nUXP9EEhGawuniCsXGpplXawyB0q0ZWrGSC8SS+EUvRSQzRIVdEIrjkqFoSz66MkvnQggD6iGomN2GDfZl7DJGg7c7yk8oBd0dWudbKhIwm6KvzKmQNUXlv5kk5DBkq5rTJg2odmeY8BsNetlRzxp9U0gZAe/tZA7xFAgZFRgwBFT5ddFlTTtS4DXZS7OoFi5PBFZnNE2iEKTD5nJHyd4BD+KpZL/JzCbMFI3KyRZg/fyvl1M4A+AnRUONg5Lq+Srmc44PdTNz1dMNoXVBKJrNwYOwh1CSn9DiqJhJKHb2VNz6nl6975W/oP35CT3tcd+P3DkFndo01QMHnF3cuAnujaqsZ7GRqesfZj2L9yCYTwt/tLwCD6QS2vCi+tYYXYJOyLMKjr20blISwcy+yNgVkQQl6pXA9EMs65U4bqJwyXcxDeRBLNdKlFfRjw9N+gcWzrF6OumkIcy3v2Xes2CP19Y9MiSHKgb/A+A3ieue+uSq/
3+mUQoZEmxDQYiLExaJ3RGLTDmu8Z6Lbaw783aynMlJzW3DaGbc3NtHtOIlW/H1qxeYhhT8voEQZv4KaXnbq6/2DasKpSRJNQEd5MJzOkK7qKm8PFjajUYAlQHFkh788Jo+ehLS75Lw6udwK7B4aPXW2guzt1/4L9POEmnM0nnUSUmxfC3ZMq7p0Sc8DdNLZq6iv18Q0MUpXW91D1UVlXWEiuZXXe+HL2XdlXtXfju5OH79XruecOxLkdbDQ+bl029Ll54T/79OKAf4+iXiNCxa52R3QVVJ4zPPwXbKLOFdMedoYPgM8fZuzCsoQfrLDGPdWlBRveM0jZxl/B+Hm1ngQm7mCIx1bVVScGYr8GKsjPTYZ3eU3sJafJzpJQxn+eMmy2AjvZOF/Hr0LdAI39xeRH7CVAtGLfvhIPFUzu8W77yyb+0fnwFnoZMEDAqazVn44ghy6eEcucuhU7244M8daVQj3tCyPC1tVgr5JGLepbCgcae/IDCQfUKHd7kU3rP+hoTuM7hWE6jRsnp+2jHy19VPOBxdisgdgGBr++57mPCuLSmGqjqU+2E1sWLn3qZOp/yFpl21DSgXI5OGRQ3DxEwUdCD15QEQcIDnTMx1s4UDW7p024AMApI/pbCklZeHQMrd2mf4UQ6xoBABvhqrTutAw80DIfuf1AwRwU2ff2yPY19+GNz4/hAewD3fKjzQ7Uyi5uIy4AgLtzSF04fef06O2aO6e96REA0N4d7KHZcerlA/MDNXMwg/K2e/CRAz+YyvGuAxwnUTcf2FkJVgConacbu2tHB6N3sA3mY00PVM5B8zy0kO9rao/ADdE9GsPBx/E2DvLd7c5swgUAZoDZ5cJzjcNo7Tz6uwnJukYA4DxZCcfBUYV0NxpN/C5twPe94NB7LFmibd7ZrWQAAJwUgfz/58q93RrHU+nGoqOxPzsjBKpjs6NkBJTqrocOBP0ld1/N+Q6GvhdEhGf2JeLynd3qLQDARwXKsomSjUVSZyYS/nOafIVias9mn+5DjHe2ZQIAExRBXnich2seROTPWi6onOCKejE7QaVZWwCgBKwYDrBRe6nGDiqlSwCgNUpz0rtnEMSeT/Wah7aH9UR0XO8StgBAOLn1TS0Mo2dEe73E1EKPS40AQNlHicYm4A9o/KMhuHkXNsPL/a2UVwEQbvZbmvG7+MtszUxr+TOgP5PprdvTen2YGZWyBQCSY5NPDAOYuL1VVhYw6UYAwLdVkrFJR8csfu52B4yOX1p5V6zclnkCxVvvkgwAgHtpQ40XTZ5+dooruCozrttN9HnC/hazIz1M9dLhzshLuh8I9Gx+PzE1vd6ltwAAA0s/jfNCj3kwivzgxAUaa+1Z+tsOydpMAOAMdiZcgM9/bv/qPG5ebDnZdhq7NZtwAYAaYHqY8KDoTxzWUvva33aINZcAwHor+iMl1/Sf+cWF8cOOLydxueVNuABAmf50wloSPvsThSi1j/1thVizCQCMQUvYdMPvP3tVqQnp158md60EKwCizvF6i+oroel+bnYj7DNm/iXdS+N+paJ7q3EBgIzZJJVquA+QcB9ApUYAYNg4zihUXM6h1VVy75nbcpsxffNcEvC7ywimR+l5YvV+BaKpF4gR+qbI70UGexJKbAeJJEU0MCAr6XBCeNKGXTXCks5CS9agR+J1G1t6FTpuSwRXA1J+/bxsShzRp6La5a7TO/nU9UJ4n7TpIgWfxbspuOgK1DLa9i3teHXsfaOpxfyXeMjbaPHxNB7A+dGzeRfFPvrX6DxKmi02tx8ilvH+6LtkJpAigY30a8Tbjom5RVmCfpTZ6eSFBLvvOG8wXIDl4dHr8m1a142Tc1o5JGF8vvBCET16Va88RdJ5viCPX7i0dDrNsaAAGaBWHPCGyq/wG9RujEMDBQ8lqmvzx8bIHwjDFCN1nnHEhdO8O5krpCdm3CdzcDom3rPayGpcHSNT4Rixd9vivdP8vZl2DUDOGpKmLT17xVhSQOuWARItpehqVoTuBmEZvB3aiwEKwR33v9q2X7hzLBggIdqzA1c8JgP5+xiQA7GOAqWKdO6BA/68hkRsmhQeoNDj1B1rzXsch/mIJbspge/gY8gLi1f16uoyELGDXH7JnDndHMKSnXMPDRSk/ZBScnbTTB/Ef5KoAQliiGUQnfsfoPWhxii1WGBI7cqGCvvh7wljgE/7yQlkaIXfmlgWLoUVOGTd9mXeaFpOTWRKh8j+psTByU/Gi5TE3aMhngkRiFouCB0/je1eHq0fic4jPy68yH2fhJD9BqdK8vF9IsK7g94nr9VBEwcAXrODsSFWA0wF7VqTk4Aggn6k+bta5PzHMJ2Uxl3OspKTQtQoKQY+toTphIyRQ5iT34wYPAj866D64Am+jle0ESv5TjjrEZEbeuI6qSI7zqQ/S8Dy+vSctf8jkov6r0vWZm8XbdgVb6Bzxhd/c/NzfxflYL+U/x+cwFhRIY+rH0wuGBPqe0zsh4GT42i8/HA9CTiwm32QrorNNmldtDszLhXtDnzxunMr6BJFk1j+YtF68seYZB/3XosuGf3aJYjwU7Wzt9gJQEgoxyfRb3nx5F5zDgKQo8kkqcoRkzqi2RG46b9rm4TJ8/kNTA7su1lgTWfAm0ZzkFCybGAFeDxiMZGYTgLroPhmywQbMqEUBgKJS8BwRDcsXf5GJsBbRdUTCSiXAaNCNdivJvhWj9WCDKZidpFK3fYRXivzV63bP8Jr8X8pWwmXOflwGwFzM20o+e10mljk1M6T+K41vlVv4lCOYa0/ykC5vG0vgZaE1iJgYgFxPcgI+m0jbQJFmfNSg2/yTZZOVuycXUze6XkrP4P3EBAH2GndvAIJH6FOFjyAomXX8XdaACL3hkQQrDLl/RggQSoLSzJJXOhSTxyxaT5l5wJxWZIa8MM0XAPdtEX1to7BTTulx1IyympHm3r/noRE/4k+Lm1n74AyMx+2Gz3jWCLASptQuDUalh2AHKzorRtCdVmOi3FeE/YhE8r37GFh0rmfXhuKPTEzOccTszy8Ens8kA1nZrTMJodU8clgBfpYfH+fawrK1XlNJ15dMspl92NPe7dwRLFaxO1hwVYBnp5DX4OL0agYbxLOVXY8kXsr07hLIldttvxtr5ONmmyqwFhX4svRDktSz0EYniVukwQ4OVIW4BDegfl87qqmlnDB12LFDpsZeVr102YOEjOXdn/k87Y3wFjh78dSfkbhd026ZgETV5z9sdNzZpXjJVeeYurBsgRttPX8bfMcMKrZ+yUB0FP5EtKo/Ry6qTar0SKTdRa7ToSqNNFjBI5mx8D78T4lszN7dlFXmh6gWIE4cGTGZ1YyAwA+PDQ0ZeH1IUvt8WHXaAIA0cmp8jmcYZXAgrdLOzV93uBJdzVNaRPzIBuNhjpvV75zZ273RaPN9OoUax0ubjMzAEDx0FAklY6PUrubyfewGzQBAOfr7KjyLRx0pUAvVyeVfWPc3aqnEskM6tZoJlpjftPtzN7dSkwF4LGaxAYRbjQ5FH9rZzuzN3fwOE2vrFjLuDMbmQGAWY4MtV
xe2AO1dpUPuUETALjSMVV5iOOpBkiIq7cst7L1V2R+MRLwD95DvnDvDf/VDVSIbmchCt5cyDaaAQALhWpoJtR17szZuY6kt0sI7aYzXwfx/VG9M+4ey78FdkerP+qStGBM6gIZK+ySm6GaHeiZkBrL7khQ+T1ZhfRLsAOqzcyu8kdXR5VkFdOalvu0f5B+v7CLyTT3ZjKy1bQvuNk1YqFqj5y93G7ZQoZVwlEFT34xIfi8JY/YKX7JkEenwyIrF3Dv095HOjQxLXr96JLoi0w7lqlNkBmX81vtJ5iDTjrl8DLFY0FimPAaCSO3dIVBoRL3XIBa9iCDvYBSBhrp7bGyhBiE0qP7U2O6f/MTDWiXuGtzPTmXRs1dBzcrecwEZCkiDGt2RjLK44Wy36YhkWlUQ5VZiu5h1plf2EnsINPNaVuWF8mKxowqMuqlA0cHgD2iQ7N25krgITwnu+PK1yjpHavNzpCwNsfRMWt6PL2fjUEO0zJm4K5JUeukY+uiowCXE0079N4Of3tnbUh3CwzPuElcmNtQcReoTypsmABoXjPC2tH1LN/w6skDYhUorRlcePt8OzBKHyIni4YdX8KQYSM0E2IUjWPTYJySMjjuj/E2s0f2sGbF6FyTd9PxSiI58JsMzsHWzsVn2b2osa4Nc8iWliKDIECQMJUdsN89z9S/l5MQ+YFnb/sxQj9TbcWpEP23jhkAmFacK0z4LKEX37jbsdmpQl2bQ0qrt8oEACjDzXSUtvjDv+Eepb2jF/ru4DpWNGIG8Rirl8c0n1ExJx8xvHFjPBe06WAdPvazdga3LCAYdIwLng/4f+J4ZE7Wl88hUXfE4radwA0PvJn+BPVL620RjsKXoa1T5T+6iPK5oeP+Vdf1yIdeJYLSJB1eWBT2LnPTG7uxALw7+t718mw66l7vhqNKqACj8P/u3Cg/kUhdO3CXClYOfXLEzSinmA1uCH97AggF2NpKO4nRoTdzz2E/O5oqsbPeSktqnSxyVMVodyxaG1Hv6Kz0VAX2VmKDUCJrbZR1soNi8Xv4eOsUB7iwkswVj+Fhw7dTBQ1QFldhM260kvWvnNnorPxXmPJ5AhO1tb7emNQFnJbWUsfUTpC/4fw6IbWlw/qxp+G2Avf6kAQs4Lokaiprll1T4RuiefD+8+PVIwAyRfTqw1s4sm7ljSB7nhkAsNGxEM5BKX2Oalq396DDOU0AIAOdr7t3Cbr9+ygBgLtYjuqayR3QjnFN01Xjo7t8ZgYU3tg6PNh+eWl3tojNTVY5ADkiPLZS02wev8o7cf8R0s1nPGdEf6anHIAcEn77hmtmmydb2rv/7y3bQeT8ET37UNTd7bDaz4iW13+XGQDghVZnIcxRUbTQk7q9hR7OaAIALR6Z89T9Lg0ybxIAaPDVzWqzB93r2HP3tePDL+Yh/avb0g9F3d0+LQPRpxxF53VmAMA4pOINLaMYMu7M/DOkCxsWjT10SGtaojbfde7SC0Qd+ku9HMZyFv7QyUfJaMi4QQiphdjaF3KwDS/3s4JGBtvdFy+Q5A/sVAHTCRQAw/s2QV2r8A1w2g0PYyl0guvHNEHJTbFkLFN9yBcQqO4PXKzrVkUE4b8IalGcG0KGHIGyX9ANI2QAIHsCIsmeZyV0Ai86qhqkmfraB9pRn2fTgWjpChmuTLvbyOs+fW1cPn5tzsePXpeKZxjFHgiM65LMb8jo0MPQd8tOoDYXCef6HRCIQRQbuHRtFgR0fbp137sf5dadeuwljY7lRX1Xzo3/gUZ2aYbJHSYnua/AvYy8U6JtKDH0R88yskb2aL9dk3PX6swbQBRSPUTpsMj02QwZKeHp0AUGwsk26EzApmUY09HA4KYniUPtL4+f0jSd+jQFAPhaHcwkWdwHpcE6iKPk9LW987B6ck060srVUtb0IBnu9T1NH8b6wMXSERoQOtarxvcu9DWKNZ/wwBIMwVkGvSf9IPansIFO5BcN22IUyZtS0r/RhM+KFhgJ6cAkFT5QQvNSpwsnmBiD61QhuFZHOC12FHl0q5mdhiumOOLer6asBvLgKlNbEan+USd/gZctP2kHAEcnx2SNBt6JE5utNAgpzTmSMtM5WYr1NW+KK1BYrQJC2J1fOvaJ4mEEmgoDCtpy3lq+U6cdSBXdLN12yIdqUDoNMvxCATo+jN23ZLA/VLY7Qhcyou5hcIUZASLgZoiVAu9hdAXion/nHvjEBaDjn9Sterk4rojWlKu+kiuWTXGAHbLpolmp3uVSyFWETbKBfr1QXi3AOhEWfMGI7FZM5RMsSglADPdit9SYzdwKVWcTNMKaIRtoRBgu57N2HypASsRMWwzedo67QyUFSC+F5dLjkAGtRBk79wu2YnXIs6tC+XwkavgqekbAqq8ajmr5nWkVxbzDXGq6DLdCDAmKsheN8mvHXpiL7aXBlFhCA5qAUdCc/dSlzRkyIDUONoIMcpEt6C+b+DKnReOE351C2KHDad+HvoTziQx+eJBKakEVzYpGcTOBN2apu3cb+UuYKufmT4lnkyNMK7oyzphRSpeERBwSVp5E+lengJYxoMJyhhAxUNpnNHZ8pCtdzLXGZI62FTss5WiYZTOGISaDh2c/z5xdu/ZxRBRXpiOi5gnuIwt6p3sNfPZofgXxuYzMyKycAWCzZcklPqQPSk6HtBLxYtFOrgBD5vbzHPPpw8l1IKb9BVMFufTDbccDz07bitDPsbVTRuWOhdG8jsXuyu+9pZhcY1H+TNDXQ0ZXycVcEfvwUp25uqWusdBQ26Y91cTFfhgSOPh1CpMIPxFow6DigPxzVgstrHShsHAfGxfvM5V4qjAf9pGI6UeSPbhVaWxJC3l4t9FJiCb2Ld8f4HDY7SgRIc0HbwbvOybnP7/9JEwC03q8mGvMbf9YSvV47zYjJoj+W8UMALSCGYSWURyvcbmKhyGLclhK7SpULXYKEpsyAQDxseDnGdVT9Qz/9bvcqdUvfE8MKwJwrYDW4kX2AL36Im4oIhL3AaIOA+I5huxJMp8n+LrSZfy1JwSoLjdGR2xoqgJQs44x/01HMKWZivBToNHJO4FQH5DgglUrDhiGCEpiiNZAACVJJDwxvvxzcXL9xy7mb/HpFa0BwG/Zps3VJc5hxwddxl1j620BguAMNESRQSSM7QnJigNse3ZDsnODW98CJBoz6FU0Pn5GfTcwN+u1YOI1t1OtCjE2zUkz/k7BzQijJo5NPxKcv7QWxnyof/lazv1110+i+a9RXrUli9gBfdhVnSuR7h9D+N48eWoogSCdUP3NPcKTC9DGQ90H370UOXFx9Jtme/GYgEnuUDMGJJFQr8cMdM9K8ei/ApPnw/DAJtWoYgrtmRh96lXVqk3OHbV+rjMiI8jmM6IwJzLeU3O5QzsPSIHyZOyWpKcaouhOJPquuRP/+jqduspt0mVmA+o50Bzn00YLAuke5eh7NsuQhtF57YZU5+NC7WQAncaVg3ULt9Qnr4J9TyyQLiJlbVAN+UzTm/wxfYT8P7zADbsGQ1vTRV3LwTvNOuGdJMFoOONsLt48IFcGeh5YL
fqzl/WSbtbR0g4za7miXjVqpzcx4Tylqy6/4y5uBx75ayX6PC9Oc8Q0Mmxqb946x7XcCJsN7Woa0HaIkLLBZl+j0fAwgalsSOc3pm8gFZzCAwq44By+x1bBMIipFs2DJPpJyyj0006CB3ANDexnIDf2E3u0CbDV1F0KRnYxg2hRWB6nc04TfnwvX2VMtQedgIrpAZIx0A6EOAz7vgS2MmtWgyj0qGSGPNU0JEdBhOZG6o4rLC5Gy4w2vSNv50+oG5ug4DSY6FZG5OX05QlnmxPHNntlXU+5ngM7Ohr/lSfxn+v6TbQfWwVRudxDRFjMkAn7Mp3pjIIGtS+ykAxn69dCZ9HEDqCqcbt5FX1iTYRd2N/uwjTDmmOCcTLmy6ou7B9JZbYM+Cw0hJUZ8EloYNn7T5+UXcVFTxDzY7o+A8MjYVP2PIPodDyCGXFua6h26NqGWmcngb5ywyJXLuJfPpQHojM9raOuaWrbPO7SaXqt0I7fhFJtoIrXKTH75P88UwW96yihBx9zUHwxgZhIT6C6rhPIKVviEmgf4HNS8TkM56ZREcS8BF77UCfsNMMrM1EvWI4ZK73/nFNixM6uRHlBfas3ct+SuBzyxHBiBzBYUPkSbdey9LCDFlreJCcKkx7US3GOzwinGBpIt8H/XGXBvYfws1Wx6s1lVE90grlEfFBYq0pdYEiCFaGCHRUS/iBOWwuYdt0ocpgedtqNy8RP0XDvXlWa6JrWS0sjnGEZgBAbwY2XzJ4/hzE0FTw5WFw02OfRu3a3vKbExa5Ni2KGPfTRlGRF7BgCArAItv+0ojh+jiUON+w7W+7UfQzFFRC2Ym5x9YXrimrRbeRa+S29Qm+6nHot4d+P/B79TLkx9G95SKjDX1tRK/ScVhIuC5vu6e7wN3wRzonCNHsQ3hj4Gk1YDfMihADipX2EdSFgDI03JBfBr3LEguMfK/EOWtgAYRB2dIKO+U1s3msaWa7+E7USNEoDEmcSlvfu+/PaPUqlg4r4xOBlOr9hPLECWHSZ4l2sqeXuGA+yTxHol3IocGww3gDtlI+CJKdO3sqvpshWhp+ZmcnfKQp0kv+xsBo0An8rkKXuypLPbCu6rGEGAC7TaMhcb2jFhD9vCuUsWn0A3unsYo7tSbCXjZkAwHE8I4fAPTGb2wHnqkZv1wy23dQPABwNSuFQHoLuM3xxg6ZMcaZAy+wvC3BEnWB352yGwPGq76vzfeFddUuqOZCq0KWkTsDsGrMdgFwZC+dAVfNs74DCmO80Qcp1RSaJw/i2YFleG0yfwyoD+wjaeZhdKw0kgJr7FyM9Jn5ZuH0vD06ZzZFLr/b4XXJauSABVJEYr22pQfR+b2/cc9CpXG336HHC6l+fUr1OdRhiUiFbxwwAtAQzHVo5WBqyfZJ0dvDLI8cDNtc9fLxBEwAgGGRPt1ngrqv0trjatbEfAGDhqrWoLG5L9Sm4OGUc46r0vxlcrmYwxjGNdy7E5cpxsHpo8kEczQXyD3Z8p28HbqhF6Gch+eVENBTea80AAEvQVGjlz9GQnk4Ijd2tIWhfiMr8G4XvV8bm4Hdx0WhdOR6/id62bkXAJr7x8OIYem4TRGo8Fbi9cdCb65gXRJfeN9e+G3+Bt3tGm1Dt36vDg5kAC8GFsVWSEl6Y9jquvkvHYSHW2t3pMBenmDp6nAz2pMCb42FKmFBUHqlhSoJkhG3ofkouGdpYqGEqWBmMYHh6kIpYvvSElIJ23l2D+Z2Qd/tjfm/ZdXbd2OUrWHeSwvqsGtFfq5gBgIFqKVh3aCVhS1xVSWV2pu3ikNOt1JwJAJg3+S6yc3/tGsrdej8dZug4OF1b9k+PbfZH0pIvS7+uRfThOL3WuvAzo46VtBvXT5h/qT5fWQwAeHV8yusomY66a5dVDn+quA1/v6u65Si9Vj0lkZIS8jfFcM4jveFyNL9RG/5+F2g5jIADRTrMdSnvo+6UnQe0Eb8GuiWdIWRzK5I8n46b7sh3r/N6mVEqiPa0FRwBokdWNicFMVWtN2cDyUBXkaaL+B06Dkp05svno0le0J7+7PSe423xRL9nVgra8/6Udp5M9niFTjmZXTc2zOiQqYo2FflsYgYAuiV2uszEjnYOX75C4vpUcr+P2vCFexEv3ApNAGBMFhFtWDaM3PYDarpWzgkDVzcXBABB81JXExoUPDKLJERZ7ZGSEChFTEtCsHU/RhIKpxU4klCSKvdqkLqayUBgumqUgvDAKJeMGShw3MbiGHCyPPsscE7m+7bA6dQVXYk4fDoOthsddec3Ihni0yN2+vZhBYIEUH//Yrh1HJR80fxqjsbyDn3x6U7vHVY0SAAVJcb8ZR0i8B7VpwvP8Ul1ME0fIcR/em+XtuePFasWt3odMwBQaYYbwimXg6UH7O2p1I7xWS45O5s0AQAsrka3UeEmlXJpPg2XjQYBQAcXo0VZuFWqj8BtK2M3LqL/hbiQZpDG5SnvBXE5d3yrndi788RIGm1gdnZwW9gL1EgAzvHaXEqz4CRfayRm8sX3G7Ih0Cn4skSSDfajYsbz9UviT/nLpHQQcVwdI77qnxQs6tfAf/mhYqH1+YmdWp78Y7BXTRx4XiTWHyUHZlNGBsHt4kyA5RfDDJgMSWF3bSZ+/YWvTm3Du+ngrrj59Azd+dVmaF+UUgSPVfCriHZLihuS+Y4SY0TiDbqRBPnDMLfj4CSpxMznk5RCKQ152SlnU5h+36qu2q7Rc1trModcAGvaU84igFFGJ6ku2NJdmvjHDIfwFTLntLngfWwkaYqgiIS+Xea49q3fr3i0o/SlDhUDB94sixYv4rD6YUQvyrO8QFlmBELKQJ4zUVBlPAGa2jxxhDazQpVCYdcTR3FXbepUaIMiyProhX8CNjU5LGI1JSoqMFqSMmGg5OQ/qeoLICDd4Z+F2LQEz6NMWnqUnqmNglx15Y0fr+jyM4roK4RTlw9YFD9ujfDEgBcr6o6z6pZ80ng7kUZ10SC0IrYGcyzjk5hSvkr3zF6GaIVRr+VOiO00CPilEorZtcywwJS4wCBB6zjfnbtOZ2UQfKmhVDiSs3+T39+3txkCIBNpFkrgHe2gOWySbJ9+IXTdNfSYSSx/o+Z1tIGVOSRuOtmRRI4q1qfKfu2zdUqJuK7cmCsEivr4Pwx2eIFL1727PUphZLwWTH0fsd32Pd1Wn/4Ggmc5g43yuc8eH7XeuTyt7hAWzURspdL35ICpqIJBcDMyjNDcvNeaWaA6O+J1Cu0RULXNwn2KxRGNtaXjPPRqr3uf+pJ9/bc9u8QF147akczFxYWc6GTgojunE4ILM1GXtXNn4ad3/C1p63UAgJGAL/+NYpoTphZjSrFXO3Pop8ZEjNUWSHTRxKw4wepTMyU79Eke9fhEhz724ARP4jgrtQWybIexNfwQciVDH+uVwOZsAtwJgV3ZCaw2wcCB9EQfTbo+p5DPDm4ze5BahaAr9vUr5lkIIclcDLxcTlHpS7JhpjBZ3Z2wJhPjJpPGy2UC11R114BhI9vhZPMKudhcEYpLn7q7gImNnAWvG3EnWMJ3q+7b
saMJnwjUIP37qmGgtyTykPfxuzX0aXT02OD/fcOWzdCKh1PHfH1drhjpUGj1+N64D6bLzFzuNnQCAxtszDkZdIImohoT/g5aqOPj2kSQB6hvt5HiiaMLebDfAEMiLxfDwMDgzRDfXF0IbFHhiVE8rycBDtIBXOdDwPDs2TDOlVdoN46NHEBSEswR/fulHkz0FA0BdJg3cGKY9kDw9PjYW91juS9ETxWDaaTOYmlTAjQp2ZTIlSmn/EoTVuW+BHaftfYXaNXBpgK7N3HTNNoCI3Wkj73uLT1LW+r8AyByjRJrZlNh6ULakedjOvOvEXs2fyxGThVpThI3wx8t5DO6p+6p3au6O7W644dfk9rQ036u2282XGW3IBxmcTMWaYngcbuG6fCFiyyRTn+jhHmPaBn1XWW3ofu5C7FE2CInwOmZuRO5MLe3OJnH7xPDGqx7LocITerD2GnRwbp26r44LyUwGT6AyUHEQkPncGCaBoBWlkJ3PXoTIrGH4tYwj4jAhT0giIT0Cab8S/EfdYZngYqLCNE7hUuNuR1v8m4xrWUSASRSV6tRKYzJ20tMiCThrgIDKOU5TOJa/v/OGE3bRAXI0/LtPnEDJEvbjMW03EEyURi3nMlRsRxOsZiKjZGEJCZs1ph+KLF0EhMz4RscjNaPI6ouWgKn/qDy+p6PYtuKpc3Nq99/EOqsol0FQRsXzkLafUIR2q49593Uf5QwLSMxuGp1hRZnKvyEqrrdVx8dxio/ewIsbBJwQlooot8HhRTmDI2dEAb04wGNQUFWxf5NpfWcBSHqKDtUzSMBhkz3lPb2kN2U1pphuSETnlv5/ITB2n6nrQ1A+o7zjKjhu8QLzpBvnGhuFf6Wa7LpveA73fKutt1goasrJ1odRTD2qTsn/zPai7QGcLu7s0NZD5e+uYdJJ7x1tUzq3lF/TDVj9S7N9vqj0ZZmq3fP+GOAfmLT2XQ7Ae/7yvrrUrq5v5jGjCQOo89l04gG5KqvN6/Q6TVnqZT3KFw/fT9USi4eUicB6Vhp7LbvQ5tVMjvyPuQHPNI44pKMQ28tTzKIQcWALw6klPrB4Y/XbWv2iAvcrv400O3cUw3WBfm8R7ZhQWzgKus8gVtJYOjYe/1Eehf3zhKiBwjQdUDA6IuQshKAiyAYLVA10+zA2a1Aj3jR88vZQCwgNNMNia6zsCiI7o5+r+Q1cZnnQWH+vxh8WYyL3Q7vTrtXzfk1V3Oy5DFXOzxdLBac/QLb4l+8i3a4yP+yb4GfYMhX4WlxgeboPxoy8P4BoQxN0rKLEOuPUKPXerQ1pYRmOytYaYdEaj37ue70Fr7gbDxTXU+4m8jbcyzGgDiMt3QhBwzplPPNL9hAtiFKBSUMr/QwhPr+utQoWvqqBSruXLzpyXIm2ZMzzTxuqri19SrHO1rXnFwFF25jqrmA21X5C7kx6+mCLt+22VWVTB4tzxVfTbGBWbl5ySaftud8ohxpvMd3SYB5k84XTICFxoAeDy/tEIiwrjMSmsiCdiXkrlFWRATpCh0iRsWmi4CbxfmNNqVDp2p47gxlAR1o0QGfTgIciYqxvagzRwrEzY2meEo02E5fQImoGr7z66gboqjA0KIO5iZ7mFbEpn12Iwsg3EBBc9FVD690qxkIHZLgsABzbEYMvaIIUgzpwsv31HK7E1p2mJYNwoCYlOw1eiWsRP8O8aQxs4vdbsbOyX8DEaQtdqvzzQK6hQx5qUDV+3wRBBmWqMA25g2jCEUp9hZV5GJFtxaoc/aUTuvcUW2vTV7sRlx3rNql3qXK8hOX6fptSxqvTO/g2Ye6+zVNBVTWB2UZ/Y5mpVpfNNbs0i5tbCDfJ21pO2QRa8c37VC+jyp/wY0jUgvMbGXpfUvgMuMZ9PD+TxbaYGCbCt8UV6BSy4dRfOGLMviuKvebWxCjzXP36qpT6Ol6JwDtbgo647ybGm7zCAlXN3L7GYLiczRcdsV19oA+f49cB+LGnhK8OjF0nOzIC5xtdUfqUyM4Pl5s7phpc3QsPS3P/graUehM3k05pIhGKeYpRQm16LNcL6uz5dze5CW53LxIsMEVDwC6LH5peSReCEdBPbxvmm4JpyidR4wXfDjUVTqNh3bqT5OlXnVQFyWFR4ayvlms+Lw3cO8Q+Y/rzYrfosf8YqYs8znymtAlb7kLbGeXPKeFycwjnXWp09tnwUr+xHy7Qjh92aM/gV1Jpe9mcCAmILkqwCSRU/EG0gB5fWM75H0E/Id0072Jx/uEvcwwAcNzR7ALwaOyMTDdFVnDBea4hMryaYahlLDi2W4jEARcWPeNUNIoAFsmeHBJjAFhMvnEnXUA7zeGQk2uuZzmCPL2mHavsQ/g4i2C3ko8AeYRBRN1ialNGrD3VEKgqaYB+0hLrcH6xI0GLo2zOcUhw0p3wPBMEjonkYPfBzGnZSMWOYNcC3PlwUU3lyD/DFJIuu+aYlHK05+MLxS7mhi9wE2DScss2Fqm3wHFkkC3gfG5iViekBk3fJKulHKtEotun8BQWIOZwX7oAYx4ddSw5X+dtQsiJQHePgIH6xJbtY6OlPSzMGknY0uihYQJmmCXbEoDqPjhtOjAlJxb4TaFgadaLWNyWEBhQ/pAMO9IagPN3ZcNGCi2K1txQeT3O9l96RIcnAkzIE7uI/1AGhJVk75u3K8guXV0/EirWSfWZIqDZFMuR7Z8sXMOA37PCqmUvEOAT8tJWY6SyepReI6StHXqQsytDtPq4pb2QzVgvnEC9hVcPNgcKoH0nM40Qyru1e41Gm+RWS/8TKbTPk+cs0EAykoBIq4Z7bdFU7vFP+09CRzQKjRaB9MqWCPD+NmA3pgG+qNeV4f/mH7A6G9tWnVItUiieAh+CehfcrjHrIGlY8+7/wdvXrblCeT82yqdhygjzQwxmYfhKNqA6cxOpmqReGT2tmgTz5fZnBW7IxeNfTpttgCuDWZnni9jdF+ZT5wr5Yxt0wAim3B3X5HVn/nnlm72/qk73dNzusLtdDRs/fc6ThdRdvG0bADxXPf/R2dNE+0hwFfCzp7R61T2tDi+X2vXxzEe/DaCABUpn81n7lm2q2aROyN1ZpS7s2kh+1FY4RCjdgQCfFu9TSSme9qhldGuAmwmbUwBQuh8yjAzb3GI5S2YF/ynE/j2KljA10aivlYutdaHU7KaLPh45Yvp2JtPlT3ZonI28yqXWSC8VCfoTfWBupneRqhi+D3QG/BNh8U0FG5V0biIAFp4kSRZxNhmTMIA8/vOrzZCitqLvaamIhcaIJzYOiBzY3ua2ooJc2AejfFkJDtTV8Bc85yYyNOCuZufk5t+DHGfLVPJfFUK3x76hOAUK+0EwQLeaguuPzs/Mqk0ZUlYTL5JsHmKIOn6d7qC1xgapGpkp521WQeQthk1Nzn+G9CR2aq5zGwiZnh4TmU02KXFIBqxhiOhCuB2m2Q6VGsmCUnORT6qIH1A5lS1EAB2O0BHVJV48oyc+mW+I5r/jvrrR3z7UeZ4HMPHNJ8oRwN/dvYQlhyen9E6k3G6Urnj4fRc49g3jHauceyHOcMVyBH
bkYFVz4sinRRanKAokhTyTnmLkFL67fDZiICOJpTzFcuTKnefS463s5899XyBpCepVQk6wNu6AYADq/FiQJyiY9yJJDsXbFPZrQeHQb5CQ6uZMCorZnXTardSR52WX65BwG4tggkRckvdhanSCabcruV0C3dY+3db4BW3/gMAKArMif0sG5yccbsucHbV8DFib0Vqw73T/uQN7sYqYIsQq2EJsCNwQcN7c+DSfOwOwLrfsh0AQFUt/Qtf70M1l/5NRyW12k4j71DbfXk8QAdUR9MJBZxiq0wneK6bxjwAgD3gICbutkBFZmb6QC149nBA1/KGCUHt1ECNxIn9jerP8AvpONJIpv+qlX7Xw8z//i10B2C/OPq6n8CxBnIFTWOKw6Wugq68X7zfzH/2NBWh8lH85FqptgE0g2lm5+b7c/qFdByphTnIqfhkC/1IMtRxar9H35STuzhfPqFW5hj9+y+oBfG6Tjwarb6vEF872DuufNsAam2OeTJ3CVzCC73zDO/nv+a76mrj97TSHqhWQcQVKrq3rhsAcP8aQENFXad8mtm5ILXFXdJBzXyFhtyxw6isGGrInRKqkrgIuqJFtCNiwlJ3Ic86gcupXU+3MNnavxmDud36DwD04kDtsZ8goCPjNmXQS1M8WNAn9jcqYA5WAY4LYWvafpyPuBzfNej8UxnvM6LirLwlO7w92r1C5BDwJrq3rgsAOI29iP10+L8rYOp9h5H+teL20bjiIdgKn2dvz3oOoXxhlIVP6eYiqKHlTKMQTGNLAQDABIgrssW/htj/gbfFvBcy1LWidCDZvV0HAFBChLHFXMwrvSQRCdN9gbSmeXLu+6bSt0WUK2vwl8O/i4O97BIHY6UqfM9rf1zY8P1u5xxlEAjxhblUxbafvDqm0ra5892L039HvWU51NFq+3aqmhyAprHnC02dIDda6w9jbz5+qe/pB/F7z2tQorAnfkYzXWrYvrTD26w+ucUqG8rkW1WM7q3nBgBWwmqGUTBMKTcZvCEnFrKknKGVdWtnDeZSvHyFhnYRYTy7YhznVdil0LMkTtAoaR0iABDRspYTrHoKJjrVIuXc9act4Gngbf8AgNaBbIj9OPE/C3IK7aVHLFQuKuFE/U1RPE72io4MveeYocy5lNfKJvLKvMB3Y0eIvClzkmpSiwdbTNFNv+O8jsZOGIgrx+S5crD1JDWz75NkGPUGRoT42rz6bjdPJkucCF3175QhMlhFYPAYG15awlXTEhVchqDg/ogJLvfwCmrbLR1odrLN+fxyBASHsSAjHBgmGlhOMLCUWGBqWD2YSGCAQOCQXvnczE82J/PJBAFH/yQ2U2EigDUEAM9DQgA86830pbjAnjrc/XbbQ091KCbke+exoUTYdqvLqsl1IUNgMZGW0MVQSuolYEULgsWwbgoAgJvA2e0eO+El4MJOO/aYH3fJtQmExVUydp0Kura35QEAxzHiJmjhUtU+3+qnvXk9Jr1qfiad0oeEju3TMB1WpBPoDGv5QWq6oIKN61SMRUOP5TOhoEmPXTV7S1Dx/DG7ZzTwzjK4Zuxwz9Wfruh2YhrbXeo0un7r+C2Q0XmAa2At3psDoxm/zuOiHe6p+iaUqpkGT/hW37xVNwBgodAALyCWNzMGdq64u7wKOG8hAFJjgSwAdZUEzvlboVVNiABAy9C6wAk2NRt+ne4Nd6/y727UGg103fgPALgMeXqtH68tnIvRPA642IL4pnXZEC5P0d86imuqFaV4rCC94db7QacsqROUUowGU10/tx5wmnUPu8YjV2bXEzzywsa8x9upRlM7AJAyHg0ZvqF5O03U240EdUHWSkMCqWvMAwCcA89RcGi/ZZVgtowMigOuSIK5ZdkoS4KFWANqJMFWd0CxvCTnt6zBEvZw0rvRpD3OZ2h3eDuVY2jeURxezBpU9DPkPx4NvrsmTb92ePu0AmMDaMrR4vje6q00Ih+Dfkby29Sn/OxwL5WHqFk4zihbGXg/H8CXjsZfoyj9cod7pxUdG0DNwzFH6dNAdG3q9UaztVYqEe1wbyU2MUaVGMTdIbq3rhsAGOv8Q4O8uVNARspuOhsvyVsICl1dUXEdVQmKqZKoFXRfEyIA0BC0uOEEq5q7PE/3hlnV/s01OK2t/wCAVgbdZD9xgb42mkOAAVXxxM1U0aGh9yypVuSl9AJf0vvwoL06FSil9KEYczv0gJCfwdL/TIL0h/wd9xR39DisG443up2ByIe8is7rbQAAOp/qOG6bHrig6uQ577yV0pkjqgq62zDQ6inM3IDgLD5/Vwgh05UhVhDCtuUsYhESWsZmAgBgWsN0FUIF82WCSmfHJpJs7UomAZgVk2vYAQCyhRixmNJYgnGZzAmAMQ3xxMBYMElKIOrBMS5x4NOPXlV3zJwCderfSplMo9RPi/uqihkMJ5j1BBP4cmYEme/ZeRC/grqyy/Dy0fhcD17BO7rN7smKVQYvs8kQALgxgsRJCrXJFtrsJLpuDUM3y8nJk0h0TD7fOYePf1lUlborUAIC1e6QQMUAehoQAQAA+F3uk8OIAomm9qr/pr1p4ur05n8AYADNQfrPliurultqcxt3Sxlv4259jG6TLym17gxo6RMtDF6wvCHmg4sY/1Wzfw7yfnb/5kNX+pu8lbtBEHP5/xdcAyVI0kpt1HlNL0adkYsMTajCMYwPRiS7c3QHSbGMf89afhxxFGoeK+PHMM9T1R5QZBzudFq+XrztdvfMG+hXgCxiPFYl1TakZy3Q5iB4sRCrlOGl+1gWD2KZU0H0pj8IwqCPNTQn/dLFLAQUu8yus4kOk2U+7F5rDJ7iJdgUh/A0q6cAAKdNJUcxfF7pJThWvZ3XHJrrlp2iCohCb+VVY1UeABCkV/0QWVkGzs0LuVGtjJlAbq6Vba4gd2Q1WF1Q2tsVFVXwpwsnaT64C01jJV7ltIIS/XF4OY7m8YXhL8SO9txvG1/0l89w2tSgXGIPvRrRg/m2ux3tqXtxxprrOvt2p6N7qwwBgFiDn400Owtw4M3EAavR53en1g0J+qQLg6d3lzoEqKuTgA/glBYhzLnCS92F0+RE2wtiWfnHC4Ilg2E3/gMAI0HbvfrPwoFT3W21ucRdKOMl7uLKMle6oRSdwHcSlSFUWp2kr+lFJExZXRICCbOdgRIFpXbb3WAaSldZY8EKL93Z1g4AhFVQgG35IhZewHTbRTw8iSytxFVRJVilKQ8AYDm8qkqowMRNDciWmXEKJMGzPQvSyxtSAmlUQaXiwn1yWzC9wkf0mdGcIL5vo3d0O51jaNRQHK6KGkyi4Ia/0Uyv+TRxd3T7vLJaA2jA0OI46Gmwyuoa6T0a/F3C76TZ0V46D1GfbowKEjx2LfQdjQV2v/ntaO+8UlsDqG83Xrc0DQas48yDo2k+8fZmR3vsnqRUJXzrnLA2gQMAafvjQ+3w5oaACMoum+uX5SxEfXs1FsUGqlQnURbQsRaBnvH/l7oLXVsqSuT2oqnZ2r/pjWyog+lq/AcABoTmmvrPlouaunNqcxt3Sxlv425d2c6VXhBqXs
AdEI7pVy1xObbp5+ooQo2G+i2GL/T3+R8+b096EyUy4cr4Sjs73NvKTWCiTlN9nJPDiMm/4kVw4cGjj0LpX0VeAzDnII+Hhhny0gWokHHEEQgEFhlsbgMqilpqMR2GHVIf1zmuzfnDz3NzSz5cwBsujr76JAGlYuMZFk9nsiwhPJ4C+wjD3IxkvKJyEx5Ut39n/YTK001ebWf/F1wjwyWFHMrAVmJYPX1IcCV8BClNvHQVZE+RsM3QgZUN5hWzGHoaXiGnekGzeW0Ia2BFMg/UjjD7OJ2rqh0X9y9fyTUeHQlK1xqy2wAuHyXtPQCXqbbtRADLfqfAzXAl1V14Oq1lOwj+ohE59TD16yV7QglHrJVNnLMfop6RA+IM9Mhx3d8s54IoWy6cMGh04HdfV0u4u7GzhQAID9E5EdL1Y4sFEmeW7ahGPUqgEGByhJZ6AdqFKkBWX4+u1Feqa/+2VPhzI8XwfDruvn0ngKHJRMd8qJqwA/56DwuLX1qSE3ylVV+/ArjngLHM27w2MgB/5ZXqke9scZanRDbG4QZ1HEPfm4/FOhtApcn4+uadmZvdDE/Qii8fFzp5FQJgvGhKR3ZWE/pmm7CFOrEh3gzYjhZwqXtIhMOWGWmecpXBSABwP37cuY/SuVI7gPGT917M8wRFm0tyOm33El+a8XPpPooXeBfq+OX6vgS3HQ9tLslNRfee3PLBBkoP193D5bmf7vjuYap0SqLJvz7Sbbg9UtVMawrjhHtb9u69s5E3Ku0mtHW48w7c/QlwfRYebMhNku/gHT/9HjmIZE3qOh8S0DpupqxRM0ih42GEbIlHW/2mHt5lgmqBxrUqF0TSRthUWvuB1ije5Q8wgSg9XezgnY7iOutig1Ni6vE2IBygtfEFs3QlbHq6KbNJ8D/Z9LD81nZvvKE1rfdG1ALhE5rzBUABgZ4u5oDXiG6HQbA9gNjJaCJF6AS4T/sBaRMS5RUROh7YItZppuEd9fzx9d/cQ8kBCrWIHXJSOhA2ABHVqBKxmBiAe8uAEfVmh7UyjW3mYfFWNLlMRj/M9nJG2Ly/gHbbaIx0JlsVxuvcfJS6wULlNrCOqAos22zsbjKjssmyBk+hWK5RnNksLQBDGJwI4ZxhS0fCXJ80QH1Of+lp+d/swORmH6feYg8A6HOxlWCno/jKTKyIxfsD1Q6OyX2K9FEKH4VRm4lS46PYZYhR8tX8SG24aI00t/kos60CVCYVC/BRVuwA1BRTpCKrK+gFU2m3AoRjf/HEcgsHWaXZnXZ3awDN3lljW2u8ufb17eqyw1E2kvx48keDIUUb1KvKiVel065aknRV3JMYZ7d3XGsXw9Qa+/q+yrjsgRLwT6X0XfjERh0eUtc5YHjl53sle33sPImXNbqnf1XOWEXOnAk3FbSFafkWVPC26G9Z6HK7SNOCrTL0BvINOvfSu+EEArtFIahilrakKZYqvgNb6bCjQLR334+2PsJtgIdM/KG24Gts7tl8fzPxbguX5Xa4kpnZXEOsSisvz96pjlarc/IOuwlxGzf6BniMeU0XvZ/T7Dtzwy5ipou7CAoJP00rJrXGOt6C5e64zoo5RBQP4I/ofucocOHR3VMDgnTzM451wiZX5zD+TBhOBx3P9mK1o++yjcbqJsvXs+/TWpBCgH1oPE4hbhr2KhVmgUr+4/fE11cX5eN2FpN8BMzo1qnBw1RUq7f1k89c96j4+JMWfsV/7U9dsTrL2FPpB33T3i8mnAKQvgYTACAgMLTuMTNwgrIogcr8TxoEK+k6Y3chWHN2AZuNYF3bSGwZgEuLYC0311YiAADCATjR4kAj6jq09tfSAA0cNQAEALkBecV+jAOVVZjpURkZQpeK4uT6SxK6zUoTggNrBTvZS4CEo9ID9iRhBN61VWBcNYRbi7fKmd1YeKttNpYMb1/dNEAEAFmCqVN923HU3tv01NztgukMJK5yKrx00kwIAHABzKKNmyLAUGpmN+6r0sbg2XvFdDMrBnE4MIMasYOh9eOsqhehGIc1r0GGxHMlbwONNr+mUDeAZhnthqOrXijTyL6VhklfWMspUTev/2uqdQOoq9ev17hetISbxL8ceCnd+VdeT6//a3F9l5SqayEo9sDmoXsDmDBvALUaTAAA9QJm6R6eGK6aRQlQxf8gMMWmXRmj+qyMM9INEJyNlqTCkUttNS3cLteW05MvitthkosurhlELZYHyPU1AQQAOuCu9R8IN1SaFQa4K4iVxLKH3C691IuLKhUkuoeFHFr5ZxXbOqckIBsz0j2apNDP1uduzu/7/1C/b8v0cMUIMkiAKa/k8bCc7GqAA2vID0GoWIZBFYfzzzkIfKP/hQ57JgHOtTMkAVZYAQ2APaacMSC74QMAcAlBbVks9BTipP7XeSaIuoYt73hEp0kV2DmTZc0nab8RkItT1s04NmdTIWAuxUk0MhNwXdhIrSNJlSnHjGe/irmWuEq3xLaYCnn8uHFfIhcdGBZ4wOUWKLDD16j9tkSPo37E36hTKD+3PMXDv9zjmvOE+7yPL/Sb9nD8kUZfZ1edTDu3fXryINhbg/2lWaXO0epFdZSFu8T76MbaWrO6tgIU67S9LSbsDZBigwkAkBpgenj6eAB1aYBy67sEO6g9I0rAHmZAAGAvSyKEQ20SBQBUEIVhRDTlo6i8CyU7WyxTbPnEo5sorrZqAwAot29hLGaphT4a1BV0uYXM2IguajbazJYNNbWGuHWrd6VDNH2ypNo1xF5yoY/2pHVFDzJu4BgVKRr3ujhRGg2cXkb5Lk1feNfQEaiVBiXDNQzLN1zwgq4Qw3vowCBGV1w+dgDelbX9IQA4qETXSct3HOJdj77lOMvtRLs+beUqjQk4zGxT9XtbmEltc++iFa5928zhT2H79ZgpE/hTuJPFse2SP0VYYg+eyJ9mn1Qj5oHGn8tT+mLDRVPu9kqRY/NlaG4Z1/Ddr+efXsI4z685XPE/6lehWK3f+lGQcMZMeLaAuWkwAQC2GrhuTf08uo4gJf4XBUE69WdUyWF0AwGHqyWJONYmUQDABzFoRsRBXG+N5uGT3apLjK5xXAPPNsQDANMNCqu5OKfzOavQFaMZHWhBEWfTnxMJbepSU3EjPwHhTCnXa2tvxSuctfI/GnBrX7g1vOticNt1hDcEw1G8i3FohBfxsVsyuKQBIgC4NXgzpu9uGdxszd3dQacprnLZtIiVslU1fFnkc9L59wsGVHA7sSVfQo2VzEAS6qhljySh+xjETIFu1ohNArUn33Q1fJ8x+OiQi06yw7k1Q5s/SsttAG0Y2pHfd8wz7IPvYzdDbn79pXWoyNa/L7em7zaAim09/w4noYE/bs94Q9Jfp8uArtS2X8dnfXzNTQSzuuaCZeu0PSUm7DHgQ4MJAOALMBWePgrAO/1PCoCH92RbtWfsA9hhBkQD7GFJBHHITaIAAA+iOoyIRvR29VGk3ywRJcnK/AMON+0BAK7U3DCTFALUXJg4tziz+1W41WpTwuUo4xWOlrfpghsXrh0qLWKWu1X4M/pZxOUirP9F4XjcUJ135
Y0RnP2m8j3yy/Yrfy0n93lb3JcUfmQhIklnkUkNhDTuUJa2mK1Xt0L3wB2+ibFn0/jBl+ieoE7BIGhtxGMwNzUGVaQ1eIRWaFoQgaB860tXZhHweuRyCQnkHFZ6QRgr4QycYf8+heARbhhnGW4lE80MX3L9ArKEMfWGCQKJrgqfMYCSSO6+a5soAOolv9LiPp9aabKlSnXccqts23OlVbhd/M+9h19Tgh3Wzlw3sCtWcCzgqg356RAp3pYPACC1i1AMe0Uj8DOjKjv/PetpZviIZiK/vefrMBtZAIDFfk3toqXU6YXaI5PZubIT069Olymunu2Rx09UB88N7Uu6U2+rgO0arDdWqRdXpHRZ1ZueDCq+Cwac8yZvkzeux0g3MT/6d6VU71oUJocCgUwYSK4SAbBhpD8f6q/ELyU/+7oemRep+fS0JRLsGvuPrUNwz11nPB6CV2sqeRl46fQZlbBL0JAWCtUOi1Brx2K/CgEAkLQgI9Kxooz20dt6k0T0gRF8eb1G0x4A0LcVxEkWKWEK3AerukLgEJgtTSYOHgLb0RDjsnbTHAKLkUvhxvgllqRe2YsubqwSy6cLEOsoLm3WVteuCG2O3YO77jli4bNMvq7Lml58luXX81nDZqxajDOzjzRjjhi/WlhIsmtF1YubdjXJ/8Usu5lsEl5rLz1QzzT8m78CH/MQT6fZsTYWzs1FmgZNyLaOgNiVruRQMpOPgHGaZ/m+O0nUYbMCEV9A8j6T4XEsr9enNg9oABZa1c3M5qprtC/EeQCCmfCmnH9Ec28bBTAzaTwlOV+GSb9OpcIXpxR+frgEJKY4PUnEqn/DUMQsJNHmmIA0XfgC+TLlTuTAObDmAtecgMZ/Rjfxg4G0QHGD9eG6g+cLI9efS1+StEJJAAQKNA/Xd9bDa7y5jWUW3nP5YfzH9Jy/4fBAqy5wHUBRVAvI5LqEZZe01hqa/zWtmGJX3gg3HnC6upvUf7LrB7bCTAlAjmWF1eKNgTwOcHU15T+9BuLVrtyr/qLmEPMQImFDrwVS7xcDX4lfoMTvOgv8vza6ySljpaBRTwtdgldUyQYUKGL36FlGDrAw00mLkwfipIi/FfGSRbi8Z5KhZqwBUIEFRg2a0YbOL1CGS228DxMgBOiIChnfXTdekSgKDDhIgA5thkIhQBgW2TR/xcJaS/TAdq2xGBn160zYwfnTvyYfuAOU8nwxIjnSzqQqcHQHQEdwRzSJ87x5XaMzqRsGIRHN7BjTCy3xtc3k9pMc57UXIyTCdpqTxAN8m2LVTEDavc8dxIOdYBKHLf38Wb03j6Kevk0P6srlacC6Es5RTkTkBF7HP8r2kfkmnVvV8/myZiRCpyjMDIIP/4/BufeE7IO3CpGdWTe1HFsdvkwleVn/sWyRQLnyKjT85wwa3ZrhcIAqtCZ7WSybtVm5xcMZ6eVkFJP1Mga/5aal4Yedi6EeVuBZFCfJ7KIufOBO5vVcNXIr1DNaFmTXzxtdqaqfCPTCuIStjFPYrLiFXTiOYZOWQcqfewwwdNdGO3jiTdK2EXYMvXD8Bwd8rD0FhJbHamx27MYW0eW2MrZTRUF47MfOFAv6fCu0mz9eZKeJH9mSeJLNDEZqx5vs+vEnu5F4lC2LT9nJ4lWsJdhoBnU8i79IIM1ZP96V04cTbzQ+4UZit6Ye05RACNwKJaRWYrRZ+mQorT9yJKI0WWst/OtwSq0wKCvSBTZ33Iu6bsKkJ/XhzlVyFzxS9PxFkk6OW3KxChseGaDkQjxBzYjTikZmMJ8oZDIkDkUC5OLF9aWSqnirZaJfYuhzyar6lTXvY4/TissMCvGnbLY6LkfYX7V2ehcnYtLX/NP9YP46ItbgXt9jnXF3BZ81nOidWmmX1k+dYK2ke5bRYnGn2koIAEz1nCbe/Ck7cldwC/2Bul+XGWR2nhojBCsbuYCQ7QcMAFK6y/OZYyutijbc5ghtQptvz0uwGiUf6ps3/jXTe4qEIZzord8rtdqbPPUEOZYwlA5KGDTbjAIAaAGkpBii92k8FySwWv+jMsw02o2Vu5c9miu/C3XBJlBbRCXsEnSkiYBtLYLsFN2tQgAAJFePTuTOo3ofqNo3SQS6Rw+lNO0BAOhZM6YySWEv+g9rVTdwcWqTjctSxmxcljbFJgER3OKVHdSeeEWYBYqaoj3pRTpcNK9TNne5xyrCE4fsljS7TVQVLofsdjT6TZlmpO6BxdP0Qt2rrScBQHMKr1HYajcAtCCoqTJ87STOnc+mNnqnPttu9NM25aarzOpPNSU1EwIATIBxFXJdKHxLmLKa2bkTAvJs7EGIjkHgGmLEOjFek1DzJifQ+N5on/un6B95z/DhGopvhoCvgg4BYkddkAajcCC+EPc8n3fp1zidEkBQ/DMzf1V1CBBQULz5MHFLpry2jvTk0H1I9KoeTFD8j/7NKdWb7bH9ywJuo9tywicjSgAmjPUoAMAczUrgne4hlut6WZRQF/7XXKbc1fTaqzzDgnujG+hwespIc16ilJzJOlEAgEpuo0TUix1oyC1WJja4ql63BwBM5bWXSTrIddmHNQxHyM0VmW3ILWfEfZOhSG5Hb4twycJ1949LMetqC//aEi7FVvsWByYqHth3NPyNzbwNv2rHOryTA2SxZMhE3y4jYhd4WU5208DtNJcAAHeFcDdUgztBxXIOwjT0v9EIeS8Te1SPceqI5foUcdZUJj03Mk6bUflyVqqcEEa76QMAmKubOEQasVmCjgTaXTWQggdeb2UBAOACRJSYC9nr9nAyEjExmeSdsm9FkSUZYopte4v7RfZzg31JuBFdY7oZjoHzM0UDT5rixKan+62cZfPilU/Ay6a6sYa7ia8q4ei3tljtvuHHoViuljBWjOULNtWMAgBsxO7p6iycgQY4AZSpPdM0oCwz0BDQFEvSBNhTue2q2vVl2lLo4URRZqUY0UplInN+Affo9XZ4AIDTC0phP0pBsmg0lV2gV0gem2IN7igva9MQ0twYV53KL5hYJW3DnOhSU8Gkllkp5rMG90Yu1u4wZA0nrBqlPXt0WjfHfvqGiGV8zPFM0fiL/JM9Sgez6EEO/rVACKx4sS3nB58Ix+wTS5qZRGKEmDYGlsdTAITEW08CAKwbD1lutRsAcGUIixg+lvoMEw+Ipay3Y9nDkLsyFAar99xYgplGQgAAkyJH5HDYlgaOwmY3yh9Xt6EZI6slXJembeC6dAgTSl91jUU+/MUxb4BbAuH+su8Nh/Pp2qBUonxe75NWDf0d8FjowPGntkSfglqlNh1391iETJjFXBsGlsNYjQIAKNhHO50ggUNpgJdFCkdos11nugZCu20X6EiErpoj6XwuZheuXT3C3ff/XIUAAIixOVFiXE5lIhePM9eoDQDweoUgXFBRMfvRHKThtJnaLUiTM8W6XuUHUbNcSgfil3TX6tXdWWGLVXJVFIq7XcKWWvTZFHgX2wvdL7JZq8Q2Z1EgwURw210KhAiXxUYxEJFYPJQBEbNbTwIAKsIjLlvtBgC6F4/iqb4kNYYHlGrumIFU1daKQIrD
2kwIAFycklpNXFogygWNMlHCNTRtC9fQIWym4co6uJgGObY/BwOfRe8FWnFHfhp/Okr0SSpVsr1o8rIs0Oi2nHAZKwcshbEWBQC4O7BX+yBqJcwsSue6DHgVrdwPam312RXOjm5wwNnTkU4I1xxrc3ODIrpMW4oFTpTo4I/f57DjPa1OVCEaK60NABS1cjsr6YTLnha+NpawIXMPu70WKhA/OtG0aOs05rWaRXDdIQl5daW7OZ60u/yo+4yaPPHSNVp9HZzuIBzvdvnG5JCa9eF+I3mqHhHKzaAEADbNztzdAFAHFPc5eySshpu5i5/nHRQoOA3a7MbNjNB8Ivq1PfwYvHLoBf3jP4UkEsEhQBBAfjM+wqxRvPR+so3Rk0+MSQBA/is/mocbe1xt1J8OT2J7T4axmC56oFIKACBgpHqjKgac2MKO8gRaDog0aCgbJZWiDhZM3LahKhvXFimLBCKfSDCz8QBADlAF5AXBnAHNxwVGuDU4/A2aOfYXvtI+wozFfKRG6aOrUXXVXhFVcjj+5rHoi90UI4FcNxaZuwrh7oSxxgQAOAHqogHlJWrOQVgeA9wxkLnamekBcqUqhwtrzikUpmLKxwJ26v/xru/PRcz/kVjcA3Rl5mOFjX+lZWl5JQGGWEt6wKuqrbkTeMx2FgCA5QJnoLns5VZWQuJqVOZltauXYOcMsarKQBAq7v4PVaqOGWIDWf0sPddH+qUat64l1G4vKkajQuNeFSc4+aifya+Ec/wZIhEkfLq29DRv+FV8P314ZJy4+59wPXeh6p41/lTcHOs3JowbAHdhrEoBANylpt4Drq8EWKufl9QrVZ+tIv5ytnWDDc5SR9oDXLjmGu0hTU5mvLBld/UxOGXOOlGun97lQz33JokQruMK84TehngAQHQqqZIbYT9GjigcGmU0Ta+xXmSoDLF5TQjwKQzfDL1VnqXDHvqwfK03nD42cJ410gVCUNW8ywGoumueejURso3/hXkesrd9kDJmm5jLPle5jrWADtxuPuxxhqj4KsXk05HOeodAkcOZmq6n2+/dt+K9Z+aEKr3V7JMiqbwmmOG7hLH81P2npvM+nyCVX1Q2cTYicFe97tZ8RNTeGzRBxk1vod9HY+oNrp2rod3boIHtgfIEhZZrNjQihgwoKMqKsE30MQJaqcITLNk96ddY9D+Rk0dQZCDGENVDKkE3zEd+tGxgqWLspBkrt4me2A9CyQFENxMmyJJNAhHQBC9ovFANQluAMVRY0Nmmztp27MmFHIquhxDXjWHn4RqLUVbvkQuKBpl9R/7miumajfeNHAFgzMwDVyuE7eaxK4NEleL0xhzbCXZWe5eSR8ENC7KkuaPHFaAzDoEOD+IKq/PzwSVKwBlsokTGMvB9Pq+9W0SCg2sU9MaAqMQhlLgRfeQJ8ZHJ7QAfzzb2cnLhchXuNEhXu6o3pJki2gBFuhCvE1bMyNVphUNaAZfdoA6vTit4GMiNRMCfo/EtsYcUtBSE7aCXCs701MzTDahfE9rFH0YODDNQGBPnetKAsPMNUoPC1WDF57qlxMpwe58i23hGPZGRQ1HmaTPGBjnugx9+mTWZDdbZlIW1fjaFNQqI3BxMqR0tnaddbdokhZYGanXBzYrXYqjICRWbBl7HObGegyGukNooFD8bEvQUXdYwI2sxmV5U39jdZQnMCciZIB3iMnV8VAs/FdOoaH5yH0Y4hMVPzNZkoiRwrI2EAEAuuZkYjmoQbgaOssiHa2WCIuTTWNkBGvIN7YdAA03XXh+uXq0fGZa3fzoTxrgK8NWdgCUJVsH/1FIllW66xtrkpyxX89RVK3cW0ZUfmmep1wzci/04/pFUVxUkTY7WvFEF2+Tnr7p8zmPUWNdv15b/RC/+CR5zMqG6T26ZRYpv78vNmIQKQWer1vrMW4zyFXooOtGE251yu85Wt9UKxM4wYfbUi6LA3u0oAAD7GKsoEMvSmUWoTP6nunfAWOrfbe2ZS/mnAK5mBq4ALrIk1+Kgm3cBAAyF2+NEUzfzwabi0KpE7esfeF9quCEeALAkE6X/WGvB7NQq3IymtaCowtiSJbt27kmMqby3CmqvqZNVD1h4SFRL1oWeuiHnkCjAzkT0Eji2RaAGgBPVsLowyUmjfvf5zUVjlIXLdW2r0+DtbicTANRnSE23p2KMpxYZgTi77v0cuHBlbcW9eaKUMNrqvV3z7yISFWdWJ2PTNKiR4Q1eWs2y1cZLZxiE1CAL0rPJ1oQacb8BnYa3zfmrkvheRxNiYvyJLtAXY1bFgO2VcsKHuxjYG96rUQAAkWw9nUaoJCkHuMJxy9W/veoz11r95bh0AxeOS0tyIYy6bbwAgBEFlmgKVky0ROFaFE0mTmiKyT0AYAeZFSalbvHjg2rcmmdxJg/sjUMwE48OMbXUDlo6es8dHmOLfOzfPiOVRVc2bLgONacTRxBQEzQhioWRtq17qSRVjQxQk9sGzdxcmQCwgZBjqkNDCYJsICMicJd7kUxchQ3MPn7YZTR0y9tp5QkpxL/p7A8gxBC/M3X+Uz36UyFDs8tNYxReSO/dSTzVoXXabqHFsIEppQAAA7RUN4o14C40O362LgtojRUbaEFHSVpQJV4AwMDURbugs4nDHX3JPQCgU5YaJsWSP6LpZU6CCMXXDVccINfTd1iBR3AfaPZvfE3q+d9oJu9IdvLfP5/JT+0LvKCj/6u6f/2/7cfxeKSrRZnQqFkTAABEhRm2RqGJHdRSDbiFXrMjXgTg2E8VplUpGP9M5Ty3m/N7XjeOpCmefWb9Yj/3iT6vMpMDAMBoXdxSWhk9iYP/dw2rzyY90wIAjNE3lct1KWDQlzTgzBtdx1peiYmptbxWTaRm2VXsjYIy/vPqtkJBmswPHL3a5/P1K8xD8BstJ6hP9XkYIbxTaMl52pkUAKCKRau4oYchioNuFEeAm9Y7SzeyZ64v4y0NC4PRIln2HG9rMKpnAgjTxPwuALBDu7iq6DiXlgg9mUjQwHPxAAAX6kgX7htkN5WgiqpNLi5GGQu4WF1KxNWp7wjXpQkkUKuHdbVnD9h5yBZsZ2J7A5CyCGgYoGKx0Vy+m35z0Ri1Agdd23QE8no7mQBA30I0tz3lZPFohRFwpo1lkgMXWMtVZDWmoiXU1EZCAGCqyq+XsHYNHNN4PrmYQTPI98H4QsvZ8MH49rwLOQixoXoyVTGZ8/NuqMkFuKCWh+WWYkp86M0l1csM40BFs/vH1emc8PIzKrnH1SgAgJJD5dSzk9NR6YBqpq6Kzzxz5V8f2r2SwRjaPa3pGLYoAGncBQCwDU2BnGiaZaDlVKqxKtF9gvu/LyluiAcAvJJp6S5WJw9z1huKUZre1W6UrKwhdrqZvCQXao5JhQVmp3jdrkJovOIUiv9tFISOJRRL80KfvlQJxb2CXpoonOaurhLBCuczwmrXnHBp1GhMwl/aHKNw645t9QyeqrSdTAAQuiG1uz0Vt/HUvq5B8Qo4NFtxZ4mcIiZqKAQAeJWmnC5WxdkM0lQzagZp69kig/TqEDIuu6LMRzLTfz5Y4ew
41ZUaYkNLdWk52sTQ/qAgFq4cMh2lRAoAoHUOxx9t5McgpXZJwFvRoK7uRnZUebMamau7uDLHM+W3U82qm1YxJuqeGV4AYIBuxUV1QVsrnjOWeFLdtxpOIR4AGC2Sxc8uoFAWYVhzCpVMi3xcXBtZDweqlkJDs+yKPRS8Xx/JMQ7YFsE7hJZdexr8sBrceKL6aEkCaEGF4RKibT8VSjL6rm+s7HakO6tQJgCUFgmMOjpY2cFVX1sCxe7I1dhhQ/0FiHeIBRq8KCO6kNA/F+iTWh9AtCGeT93zhieZP9ZDzHQJlcKRhgT9aUEPI4YzxbfeN1YpBQBYFeKPxntgkFKpBtyF5jJoBEocGIBwGqBLvABAgIqLKkOfe+ozn1jcwmI8ABBAFRAWBHMGNB8XGDHV4ITvfSfiPWsN/45fWI3ncyqNFwWir59qBm8ovsfdv/7sun94ZJZw8yjECbv6TcvsKUK/06/VBAAwO3O32/CQ4E0BA5rzXvbUPX4a2H2oqSpzHYWyd7/JDWU3k+LMWowH7xS8SLSW55+u+WvqqJ8Yk74qu+ms5aP152UPyo6y7eqAHFu3oOFtUzQktPLlJwcnJbiGAJC9nptQVJ8KXtxU5o0aEzEzqh33Rkr9xc9jwFezGy0Kir5+qQVnMLGfzDYie1R2lp0N58lIT4Bix6QrbN7TP4TI4O/IAQCGgMS4wFjG12RskTzuorLmpmD28Xi9zSwAgIqQxqW7aM/2y8lIWqzJ1Hn56PdLm3qhCuKFSjsv1MeDGx9vqSLglK8ruBMmiI8OdDqsrBC8llbqyTd2c8DeoCFXTRFv0oMWqC71eQ8X65e/zZrmPAkJ1qphUAfyyaz1xbYet5nD0jBq5jOCrnBDODrbz4kzd5RIMmIKJkuE0cJU+C1JQZCwv79I4kXwU2vKckf2FnyOVENBn8sN68mCOphY2qkS1xOQTYRmJIcj1QpUd6/T0frlmpFMnaBJelpg1AdlcFRNzG/XsDxfhawUd3Cy371i9TKi+u7yLAeLEKVuRgEAJGfghWII0Lg2eSAixBodUExxD3w2w/zbNtW4AYyVd3LtojDjwTvVr0dbdbETE3mqJnm3hgX0NWuMuH+0SW9ZEa1ewm6uJ+lqtjXZ7YSEdg0tqYWaxEyblyO40+XbZM+rgTVWmqng0K0N4z50JWxGOKfDJU5zwuUzAJ8NpRbKGm3fBQC0kUZz+RFnTyvaOv3Wk7GoSzRve6PimeiGeADA8s7ylQJY/wk30IkUdt+NTLPD3EKheDey4g0SAYXm3Si3a/RNH3HGfCOzh1CkeKEJZPAJqywvYmpSQLlWZeIUnuTjA3HhSdNKOCnZf1KonrTNFzPEk27qtrPHwJ2GMgFgQDChQd0jg0xrBFcLmmH+W8/Raiu78xiftM1mQgDA5XI3giy2ifMUNNg1M+NAwwrPHi80JBjEpKBVpstsR5SMshVnlyDLbeV5HIIALX4998nqEH0LJ0Gyxv+d2LgqWLFBdSGqVxemllRLh9qQAgBQEL2tpwd7JbnvFNUBRe4bRsisrLm5gJUZBMASLwCwoGKiA+jpYI8l7ideHKzkHgDQpG8PlKKin6mg72AFzRXw0lEIxntE904I+U4woTvp6TthhxMDU4xwXruL3Qdc0pnIxgA7wwhcmUP7qF3G5lpQe4zRdaCua7fdhXtsKBMA3AJda1DNZuDaukETUCphKGGd6Ux1evaGKBUCAA1WiNu/5Do4QaaHVtQJLLLHTtjxg50weTLQ08VVeENGuIffsAveL/3NK9JJ/9ez+s63uXVfDlAsMdwASykAAIEJDR1dsChIwolAAsrNLTiZQQAp8QIAjdrTorkDJ5+43NLFeABAYAsgg8WcAsnHC6h+14Fm8AJHCoDF6U672F3Gnj2J89iZTuyNb5KygsCzqs6BEmyjAyUYV0tXprbrtBq4UCYAUIsGjFo6WLnB1tKeCgW3NxPfkb2N74WyUH8otgj8K/Zhj2zF+xTuT+mBAJGu+M0dAJNCD1hetJ0+vwf1EYlyxf93aph1XseVhieGC0ApBQAAEKlewIpAAMzNDZjMYAMu8QIABU5UdGrWtzD5xAFrNh4AEMGSP6LBBc2GRq+DCMU6SPm+gPg1bf1nz2LCf6f+2Vs/78ag1gb+8eWwPw7YbHv/8H2768E3qoPIBxMNfN52UlagKvOhxzMmAMAsi6nmhmkaZ0cOLlgQgNywS1htuyd+9tCja32AYkHj06p6kh6GP3ys2xcONHCwz3+zBwT/UPh+qE62L9pLV/PsxJG92ZkihYZfaZsyjemKsFStCNcRM03WMluF3T+pi1016hs8Na/JAgAQ3BNvXFBJN82VXAu5dErTXfpSHpZrW1VsSOW6N7aX0TvEkl/G0niXVeWb+GXcGpSyOJPn+ED8uSoQHJ2ipx97AiY7P+4eTSmmS1M4vWLBWgVMKmrEhDUH6NKMAgC4CghHT89002+EGOuA0hmP/56JimuuXo4RSG2toKoAuyDl0zLY2cpOACCgOdrmcqKFigpqstW1iY4n3NVV76Z4AGAaV5v/ue6zJLketTmNmyqMZ4dh8kNJq71PcBfq+wZ3pQvc4WwzL9zgLOoTcYsz5AQ64Iw4m044Y86oM85M7bYuXJ3bUCYAVDHQSaH28Gk6VQ72MuieEooPStlYzyw3EwIAloCDDVz6GlaQEszKzGx5hzJMswWevTXQLJZBbBe0F3aTHXjuz7R/mIodlyrnpwCcYeUFHoK9hfr7up+YStr4YgXGGn89579bGWszTYeooS5M1f5lDrUpBQCggDB6eqbrqSDEiAFKtT6J3zf7wmRlzc0RNdcL8g7SFXUBAIETEV2Q2umENZa4CYey7phLxAMA+9t3Bxm5rCN7G9vkQuZO46cTdDBuGkI62PIdoIPr9RQs32KFk4skWGHpRSIY3kXe/mWFCKkrkSpLbtseMBXInTHCpi8c7aROo6nL5dEXc5L72LFtrnm70WgmEwDUh2f6zKn3eLuZUbYi8O1gUiJwW9NiZTbx7Z1QaUIA4Jzu76ACXNMMvRMyzJRzPA52nK2DE0HoIMbmvCBcr/DsNhgeKruE6Ca9+TCOx1bcy/6JEcNB7VwKAKADINWDchXIAzo3Z4ATgzyUZnUBAB8IWUSQ+USBU4oHAKbb9mHhCKAGEzY7BXX34pOPBZZ8N9gMHpBCYHmSwJx2kbuslrYEr00kjyfdyn8TmJyYJhtvEL4KLbaX2mQlY0Grye0JdxSLZQJAgcWpReMVBNum54/BJUSgJMdOd+Uy3c8nfh3zeG3D5pTKBh7Niu8UFEjigQDRrfjNPQGzT7aXgmubMoh2uhLZiv+0Ezhfcuu+XitoMWxgSikAwAIdqTc7CS1QDLhiBVhQuTmByAwuwKIuALBgR0W99zgLkU8kmNl4ACAHRgAV4CNnDlw2Jo1PvgssBjcI9h985ZvABYfnoodcHhVek7btOXFqV/NfJQIfUkYCwBwncOVBy8o4AwexU+KahxAUTJy5v0wkKnf0cwvRL+RCcbqn7X2HtUneuQlw4Dd6B58qQphqyI
oCpsLnByyELHxcBqv3m9k1DSt35wgtp0VCshxeQY2L7x4fH7lYW8GiNNMGY8WOhG/9+kNCPwrM4wjaXltrusfwlyLw+3m6Rd0/rdG0dEtsjIvJKA2uxet6UZJx/9uGvRMcOs0p3XpvVHWnQTCE2M0i9G2wLDXYDHj++JYGXxuEKiaC2MzeAAANlJi5P6zI4p1FpckaVAX58wW1yjILABAdJVnS23bWp3/zXvPTgAzO3OmA0AnExPD5ApI+NHl2znku8VcEnhrgsCfWZqPqZhyExJpsGlnZbTrFApuUwIEAsd5871J6mSZocc3eWnPbrJYVRpH7k2bQS33f2RBtOfsU7uBuCVsBIAEObjDvzhnnLcfmOwmBsH/+Nj44tSsQDdr3hmxakH+dVlZvcLJuzERhN3O6AEAHUguiXB276gTKJabr3OIgS3sAAC/KEwVK4USJVmUqv3m0oBw2L4JddVITjO0EM+kwNzJwZ8EcDHefH0bW+M8JpsD+MgfBVlic9Kol5OBqOWYUy3a0ouBoJyXajnamROPRzia/fbRbni0TAFxcnSGrtu2lJgcN2jROVcLpKce7RZ1EIQDQs2exNMNy2HIFW3Rmmb1oR+NQU2SfDvFqCAgaTZOleSZNF6inksNAYZU3h4kl+H6sGeLWlhOxE8BvMlcAeJwxz4TzK/0DKAp85VVf63Nszs9qHBn0NZ1JXQBAUgmifNDEQLeHHEssGa8dRHIPALiMKhwohY1LOtpZfkUFzQahvIMJw5dWH9ZiYp8T0EHgExLATx8sFnT49ylXhEZ4aXlLWBF2PlGs3IkkICv3rGQjK/euZCQro5TdFjcNsFgmAIiWU6walpzNDwayJMqcAm6fvaHH9y67ycbwgjmud4YADohl5tJGfSdElO3zAzeCqGjbHORImT2Vh0OfTLb7Gfoi6MT95+gchWniVgBIKIgbzLtz6s/cyygLFoEFdG4uIDODBaioCwAUSFlkkPlEc8sU4wEAgCN6CIY3QePjY4OVfQiI4L3+dTuogOL+4orgCi8tvQsrQsknipXbIwnIyp2TbGTlnpOMZOU+Z7fJTaNVLBMARMs0q4Ylc/nBQKZEmVV4djc0c9Tm4/QM0UzXvKOj7knsCgCJT5Em5wCi8PF8jRDKkLQ2bg0R3G8ZPYrAJ6ErACRoxhnz9OvfI/0XpSqQB1RuzgAmBnkozuoCAD4Qskgg8onilhTjAYAGI3oAOmw2+YYsXDYWjS/7YLDD6yVXH88iw/9VVB9faeRxgWQ4h+fuo093U3zduMHrMYS9tVGHHp+tXf/4s/66Qc64BAjnCugH85mL+mbOpz9eyEsGCP/o1dV168ARv/RTTB+WrvkfhZzVm5P1+S2Aevi7yk9nxi+u4Y0jHsDy1WXbb0mK57MC6ZFuzre1vHm86pnW0ff5zL6h8a1RzGxpq0HWReckhAEehKlDSaCz640XRBhHObRGfLWH+9iZv+xZPk8jbqJSBD7B26fBxvL+CWYIZbSlr5q2cgAImt8MEMHl5cUs3pt/YLno3Ja+rxDjBfqsA9WdO/UfHgm9U0AScdECIpPQcUfsaawQIbaClNUixHRVf5VNsX3vltT6Rryzw5PAR0hjG2zFSNwjy78vGlcmFIfi/7RtDouCcOi9AeVJF41iZXmDwhnIa2SChm7i5DSe6atTo1A9RLIMRxvdS8ti82Kptv8oLWCokHi2PVWCLPOsMrtn7LBcJO8h4qL8uHFqFFmZlJsIjbKY73pz4dgvfeqzqC9fFjZmsT3PMr96oBEo8Dw7Z+NssR5aHRiECe1z26Ttnba07UYOxE+KtcKeo5b2h8flrlZ1rMPWxPBLNCGIZvZDpnnOrQH+iJx9SsIW/hErAnMZQkMeYgFiRLc8efXqp6io7hY//4TQObMM+ouARU+PE42pkK1aNs4kNZrjNAE5iyONEvw4k0XZJ4iSjmEzGFvzY4PTVy+wOk/jnQgwkuiRtICGRXx1THpWh9H5JwGxr1xaO6Dv3P8vQCzvMT/YnmHQGLFt0S1baAQeUJmMyCof5+BCBtejn1KMd3HDSBs9zs/JU1MAPstiWsggKtS0jm+1XOQu4I67BMhoa1N9d4HwhtF1JolZbcvyIZz29HMjpZ0HaYqmde/e3/cQt+E8GpZYfDWUvGLiHEGcBYOR0InVd+oxYVAnHWGqXHgThUBWGX0jhRBkafUO0z5VXf0ifQew0WQdTAUEZFvyMgrxEyk7lFmvoGNl2kI5aMOWv4yadUNeOBaNMclWwZS4bTqq9NU70aG0KbkVJd87qcMvTsjs5tIK0F+h+khp+3nAs2LdkIZZ7/NWbeOwSIbiOSgAkRNqI9syb+CTO4z7ltVFMQNI5ihmkPvlAMyexhwsvpkOgbOuEnBqoLSOoNoGwnKQZDF/tQD6hhrOmknWhZWFN35TV6CxcnYlWx9yvdk5at/aF7Mufav6pfyIHW70cETC+6mJO3b2XVdZbDT+4RzR4q7IIbUjP4snYAc+qxArLlgKXbwftR5Fo6ism4Q+eMsNiKRDq+pb0UmGyXM7vPdf4HtZjhUysUu+4Ufrpo/xN3o77Wm/C0v0tbC+SNKf2T4lvqbjpZuguHRQ/0LPCQ5sfNakWV00E5rz5zD/VwkxX5vqwB+WRgqpsC3exU7KgQ9LMjquAtRPtaGvK/N+L37YhHQobzG4UVtQqwwZqdC971msnIDuJavsfU80OJ/jsQhFOeeTePFV5bSEDulR2ZY8IVQ5EA/4PZK76HPdBrM0BaM4AnIjZvuOR2vU+OFL2+piR2aEiztG/bOhs4epqW/mGSDmnsDx64LK0PJArgnvAyB06xpQUizS3Op4ojUllGHhW68urdC6kveybQ5baYyIs3R509neBiXY1W3FHsvdgfUoI4h24QxYxvowtHOvfQkM4+6elTnIg9SLBACHvii1xyar3rfmcwtZ+IwMOTATpTcbCm/CMI+OAUfflfcbM74EcJv1PLJvuJSQGxXbFrJvXmgGmAKK/48WW8xC0mDEFZDFDmDDBgDRHhkm0H/NvGo4/1C4fg84/ptLLBfHTThzNzQclA+hMFHOvE1RUM+QdfU9O+824H37x0DyAwK6OjRScgDGXmVdb00UMSgczbDUfWKQ1cRoV0ei1TsrQ3G9j1LztOUSPMeWdypDn/lkgoG5cENiHVRgBscLJJAd5DMIzDmYnpjoZ6R40TclwMuB9SNPxZjonFUuA4QbB3qrPVQSjyKPUVC2a+VGH6c8clSJWDHt/BKMGwLuqiBFRbC3JpjbXclPl6dvpjjNY7O8dwohvhbuz+JAYMC12kHQ13JyXW6dUrLt014rMOyKwWvX5y/0nu07rg+P1PMwzxCt4lK3oZhzoDQG+lRcM+QAROm7EC5AYz7Oc9WW9ua9B+0qXI8qC2dTbduiPW4iSoXqEjZa5DSgUHenkagUzpl01n1DO9bBKX9WtjNWHYx0pcQvi+OwVViXv3EDM+MDkzwRtlD4sHVZWh1hYanDLOWGNM5OZZHX5qNThMczn2MDD9JNJCqGcx6hDG3xFpeODwFE1LuGM
EV688CcmYYadrNT0G4W/6+Ub+zvjAyMeTrqaJDqplCQBvouYJy5rSzXWAnZKNdhWxD1kZlS10bwJxtGdq7EaHBQOGUW+sO/QRUf33XAt7scJsOA4YoB4DJAbwLqxiR+ooiA4mHYqPEKBqkyHBTrRt5716Ezqj/LJt/qiYlMr9XFe/hCFOCbbnz7/5OXy/7v/WYSYm32/Y31eKLfMx7YtaFMbkXv3SuK3n1cv5V51Q2j8AW2LzD1X9OwQftuaxk2Z4xOFno+9Eghp/dD1rpVwUUw5R5uNHsMG/JwVWZP/xdC5tTtVmW29f8IZA69u1Pm3hg0X6T7AuUW6pWUNBY+rmmkBQP92TcThwZ9GTthaHAMzP1SEJDX4iXFEtasQ8Xyqm2l7vCvPJy/f1SexQlKwTsJXd+8liyiAsImCVu1w7VC9wYhkdC5PVFm7MiLuU3AGQOsYvgZZ750SPnXYUOJ1r3hXGKwcMa+RFtvfQYASMzmbijh1hslJW5uF0EOHAdf/O1AaR34Nq0LRP5+srV/FnDO9utyH6fJcPzZRthYu3+6BX1Mo+lu9ZnCjiapu3MG9Zd5x+LUtnCXS2lL7/5e8fkW5AWLghVy6UieDcm8ieS/KO01cQefBNXAvCr+/Erltx9ZPM3n4CNDX3iy1o+edIxKR10xkFGBy/tgN16HATd354Ntvc4Crrj7GbBvFEHhde990IkrYNFWOx907RWwKMVu/2Egv3Nbu4tRD0zgOSRgo7dkjQI3eYU3Vu/cWgVuZiZsX5GI0RjvYG8rC/5by9wBMOzi6GQiMN+LsPzNR1idsnMDxVKvHwUST7fWBW06vQdC2p6l1OUujiqg7isi75agyG1chJErBroNN3x1pSi+7hVO/VkzFNvkyPpHseqaPRsqhmXyD86iR2R1ETnkUDXAb/ahKIDwvnQ2JnSuKf0kq6frGo6tinxL+M1NZ2nz/S892jZTQV1I8XQQbdPgxUQo0dleRNAdGbOkRv/pqjcN7tX5gs0BJab19dmZRV52FwXQ2f5PpL7qBlmVz7AgOkW/EwoJ9T5phszVSWJz+zeMJg1X1TqA7aEj1zVCKaw6Q2m5ZmFnTh70ZkCp+LMdn7GDexKtffYaiGAKp3F9wTMXnyVEO+vxGLqkKz1zDIz1cZoWg3h6K/Wz2zCPDdEFBVLyrDkXvyH3mWYgyut3p2ngb1AvhL6PYb2jn3iLiDW0F6dEIGgBh95nKIXWhEZf9/OuyWeL+go7Db29szLNCiIQ4Pg8MvdK8aW58hT81DxB9NGhmMmfbye0Vjq1V5FXobd0GwsKa3vdQoRJ/elfr5sD7Lec3tnv0WLHAWP1hQOgIBY0VfH7cIGLTKH3QkhUwl/1prK7dZ9KwKvojJHoCswqB9g261c2Wf1KVmufZ732p6zYPm7NvgxuROpniTTOzuisi7a7ePQCtn5vmc3fG9v+/bELwE/vCvBhl0B/RYKGIIc8/+IrsXRmEGFyrATdRSjFpS2qKG+oZspKLAYTZlc4kYiZ4L0/ifvV5u7tKgLupJKUUMN4Qr9rDA62qbV/bO/fvOu4u1soIUGoiSzbZt9PnUVT6i4X+NfgXnMlLRbSJnxra3/TCeN9nJFYthMfiiZQXUfUR+5MeKv2tRVxyyeXzmxSDkirrlIswaW1Xg+W8ok6qwn3Sg89hIDSMJJJ8cyu5kIeNsxaC/l1HMJ5LhlXnU5Kbu2i4dmjgcSUQz/QI9kM96HKMZgEpasGSicb7RrNWVsZ7UGT4nMT7r/KXQPcOUbRsZrzzILO7McWf0+njRpLZYKJwXqpO8/47edBDVtdXmsJyoo1N0omdrbcBowl1Rx2Ae2vmE7e+U45wRh2Gk4kmrvcTIotZxmd6+ydgSPPyhlncr++8/kM2rn3oS43W6LOYqL43KlTGnNfhoZHgamydDvpqfSGhd1Erxm6ioki/pQS9cubOVb2tYQWli3hdXv82KMUgC1xLPRasrcpFkyWjeJTMQnMHBlT7XLaCaAcfW7ThabAcG7v2SbltiRfMorVJ6NY3q6MKK6GgZkoLVFQwJps9pQ07UkKLQNbIuVeSyartBTxqJp4kdUSMYPciqmWGMunlvAckHGmgmQk6f6jnAnvTLmmwjGXVOyaE48nuyhXVORzQcUpKN/aF3NOJt4lf4ZJbMi2xDhacjYFGb5ZnC/K7Jv24q7qs22qQg2ynkHTpAA+S4iCFriMhXyj7GqrTYMTx83nsbr42nEBftX4e9OxdtE1xdcubHqWvbb+N+WVdItWCV/RK7VlfwkZmxuzuIRfppbY4M1xzev1HJL8zU/ngyH8ygQhNrEOhMKeRqbWNpeYB5imXCiTBSQkCV19Qzx0w4MSAWxJyT/r8pdEzy7edayZ8hitYMScX3OW7pM5BYYF75X3z21CsLTFE6rlQwwG5XI4sSTTCOv6mqewGvEbX9wHanzjcfFZNvIn4Y3pecv6wWsCp8BKwetskosegcg6wOueF3NOCF7/43xoR8m6vusefnk6m7Ie5NQLGl4bfKlRURRo/BZvo1aAh2nFMo3bDYgPqTC3LnqFRamDbiszwVu9TGolOe/XsIc4TlCOoGujF8eF4266/SUya/plKZ4OyXZUeZcSKIRH9gPOcULKypRgnGfMRcAX8OdxA3+mFmfWotduXZ/1Ze0/aSrnN5q8SfZPenMTH5F/mbuyJ6s6/T6/t4ddzI2LdNvPubteRd0Hnri+TiNGEfQd5DVkC1yGqqpyNJU8OiOdGL9l1+WMTLh9qmX6SENCI/iixPe9BQDzF0E0KLMdUMQuwQxjlvwyakEQkK85cs2iny+KgAg1MciC2pSqTrx4FQoEwGF5/6BIPgJFseAgVi08nQfQT5feKaF1hr1Seiitmy3u9632dd+GrH7h9r1NkEC4pkzZKardxOZUl7k+aQaTM5fQTNtMlNXMNNcySyypF4jK+mFrnqhy7Af+GjOBGE8sWbliTRUcuCOX48MKzfM2AlCBGAF8mHH6tdzGJibTvlOU1cxTXMs8RtmlTZpRfxCmUeGO0QasInacnWYmOAagMuFqtJxsfAQpTu2GU7wlsKuXN6PhM/MCIwiqEG+U2E2T3pXUnuWSYP8QUqk1zy8aAL4MhszAAsDPiNLJsTEMuMRVKFZVRtF5AP2fLwDVCMPZaVqGAaFQ/B65EQzNxBWq1D7Y/XN6orJm8PWwuwF6xWBqzfOzAUAZG5GYJM0YeIltIyMz1Wo0oCm131kMth2iF6bmoi0v29Hxc5mm302QqGuP2uhPMkZ8qvikxyTpP9jvT3kSETbS2/KUdX7s8vU9/LxOcr4ZQND+JfyUSBaHEF9AXU/aalnoWhHdJdqfq7LxqTjw5akAZ1cZf3VxYgDh7Bg/fioXz5sQQHFiKvhWJJFfmniVZtM+GFSAqqEadVyIARqu0ccJqF+NxsG+Qc1GSSAu3u1S6v4+ILW9eiAkS43JCWFnMtDriB/op2fgr5diB9l8j7ryIZ11xtWaqbYbvy2Wso3XH2lrxqnbTvhE3cLzZtWrTPCktV5Vl6KWz38HFnygZ0c15NGx
hg7KeCsxYDAs7gJNa/pWykDRydOenVQ7MDCxglrYOcKRMb2KfQva1zr2LKXG69hVwIudEY6Ep27y2di0ZylViKEaQVopkzXU5a0BuVN0jFqzlGoDXnyYODEBV3b+cGw+HrrotlXQis1aqp1mVRkLT5zgtGgGODGsANXvBV4nMFM9X/svSMGGtGV/xaHJSkNKk3xkm/Vmj18BwhDYa57v0dj4g/EwDPsLd8HZPrCGfi9AGDjbMDfNLX4J5RMbPIEt7XAZMo8cQI9/1GrZxhuPO+l7TPCFdnV1WF78PWcSuM575uC6c1qEGmDFjv3IGSy8LwQHi5YnslltqhPGHxfODDwNUSTNxM6O0wbFTphonFY/o3h2sVayc1dyawwXybUFNquPv3j+uMpww1crjoNIGnjj6hfPLp6ZNjRWYBLOnV3d6hcHYgzfYMXj7ISbeJydHWdXP7tAnJ1mJ6yEFkloRDwIBeBmmtP34aftFy+xx0nzBvHq5c8oiD/Iilc5fwBUsLXOlT4LnbdH7Z29chgyUo4bM7y07VbK99bWV/faLkgWjasLLhtUkscISn1+yuwmPnT2aArbioUVnS/+UYIiA3XXaGoJYNA7MSEO9Mn8oFrw9iZFTuf0pyc26XfVfGlxxffNfooVMfvW89XLLy8xog36HBOqgdg+QvSQqYf7vPInvdkR8yXGqDm/OqQPlKeMwD5UlPTtsMVH+JEMGXF+iRspKhPtV9GAP1JVxI8kEC9iYAorH0nLv6DhXvLG00oHBfJxKPRfaVkKjg6skyYyxds1mQSOVpKjw2p8Ln76Er4ESrIEYzonGvNILEJfvXKaTwgioV9Vq8AOvupzGHkkiWpJ4Jcj/FUsQ+8b4tODccWQHbvZrHL4GLQ+J7q6MEkyGUlhgioBBUlckLsSG4OqmUB2vbWCxbqiSm9MnbGykxyBCaQEcQvVmfAI3BR5/UcL9pZP2uPwMx9htEAgMOB5/uRa2pSZ+0dLbOVC4rD5FTieFImG5kXGh8zx7Eh9FdXwkAvmNMkW08oCPxAFKsHPbvbcSdf0SZpB6Z9EqfMoWwVablrFGkKf9filx2KW5MpkOHZSimXUNwcmRw1cpJsx+PSTw7NRjsLtZm9WvzkAkXaDq9Z7FuUOPlF1auBCxdNOXUVv8AgAWOUMGd8VyvV+gnOs5N8oh8F3bH1xynp+ibdvZdhPRj3Z2Q0jR8szntdd2rB2jtHoT8lfN9HbXfq3v+MPnCnRmtHzevX18pUMchJE/c/2b/eRx1dm2itm2v2VafdUv9qeRUGyzgve18SST2Hurc8AAIrVNaS0JwmXtKqJ9bU9HNsbRyD4Ea+R9di80fkzNuKnbXSn22j8jX0H3XjaPVP1mCVCunsp5BZYNhLeLXDEPVJTg8BUCTuj5Pgu8lJEf1tuj0+qKP2R3nKlqHRkv0PbckOYop8j7WkJw9W+ogUyCQ8GNK8QmCe7exY9eF3lnAGwYuhW7wYWN6IBALodbA5v3eudQJKNv0YUJzzKvqIIcsHaEraf80qQ8BvCq03fIugUAZziBJqiIpjHMNbbpbH6zpTBJzRcfabM9SBXEXsctBmfhKYRTQ2ixqAqo30vPb8pqsT6Bd2BvUXWr5fc8tN0KRnn7rpoLz9tZ4kZKykQEDBzBlfE5pGmMn7nC1a25PqmVRIj16kZY58LS+1dEB//p5NLJ8Efe0Fpkr666zJZYFpTgqVTC9LP4qCnE99Utw02jwZ6K9+YU81MAx7L8iWXMDgL24t3wNibQlDswuWUXYqmS56Flxo2DFLe8yZb/jIMXsxL+TTMY9dSdMaDwIulZl4N25biduCF6YnzToEb363cDTq7hZZMfSem0qibyQlm7hyFeEuZyYHn5A/FkyIHxRDMditEB8TRJycsE865COWfYEr8J5/TdPZJzZytUYRW20BRfIcyVG1i70qbepETwPDdg0E5O7AbuC/AaCR8eX4JmHHWgLdKOO7INkd07QLHoG2hoHHyNIe3BjknUy4pKryiDZ3HVdpmTa+XH4tw70AnCx8yhqV9uZpyWjxJimlSB6/AnMbBTn4qi+ZFywCwTYTDVkTgTWS/ZyJos5o70IeDlCV8okgNYe+cV8z9cz3ac946iwF1qqmsGuD1cPNlp1Ti7zW4zJoWeeHHvvm2uWlBgZSwzl1XwYrlJlEOfZB5KyLlIXlqmS9gMz/nXRiipbnc3JwyA7P2uHR8YFH8tiNJ7F63lnCe6AMjd7tioiiAAyqBQj+k0Nnq66aefjI3dim0ykTld6D5c8yOGSf9YaGPFRukT+5RS7+PmTamz43fx7u2pc/G71N3m9fn4Pelp3Xrm8HvC9Ga9s0Rv/kUQYJ3Cc4v5nd71GQfkRoB4lavWvlUA6P7njFi6aPGG3E6sonhhHpc6EiDDZOHmGkzSQetb4yf+8wbS1rEDSSd3E7JY0r3wfv1tJty0T9fK9EF0r9Z67BLqPEiKqdKsQvNeexIRf9MC52lBm53CxoAwFohCB+bJNf1xsURRMXgZqbWG5ElOvNyM1B2qoN9SofIXnXtTE1DyZ3tkt0yPYWECgcR1x9UH0KmjYFkfAh3bQukw4fY3eZBLnzIPa0bNIUPmWhNoQF39Dlxt/ibv1/15q9lD2su1W5huy1ntTf2wTFoOcP5FG54y1Hdzz7Ph5+4wRYyY4bWtSP3Mj10hcdn+6MAxBT3M54mf7b5swHwgAf/NLS1ufF8FR3XFktrLW6+FHb2iJKlxRk1UEFz1xqj3SlJmv2Th3CXOkfqDGZX//9Tom5m5UMA7NNN7Qlg7wuUSkNLqrKCQLC7CebggcL2hAAA3oKgk32IMSACtP6pHYhzajtU23j2tbnYOTyHNsgHQrHG0s1yoK6o/WvOQI1JW98+Ym2GvW9Ivj7cwTd80Xu3+NqzFLgO6PgszpAOsgsaovWeaFAFu3pxQoC2b2sUMkvr+63B4aqlnfmdj9NUwfk+j1eTi66bOM7fXl9vNQeYpeX+bvYR1dIO/1EbAFA3rq7qVs3gtY2RH5tXlX8j8PNFYB6jdrOZxd4s+i7Hm2ZF13ww1qz8zVt2detaQaDhrHKhgtl5qUeseE6EqF6lxLKGO9Ve7RMauA2LKmzHVCeIi0LCLl6dPHHxAlStnFHLJxBxUQTieHkBXFQvWAQh4pu7zsGZ/F6tCyqXIMTfUm76Oqtz+buX8VZ/V1su+gMMR82GrEajbIyV22eJd5U6Ql9KvCduWnNptZsofgV/0QHf2CG57lp9YxcCT/YIvIIgu61BXBC007YgGwi6a/OQWzxM2dhdCQ+DMrUHQ6zmu01gxnxIerC8JOJHxGLwNz8FWK3/Wj/JV7ei5asfI7lURUurkPIVxo+VzwtVzSlYU98N2HFJspj+yaF/AOq+DYHkqGNy+GGr0iE5Lz5+XIzEFoC1Znfrwbchro5KH/6qRDJZhOD6yWjX6rlCOsgNbveCemhArd56p478XsIWt3cEAO4s2F6+12uwM+ejPkHr0wd8d+qB8gaB1Sxlb4YmXoP2wSJf9WPUxWkm+tsj79+YQjqwtHTMtMF
EzNC3/z+U3SUOZPHsfeah65rZl9n4KDuX2rdQa69bZ6rk8lcJTmmRCC1wV20+cQCu0FUhVGAwUym/UfMdruoj8sRY6+L+lCSIANoUp00ldAdpD5cs1FfzqLh5VThYEYjIO9lwM4vc8x0BwJTaTp3u9Q4W7f5RHJkAz3Xh+Qz4zcOVRd3a6VmF9L1jeryDmOtr4eYAhnVc2ud1vGGP7depdwsaAFAnMawLzgquyyoICO7eYjSsrw3D9K9szyR3Owa9razo9/j0qJgtZo10F45KKYg8KuKJvHQ4ro+BI8kkH00CaUcTtbm/hO3ht/2JR1nRbJ/QH36v6d9N0+PS+/An8YfYjg4sObvEgiIkmaU00aCdlXVM+ANWaBvOdPQhjlNj4A7RMceclzPRMT2k5Iycf3Hmo7CeIf6gLx6nzXqWsVqRzHZN4n2XzZw5LW4LZ7o6b4sB654R5vHBn2VqgQKJq9wZgZNmIVcBFzAyN6C/1usuqKrlhzZ5Jyg8lH4JqgZ/zfpOFJCi7BgNK/jXjY6mcpPMeihr2NvGqE+IT8Bxz/o3Pkv8fzHz3mksUwKvaoRNjiHtJIAwuibNn1o9eOLZaJJrXEri0zUHBCWX5vvvbscnqxZfxL21J/ebAz8g2r2t7tBPgvycmL/O+Eryc9jb+oT55+aP/f7nSsYLfZkO9rcfULLUHnoRn9zeNnvzybW8/LkCaM/8YMnnxRFg9ZdTA9WT+ccp5yBWj96FNL81HMr8vrosYraKrW+N6rGqH5Zgq43Clsi/1myRM7zwurpVzri+lqFdUFth12Z4UIaqqY3YNMeQqDaRLQ2mM5Bgoe1zdh3EcraCAQBso2YmkGyvpCZjgbJCLY4/bDCkDrdTX5PIwfV5UwPlwhr2aSFyy9qd6UDTO9bLMHuC081C9gjOwMicD9yScbkzsJ02z1XDD7lQwZitYzQZuGjFGjQT69fo/Df60p3JcXAbNtV9+QvlqmucgsUVu5iYaZat5i6U47UPuSKkRduErjXpKm+hyDy85jtzvJLRPk0fXutHWxzVWRE0YF4TxxCJRuoN2tZouytn4wgZt3+4XQi709YTJYIwOHTdYH7DNnL3aqA4UfUpKztqAtk1ErnWoyOeWD4dzs1HTYYk8zpqf9YliV3Xf+zhgh0mfyj26tqBvTn/4xJNAKlIC2d9fZTXMzUmMO+h0qffcc6hpL05D+Uc2H8XiI658HSHio6nNdOhUmU7ztMGtHjPENG2NC7fktJQqdLOOAjMxyYHbE/t3+WAtbuoXWoWKvo77opxhZALfC6hG/TA7E8a2G2LhDUNnNDasDnAjVoX9hC4TZvHXvND7EME2M7+BPJ61NY+vKAcHyiPn59fjup7LFG3gWqeyqtVjo/Kv2d75YSgXs/f0L0dV7V0H9XnZ2PVURlFQQViRDZS77hhZxWSstK/q0wf73hwIhZ0M9AVclE5KHiHW87jPrzT9jl1yhB00+mEAfDgTocSJmZJcL6GEZK19nAcCGNehFhY/KkO/xc4JZ6Hd1+BfeXsSzGhUxYwxCHdZk8YwdJrLlby7+4rZ3vLhPFcdFheXolpn2TPRUXr4zidXUFK9WuurkiikQkE2l1b4e3dNawmVkC9cMDgmeex0ksF+psrARwBVJtPs5XQNaQ6WFIs2ah7SOh8qPaxdPfEE0PDmukIAOagRcXnhjIHjkD/3dRn5iu2l7YOuFDfJK/p46ieda4cfxQWxfwtrng8sa5Esc/XKGit3goGAKxDc7YOa2euvapd9kRfpqc79oU6uB+bzSJq8Pon+cn/PIMpE8Fn+6zobz/8kRnhlTH53tfosouYeppvt4udxjkXCzek43LugpTNTrRgenXBNL21fm72Va40MOpZBqulSYLtZaxXA7TE4SADZGr0JOOM6tjdQ+9Us2MePoplWQxELvkoZiUD4169hFKVYbHFSyg1H/DSdmb2Xf3+eGY2RrhYoovfNq+OzeZe+O2/pm5/xm7dhIqUveY8yr25TV/9mt/60RUAB79Djym0H/zqPgiK7cyIQRi+PBeCvd3rjJrfVZTvFV7SRsYnx9wMAAA49ELD0xh6EybYKpAonfmIsyifT+VBEnbPZjAAAC0J1M6yxot2Nq3emcUdQW+nNq8JvCm4fh0laEqu3QECKuQukSDDsEEsCjLbJsYuJ1u4CVNjWmxyE2bKXrDdxZ95dsbW8Xjx7KUV69li3Hd517ry4rAlTPXSn9feZVJzB13jupuOIt61djtCAHDcQMcq+/DsxXNKrH+zUHr8mh8FnGn/wc21FYsMtAW2x8tu4BJL9+BAdG37x0VDnGZbr2IRJPqK5I3+JKSj6jCpPTk57SKzG3p4MJwGHABgVOqf0sJcdfgUr014hQVR0Ku36Z/L0u6ScNcbzQCAHZxFeC7tUW/FyBbLp0w6/1xY8Fp16F0FHFUf0wDO7selCbByq5+i8Kg+PpI6BtQqmwwDupXLNB6NeU8HSay+78kxOc7uI40ci2oPwcFQ7OhIBuNiwWdDKvVrjGBCR4IHnTUM+OgCX1huZfX3xuZhdYd8SJ9VW7LOqSyJVqV3bHDy+Mazf3H2MPl3Qw+RlI17KunF/t7V69FvO5b6plxEhkhvBdJmEOfn3EuNAkjkkuAOCaT1mHalOL/Bk6OLMZG0SdEeneCnubPfVoP73RsIe7jIdGeBUzgwgeHJl25FJOtCybDwaPG8sxJn8X9oyhg5As0z985BIdVVkXDhMd2v8tjW9uDhVWb8tTxgcyXPI4D2BpCmEkpBqoMlxfIaUU/fCzp03lPVEeqbxpJouPMmAcAOdFU/+qDxYeii/34hvCDn8IvZLUh+7+vQde/1KtWhJ8y8OTo4zsUARbrBKYUBAAdtidhg9ge/tm25HPydz4t8nvn+1/XV4+gDLK83VjpjIOOqpczF2VU8B5w9XfAETEY/3x7nNhJuYTMYALBRtMQuuFS5SCVxASE6nBHt7O4hvarOG4gQpx4bcyyQIJOqncXdIXpnZIOgAKhP1cagrqC+QduCRqC+tb2A5uGhj7YOqMED6uxlMEewoiHlNz8J/KvaFPEuaSNG9VaqZ4XoVsJYCwAAnICO2REKYI4JigTSZMzZAJxh6IpBNRrCAADXAtOlrpZx4TeLlpguWHADHjQ41eeJB5KoYXc3QKNuxyR05bJdDmwoq+MgU4xswIfINWRkpm5sBGJOxmWaUM5jL0xr5U3HbGfoYeHsTW/z2OEwFiGRtYTNQuyNLkWO3EA/XRpZzABpTUJq20nmXPTf0M41fzSxpN1MV/Lm9GmWun4wka8557TnjTQ/jV5Z/FAT0STN++cw/gJpQb9/a2CK+WIbj94epPGsAgf128g/yUTf9/+f4Qd1fQWTQRQkDgzRJn0ZXUdI8yOZnCulF8dQENfXg/5WrWWZYg7lbxdWYrrsFdmT9s+7B3m22/rULbfUQnppw5PAzncU9K0ROTH1HEXnAFfVgAMAXDV4aPWCBQ/QCnE4Gr3ahhqAns0ejIGDhjeaAQA0rl3Zk6V/WWvM3eCeO6W6Pzbw3Lc16FxBRm9tjC
oAB88jp6ARYPYnLXJVGIn75jA3dKWtMSLQXW2LWQW5W/aC2dw8E2BHNsa9wMAkD7YFOnjOeMU91kFWELsJsdEXFbaIx9aynp4CO3NO7T38FEFZDMFaJVKHZl5rko+7maLwVgIAQMrXJhZM5GvlOf25UcdP6SSLOU5oRUE/5TnpfLiWJr0Y6PSdY/rcY4Ok2+bAhqfyuHQrMuMLpRND9vYF1rtggRrDMHsnEO5/SP3gcPtr0BhNFhzR3M8QXE5txWZgMqVTL8Zyi9Bz56cslre5kucRQJsBiCOCLIdYB4uLpQuiB2o72/bVHiriTkaFkOfvfQQAqbB0Xf04C50Owxz997qg2X2zWLxjG/y1rcfwV4Nejzpft1n47Mysd2/AkZGDCphmCX3yYwMJR/tMg9HSJPVvwxevOAjLFkXBAHjhtMXoZqRHB69J37fuMibtptAJMD01S4MCD1PojK3sJTzV07h6xab9G2jP2PtdELeufJ6J6SrkSFUGV1qepJ78PgT1NGmRaWFXEIUa+IL+d+bt/saWzi1nvpiuw9mPu85YGaIbzQNqb2+7R25/2/ljqmm0iwbkpvpkJ62jbiN0RqW4GUqE35Void4EeUWYSkU2lgWSO1i6c/cAKFtaCGooESK6jQwQEG3jAhZTdCDGwA4DTmk7hgbgMmZYBCtvs96bRoO/WDPrexl7xE/a7CeSN4fLudoIAAAcQTXUzgQUrAnuFYcTPr0cYx4BkKGfWx6nEnJ31oMBAKtQb5fBX7Pgq4UZC4ViesnhhogHojScqdAz5Kqr2H04YuVOGjq49JdTvxd6UyktBHL5B5oGjcywoWnbuAwFTWc4w9C6ytF3HK7ytFovsBO+xNoLUdW2FNMQCltvFIGBNM0BAUDWYIr8cNPQ1o7dv3SDSRuW04LKGIuxQXeb47kDZTHdkaBUs/0LEbyg19SbJmit8BZePYlv7gXxsa7SOX7tOqsLQKLSJa3FAQBML2tgyKW39b6k6xWc9pIB7WCIWaeIc9Szd0Bim8wAALHAYN+D8cV4Phdn/bEkpeMOPZdcHtsIKjo1eKr8uwYxU62lJOAmmbycm8qYNSD3cyk8ML1mf2tANBmJiiAmxkbTIGuNi2tAohiD+xgYy44Gw8ASDx4dsPRrTEqD183iQRRBIeeWKGBuSz0X30WRvUh0ChVOVV3UwQymr+kjALgssGPtsXuYFWLqzpdpMCUZnWPSwDPvalEyAWZ5sUwtgbtrp8IqYMut0XYNsLr2ftsNzKG24urCxyivKZz+ahUOt0B1BaH5A/6UvpAAGj1AciXkhdQHS5rlNOseCrocrCioPRmloqFmPwKAJXQVPY71QofGQUFmQcjnSD7bWULPtYxVBaB/nW+CTx67Zy2Q/pscIhbDqc/HXc5ktTOYBwMA5vSVumrqZP0q2pWOUzTbiuh9GS9Jv+zl6m9m99UiN9e59qijRa8mdtZs0Z775+JV8uvdfvvx959eDWd5jS4D5VE7XX2oCFPDB6eb/pn8/KrGII8Et9sMBgBcSRJgr76As4CXWlw2pXaBGdXn7RLcRg372YIlanduBjTFQjIH+kbFjO4kDKFcZu3wTojEGDCHhzXYDqvwsFkeywLxKwj4yjBufeokwtknplJD4lYvKn4y7A6BUB08MSxIyThD20qrY6cZ2vbZgnbAPXvb8jcWc++qOxWdlI6eww03hQEAQ3rH2gVVAJxLcQorpTTa1OeFDRqqCJtKzkWzc4Gl2p2BL25Gf1mDFrSQgXBSRmbmmE5UMi7LBmMwhhXBTVk7W25wJxbt1hCPnDp3eMQjX4PJPxJjS/MpLczJ339PfwcqXjp9Sg1Wdb0R2gwK1RwQAMAL9kRDFBNNS5rbsMKFSVu67D1jNNcSTJ59zOAlGL/oGTZ+D1GBqhhPpIQSuqKM5xT5ZeJllHaBqyoAnflZSq6siAPiMONhsJClND+Zc3R9oE2bYB+/dlnYEehV0wkA0DucqJ7p2I8hp3IY6mXchYmgHl6oHSA5Xj1aQArPtgrUdpMZAOAlONs1bdcAOsXHWALorv1zEeReHbrHgOpqY3wDOHwejRvaSpv9hfOmTNexSLy7wkkbm3PAuRiXOwNbay+kavBSHdsoaOOlCCs44IAhnjsxwWCeh6vi5FVqZwy5ZczJ37/PBBsewoEXFbQIAdWcB0LG9feA4Ern1dSCyOkIuihPlr/TALsbBQAAM5CTLV8YaYiXVYJsLuNVZeER7XOMrvGMYy2Kp4DxXkRSXiYvOvknsmUPf74Pt/5LDzbvBRF5dFMYou+T3o4vZ/CoPoS5tEhbtx2EeKkUvoC7qWC4IubMcfCWEXyrvHRuO/lVc29pQLYYFdqAaCVDWAt1QU6beVi7dfZ57SIrefBrVwprPRgAEA3rai2DqzdznqmLYjjU4Ah6QRK/RhgfydBvTKxPfVlxqOdTOz6cQ3XTGew+tROATeiCY7wrdB49zbZwrzaFAQDX3DumVF8gHZBIi5sMvAxYu0LvkocPOnv12CyyQOpfkzOwfJfrSbkHfeBCKwTsyufiiIW0MewJxEobF6KD0xGxhoyBwFzt5BHbH0CKnhQLe0Zq8rZO+Rq/xifUN/3+O1WHP/Aqzjv8lBiawSNurFjKQwLjLSMg+wpKYDSX4vso87vMPVJvXPTMYJiTgDm1wMlwCZdxamHshYqpLXSmQBwJgy4LrkK9wXvh5Ie/uzU3k52dp8J05gISlNAn3odKmJs1YQDA6GVV1WLralTptJAjMAOY1eFc5WLdG/tUnTclGt/UsM8M9yY9qrSZE1UNgo8sOAtR11BAEE4rXSWoSiPwpygM/D7eYo8I0/DHS60XxpMFgyHyOoaaQuFFcDn6aTYvxTvwqf0bnfXUeJLvzHLMU7jQBtmWwdAW0sYsmtBWum1ZGNDW0F5YRHiLbQtoNShy7UutFbs6sW/70HE3VEWEEVXBrqeeiONSVoHmahjjQZGRS7Lh6cCe6v65AZCR3w7wbThNHfFmsGOgy6x/ctD1+OJ2Cux1CrdjLBoz7jCwiwa52cDNcF0gSc8FxYkBzE6sbcPMmOKpJTbq/aWtA7s4St75h181hHvDRI3bLX4Hv8AnZs1we9gleqMex7nx9a87wsOODpP0cySQaYIhAW7fZ+iD0JM1YvDlw+CPCxzUqw2Usz1gSqsEmiziHgd6arv5Wm09cbwbCJReCuv2ca9/IaAz+eIU7+idW2o9ewO01faevr9NYVo0sT1TGbEw8mauIKHbhSvLEVg/ikV5cdJovt/G32ZoRRW/LVBPKewUrN0AuIYdAACEehn1YibQF3Aau/CsEwyLV28djkvTTuFaG80AgIjBShVtYxCkPjYDCqr8m4AKatGjpbcw55kiL2TwVQjzXFoIVPgexViMKuDDa7Fvwyv52/herwRMt3rc/eQlnrSC+IUw0yql+3SScAqActPqJoSCHuMSdbZhiAEshgi7DcR5j8JM6hM/gzV5F2QSevQ5tsR0LP7hRaYdnHgttoqjPGfOC6EcSwfI4YHGucUDMCzWs25gmxdVFgfYGjujiyDHF
QPecJEtCwBA15j8frx3g1vYjg031JW5SDYFiGNdYFQsyNvuQkkdoGDrYhVvWWLVi+vOu9uFt0SbooB4y9jYJ4N37K7HSJvPOuKfbe3JhH08HtSt7j9+9QbqZ8ClZDM+m7o4cqbL5+vtihJeZTMYAHCQhK0trZ/h2hZFwZRCfh/PaPRVvqRf/JWy7PVlza5eqHJCxLSnxmk67psKXBU7mjqsJeANG5TqAjjr51QUNJKQrM1gAABPEoRXYTBw39T+pT4wrLTB2B17oW0+Mt3+Iw3rkI5sTnHnfeDsCnXzgvB15pFZiLhpqygV31S/QH5cf5LtxMwwFN4k8ZEgd+/TqYLssTiowBQVT9li2llRkaVNj9mVmT0V/zIwY64O/t1ZvelXqTlU+y/T0t4Y+ueEFpJMGBdeCqnQBUEiyshIgyu2LXIdrsgYZEzh0p48ZwFpSDM9ePKZx2qcb2XFbxXVoJH1wrwRiIc3oSPWgjtiVj/dqic9IRNJkLhpAABIFriS0rnMhDsSXA8XvG0EOmc/AwL9y/BzlatXOlRVUDMYAKA6SQ67vK5O7ltIQc48bokyqsTcqUJveyrGcLfU1YKliRJbhtawmJwxMnlRs88eaFdZKJUOMyzkZI+xp6PKyJwBHcfG5RzouMMYzoN3vrOAbq7kXdoW7W0DcrM4kUmvQyXHsqS8McQ+mRSqkku2x0b7b4723xDtTzHtX097tmGm1OKNdo9oGiPKGSPbY6X9V9Ae/+fEy5auX097PsVYqUU67UTmrEMGg7+lidW/NfIsVH+7FVnnNpEi3LaxIDIrYBALao3JBsTLWW6Cj2mg3XJ7GvDLZsd6d4degloP1cixbxxXOqzelfrI1fDiWcaP1AYGbqKimuhTupq0ozWAV5kb8mmP1TofNH6HOpstU29BqNr+1ZMwyq8m3m9VxPGrfy0TeC2C2qP9gs+cN8HjbNZtHb96U6puCnDz3OAOS/z//6LDuM3LooEJOeGSDK1qh6FXnwRzx7P3CC7VYAYAHAOhhY42YAhYq1By02fGSU79I2UZNfXh0wA5syybCqup/LvLa9rQu6YkNSRSoXrWaOLUZD46ZM4LZaFYhrBJlhwws/tXNuipy3TaZrR16q8z7WfxOX6kpOPkR/LH5LhnyqMm6LkUaK7Sp8teC3s7a2JUGqEZyaRTgDYardqayRw07jMuM71yHqIx216YYW3ejGgxzQl4Mz4reC6Ctly0CM4MkByuQy/ExBcV7hriiaxHnQHdTmr1BVWMISjFNJCNYDf5rc2D5poWAAAMotVNbl+E0OwRV5HpBjE4V+IO8lRdYHwTaGa6FmVL00kGLZiL1ZrZaBVfcHcS/C1WvU60lQL+FsXX67ci429jVxb79UEnbb9F9B3PbfR9mm4XwQCpgdcfymczv3x+ry1KHf2J65Oc+XjnkwQBlUyweHBZFMG1NPTrs2Sd1vVoQxgAcC0JBGjrmqFGuTASZbxH6CF/XR4EfoHi84xBq5WDmyg1spNcBe+Q1LAOR8mttVRVLChH70boYYjXITARBgCoI4CMQaOtas4GOJ4Xc3gdaLaixhSlWjI4CvGGeS1ZjEKW3pCOTo4ybpdMbaWy2MVqpRYNSp2jvqDAWw00xzLqE+atxoB5jHee4cr1u/zSCu4IfVI7PwAATKPmyDMNoeSg0IMY2iVTLVXyXEs0pG/IO2vMQONJ0iWxjr7d21HBRnJlVDlq8nmSgx6FlwMeo5BFo4rApiVRhbjHOb9FZ5ZZ5ccTjcsF4EmM4ULuXyzpQCtvtJeuTpVjo4nctU60B06dLnVwj1Qiq67TrTIRFa0a3+6/kmNzy9IzyqNSM1hzpV5j34syRfaxpJajX8s+5qVETdqZjTemZutGTkXNMMShwdTx/h2C2D63OYvHbhsLFcFOGWNp8dRVuhtvwVx5Fgt3x/yhCu8qZ965waYZyD2ksh0FG+F8sbpj/zovsmA5a6oZrh+vTDXca4J7yH2kCrgbpKwDABwgjhEQaRhgpfWgEGlnOsJNb9EMADgsoifbEhRqtg3Q439j91/vWFwSjUmq3TtG39H8H/wF/APQ2CUe03lS1nEpPl1ZnrBqnzi8lLWB39s77CIryfljhQsTatj19zZUJkJEtj2vrisrWcqEq+6MRpIBrTI3Q+OO1csId6fEsAmilHT1Tme6BbdtB2+hwK48b9gLiuA6kcFBn19OjXOn8wALOomQc3NeFRp05tocSkXk5I0xmBx5moWfxC2BN39U4wIAEOxo/19Qo6/G18yDyZGDzsBF/WAk/hhWTAAgLOuDyi4nbAbWyXb8iq00Oi++mVhhoYtMugq8SiVKb34oRcvpC9ZmWpjCufeKoBhVNIwFABJqW9nPzu7DEgqt/w68UDXdKWjD4DcA5hc6+IzntnIga9QlZDK5o83iyGHp6BJuvCkMANjB6a5ACAiC/fx0O49GHl+Kmky+J3l8fTxY6Q72UF8t1Ksnvxa2EBvj+sZ+ihZTE/sbtydanyXmIdfp44tiz1Wxu1A8e86kKnbP2uAeQ2tB+9zawbfWK2szGABw0NDuTHeBDSOU9gdX03v875ZflJvGfBZC16rPcyFBOyWdYaxcx9JECd2A3H2nlId/Ztqv/j08uiezqeJx3/R65FKDOJNblw9m8sZfJU+VDeLVQJ6wjfG1Ic+tevHlTKfk7WIMvkx2jF4KsIDSs5uX4vLYoHIIyvgVFHi3mu89j++9ozLX097gOCIW+z8FP+p/zcI/55ppzW7trtW2+UMiDEJfc3Cug2vOH8blAV0tAQ4xYAVHZOsjf022Yl2OEErYsm5kXgdKG/rXDlm8AUixRwfMlj7Gwm+AzgEYQUG82FFUa3SAHKs9zGA089sUNreEOzzHxwgm1jRwqz5OXM/djdB5PKECNVow4M7ITLrcOfXSnZvxfctPsjs7i1qcDSTJO43eyFQR01S+kvZQNAPZV21z6m1jkRJ4Lt76kgKCQ1rfE6L5Nva6Pk9q2PH/98kzgeRlQS2lz6h7HgnvS/whU1Hx/9+4pzZiuJ0ey2Ubyj5Fvy/Ms2RO+cV4SSAoxuzQhaB0bJBuBKdaLu15ImHjNHQnLV461I60IQwAcDTUO3yTBVNVEieVlXeeK3RU3PA682430M1MF/ydF3Fz07lxtwVh0dYAq2TNX5+1tA3CrnYlWp+1t20MuxafdaR6Yed6TMQ6XWHnF3ST3R4LiE1zk91Dm5bNgbZpi2iphjZHS2qvAmXY13Oap02Uv/Yv+xZgztpt/Hs949rx9dzxaSPrmACAfSbfNMjluDct5RofhOnh/qeDXJpIXdD7xUBMcLjaGl3YXDdQhtNWTGGhzOZf39QMSYWCUpYZ4rgnXXhsWU84wIMhh3v/sLO4CSPY6sJIFaptyk2BZvg7w9jGu4VgThvbOtlZgKdJPv5GMxTze4YnfI4mg/p4E2IxdufV40z9dMjaG16pqErbSAFmXOys0Oy3SoLZ1QWX3oUhQSb6+VASTvdmMABgRhJUp76gawCRPM8MvUGNnpbApNWVbtlHuzxR/dnWGnTrQNqM/pwT
HI8WctXjlve9fVIy08BaxiXV6/hQ8iY2QwqQO9KAwHq6QlevyrFW/Ary24cTd+YxnsDfjoEFbuSK1OMUy6HXU84oAlV45TXmfRX6y8P0BXb86rlUoIugoc78ic3pMjwOqq/+xa9+kPNkEJzSZgUAoAQgp3bIFYaSIBbWIKmVY2rQKYCAfq5egyVLSE+VgaylSeonqFvwrEOIiOsbTe9YrKg47VboXaAzXA1/gzjLlRQ3EK7PJmAXzirBBcsgZSGZkLsuYmbVeRtyUUVLLwyUQy6hM8BcVeWBxuVw6KqTk9VY3pyGE7UJGsWlhgS2r9NpivYPES/LVUdTKzvc1agAgCh34YcD5+Fphyu2xfrlnZPkD9eF0/nNbCjX3VoMqs8SAS9lnuECz6sbw34iEttvspoHHja/xO8Pf3xQ+sPQsZ8ewq5eTQqYTT3V7CGuiornatmFIK46bPAqgwv9Xu1pK6Ofuz9O63BaFwYAnKDB7oImLIiH502K/mnr81weZOSkW9fCuxkEL6aucmclVG4YUHzSQzK/471BicfMZCubJYQcjEtWGU79/UxJ+pKgLaRI8Wr1wfDhlb2rDMsM4VgWHwp/Mu7EKT7pBVbXCXdfoFLld2t3Wk5Fmt6/BFPsn3r1TeC5rPrX/bXqU1/Aqyzj62+bu05qKZzXvcmr6Z/jbHaHHhdQN0LPCpDUDznMD84cYIsDOh41ySQ+PyL1CoFtvGc5yB7cd1ceMtL5g48IzOj9iWqn66fXCJz6fU59sFvzdV8vZu+8R9lm1H8iEMLt5Q7RR2DFpYAXDb6lLY41MZaQ9BWGz7DgI9Di4Nt/0rQiFnYLD3NJxiVqQoiYNF1syNDlJqEDXZItluLAurjLPtW3NIqLBfp43MzYz+1Rveg+xBLxSKa+mGG6SeIj74foNl7QU+ObxseVumVFfgZm/TK6ewspBk21o441s+5zQoW4nXwT6WtiN3FPeVOA7By/tl82e6lfeRO/sKVEntveRgbCeksVuM6CtErgMyQ8H/65R2MfuUStYvA2qaPbaZJAI7KvVQAAjgDdakedyFW4R0ZEGxQ9GXXKNIkGvZs5p/L5+me7BAq3GQwAMIgEDKXDb+YQl8UhgDHPUysDJ9V50zGQ5f1LkTwTfoPp+L0LSGo3tgLCk0iCD9AdiMd9tjk2CHsAHfZn2KlSmEXMEp8dWjLVsiA+OxoJfWFFfPbarRtRyn6GvdmyWACETEzVTr1/UyaCifHpKmBSh0kEoD/IxE1qqDbxg4DBZLZIBABzjQxnaSFuHTI49wfe6CmJp5nnnN/bt5rVVPmX/aJS8b307zvTYJvYcf2RT90xxfYdeMvHsTu7zKnvjfeUxc5CWmM64RLjfNjM2+iunfqp3Gx8OWJg/RyH4fRTQWAFBw==","base64")).toString()),X8}var Ude=new Map([[G.makeIdent(null,"fsevents").identHash,Lde],[G.makeIdent(null,"resolve").identHash,Mde],[G.makeIdent(null,"typescript").identHash,Ode]]),Rgt={hooks:{registerPackageExtensions:async(t,e)=>{for(let[r,o]of V8)e(G.parseDescriptor(r,!0),o)},getBuiltinPatch:async(t,e)=>{let r="compat/";if(!e.startsWith(r))return;let o=G.parseIdent(e.slice(r.length)),a=Ude.get(o.identHash)?.();return typeof a<"u"?a:null},reduceDependency:async(t,e,r,o)=>typeof Ude.get(t.identHash)>"u"?t:G.makeDescriptor(t,G.makeRange({protocol:"patch:",source:G.stringifyDescriptor(t),selector:`optional!builtin`,params:null}))}},Tgt=Rgt;var dH={};Vt(dH,{ConstraintsCheckCommand:()=>OE,ConstraintsQueryCommand:()=>LE,ConstraintsSourceCommand:()=>ME,default:()=>adt});Ge();Ge();l2();var FE=class{constructor(e){this.project=e}createEnvironment(){let e=new QE(["cwd","ident"]),r=new QE(["workspace","type","ident"]),o=new QE(["ident"]),a={manifestUpdates:new Map,reportedErrors:new Map},n=new Map,u=new Map;for(let A of this.project.storedPackages.values()){let p=Array.from(A.peerDependencies.values(),h=>[G.stringifyIdent(h),h.range]);n.set(A.locatorHash,{workspace:null,ident:G.stringifyIdent(A),version:A.version,dependencies:new Map,peerDependencies:new Map(p.filter(([h])=>A.peerDependenciesMeta.get(h)?.optional!==!0)),optionalPeerDependencies:new Map(p.filter(([h])=>A.peerDependenciesMeta.get(h)?.optional===!0))})}for(let A of this.project.storedPackages.values()){let p=n.get(A.locatorHash);p.dependencies=new Map(Array.from(A.dependencies.values(),h=>{let E=this.project.storedResolutions.get(h.descriptorHash);if(typeof E>"u")throw new Error("Assertion failed: The resolution should have been registered");let I=n.get(E);if(typeof I>"u")throw new Error("Assertion failed: The package should have been registered");return[G.stringifyIdent(h),I]})),p.dependencies.delete(p.ident)}for(let A of this.project.workspaces){let p=G.stringifyIdent(A.anchoredLocator),h=A.manifest.exportTo({}),E=n.get(A.anchoredLocator.locatorHash);if(typeof E>"u")throw new Error("Assertion failed: The package should have been registered");let 
I=(R,L,{caller:U=Xi.getCaller()}={})=>{let z=a2(R),te=He.getMapWithDefault(a.manifestUpdates,A.cwd),ae=He.getMapWithDefault(te,z),le=He.getSetWithDefault(ae,L);U!==null&&le.add(U)},v=R=>I(R,void 0,{caller:Xi.getCaller()}),x=R=>{He.getArrayWithDefault(a.reportedErrors,A.cwd).push(R)},C=e.insert({cwd:A.relativeCwd,ident:p,manifest:h,pkg:E,set:I,unset:v,error:x});u.set(A,C);for(let R of Ut.allDependencies)for(let L of A.manifest[R].values()){let U=G.stringifyIdent(L),z=()=>{I([R,U],void 0,{caller:Xi.getCaller()})},te=le=>{I([R,U],le,{caller:Xi.getCaller()})},ae=null;if(R!=="peerDependencies"&&(R!=="dependencies"||!A.manifest.devDependencies.has(L.identHash))){let le=A.anchoredPackage.dependencies.get(L.identHash);if(le){if(typeof le>"u")throw new Error("Assertion failed: The dependency should have been registered");let ce=this.project.storedResolutions.get(le.descriptorHash);if(typeof ce>"u")throw new Error("Assertion failed: The resolution should have been registered");let Ce=n.get(ce);if(typeof Ce>"u")throw new Error("Assertion failed: The package should have been registered");ae=Ce}}r.insert({workspace:C,ident:U,range:L.range,type:R,resolution:ae,update:te,delete:z,error:x})}}for(let A of this.project.storedPackages.values()){let p=this.project.tryWorkspaceByLocator(A);if(!p)continue;let h=u.get(p);if(typeof h>"u")throw new Error("Assertion failed: The workspace should have been registered");let E=n.get(A.locatorHash);if(typeof E>"u")throw new Error("Assertion failed: The package should have been registered");E.workspace=h}return{workspaces:e,dependencies:r,packages:o,result:a}}async process(){let e=this.createEnvironment(),r={Yarn:{workspace:a=>e.workspaces.find(a)[0]??null,workspaces:a=>e.workspaces.find(a),dependency:a=>e.dependencies.find(a)[0]??null,dependencies:a=>e.dependencies.find(a),package:a=>e.packages.find(a)[0]??null,packages:a=>e.packages.find(a)}},o=await this.project.loadUserConfig();return o?.constraints?(await o.constraints(r),e.result):null}};Ge();Ge();qt();var LE=class extends ut{constructor(){super(...arguments);this.json=ge.Boolean("--json",!1,{description:"Format the output as an NDJSON stream"});this.query=ge.String()}static{this.paths=[["constraints","query"]]}static{this.usage=it.Usage({category:"Constraints-related commands",description:"query the constraints fact database",details:` + This command will output all matches to the given prolog query. 
+ `,examples:[["List all dependencies throughout the workspace","yarn constraints query 'workspace_has_dependency(_, DependencyName, _, _).'"]]})}async execute(){let{Constraints:r}=await Promise.resolve().then(()=>(f2(),A2)),o=await Ke.find(this.context.cwd,this.context.plugins),{project:a}=await kt.find(o,this.context.cwd),n=await r.find(a),u=this.query;return u.endsWith(".")||(u=`${u}.`),(await Rt.start({configuration:o,json:this.json,stdout:this.context.stdout},async p=>{for await(let h of n.query(u)){let E=Array.from(Object.entries(h)),I=E.length,v=E.reduce((x,[C])=>Math.max(x,C.length),0);for(let x=0;x(f2(),A2)),o=await Ke.find(this.context.cwd,this.context.plugins),{project:a}=await kt.find(o,this.context.cwd),n=await r.find(a);this.context.stdout.write(this.verbose?n.fullSource:n.source)}};Ge();Ge();qt();l2();var OE=class extends ut{constructor(){super(...arguments);this.fix=ge.Boolean("--fix",!1,{description:"Attempt to automatically fix unambiguous issues, following a multi-pass process"});this.json=ge.Boolean("--json",!1,{description:"Format the output as an NDJSON stream"})}static{this.paths=[["constraints"]]}static{this.usage=it.Usage({category:"Constraints-related commands",description:"check that the project constraints are met",details:` + This command will run constraints on your project and emit errors for each one that is found but isn't met. If any error is emitted the process will exit with a non-zero exit code. + + If the \`--fix\` flag is used, Yarn will attempt to automatically fix the issues the best it can, following a multi-pass process (with a maximum of 10 iterations). Some ambiguous patterns cannot be autofixed, in which case you'll have to manually specify the right resolution. + + For more information as to how to write constraints, please consult our dedicated page on our website: https://yarnpkg.com/features/constraints. 
+ `,examples:[["Check that all constraints are satisfied","yarn constraints"],["Autofix all unmet constraints","yarn constraints --fix"]]})}async execute(){let r=await Ke.find(this.context.cwd,this.context.plugins),{project:o}=await kt.find(r,this.context.cwd);await o.restoreInstallState();let a=await o.loadUserConfig(),n;if(a?.constraints)n=new FE(o);else{let{Constraints:h}=await Promise.resolve().then(()=>(f2(),A2));n=await h.find(o)}let u,A=!1,p=!1;for(let h=this.fix?10:1;h>0;--h){let E=await n.process();if(!E)break;let{changedWorkspaces:I,remainingErrors:v}=ik(o,E,{fix:this.fix}),x=[];for(let[C,R]of I){let L=C.manifest.indent;C.manifest=new Ut,C.manifest.indent=L,C.manifest.load(R),x.push(C.persistManifest())}if(await Promise.all(x),!(I.size>0&&h>1)){u=Wde(v,{configuration:r}),A=!1,p=!0;for(let[,C]of v)for(let R of C)R.fixable?A=!0:p=!1}}if(u.children.length===0)return 0;if(A){let h=p?`Those errors can all be fixed by running ${pe.pretty(r,"yarn constraints --fix",pe.Type.CODE)}`:`Errors prefixed by '\u2699' can be fixed by running ${pe.pretty(r,"yarn constraints --fix",pe.Type.CODE)}`;await Rt.start({configuration:r,stdout:this.context.stdout,includeNames:!1,includeFooter:!1},async E=>{E.reportInfo(0,h),E.reportSeparator()})}return u.children=He.sortMap(u.children,h=>h.value[1]),fs.emitTree(u,{configuration:r,stdout:this.context.stdout,json:this.json,separators:1}),1}};l2();var odt={configuration:{enableConstraintsChecks:{description:"If true, constraints will run during installs",type:"BOOLEAN",default:!1},constraintsPath:{description:"The path of the constraints file.",type:"ABSOLUTE_PATH",default:"./constraints.pro"}},commands:[LE,ME,OE],hooks:{async validateProjectAfterInstall(t,{reportError:e}){if(!t.configuration.get("enableConstraintsChecks"))return;let r=await t.loadUserConfig(),o;if(r?.constraints)o=new FE(t);else{let{Constraints:u}=await Promise.resolve().then(()=>(f2(),A2));o=await u.find(t)}let a=await o.process();if(!a)return;let{remainingErrors:n}=ik(t,a);if(n.size!==0)if(t.configuration.isCI)for(let[u,A]of n)for(let p of A)e(84,`${pe.pretty(t.configuration,u.anchoredLocator,pe.Type.IDENT)}: ${p.text}`);else e(84,`Constraint check failed; run ${pe.pretty(t.configuration,"yarn constraints",pe.Type.CODE)} for more details`)}}},adt=odt;var mH={};Vt(mH,{CreateCommand:()=>UE,DlxCommand:()=>_E,default:()=>cdt});Ge();qt();var UE=class extends ut{constructor(){super(...arguments);this.pkg=ge.String("-p,--package",{description:"The package to run the provided command from"});this.quiet=ge.Boolean("-q,--quiet",!1,{description:"Only report critical errors instead of printing the full install logs"});this.command=ge.String();this.args=ge.Proxy()}static{this.paths=[["create"]]}async execute(){let r=[];this.pkg&&r.push("--package",this.pkg),this.quiet&&r.push("--quiet");let o=this.command.replace(/^(@[^@/]+)(@|$)/,"$1/create$2"),a=G.parseDescriptor(o),n=a.name.match(/^create(-|$)/)?a:a.scope?G.makeIdent(a.scope,`create-${a.name}`):G.makeIdent(null,`create-${a.name}`),u=G.stringifyIdent(n);return a.range!=="unknown"&&(u+=`@${a.range}`),this.cli.run(["dlx",...r,u,...this.args])}};Ge();Ge();Pt();qt();var _E=class extends ut{constructor(){super(...arguments);this.packages=ge.Array("-p,--package",{description:"The package(s) to install before running the command"});this.quiet=ge.Boolean("-q,--quiet",!1,{description:"Only report critical errors instead of printing the full install 
logs"});this.command=ge.String();this.args=ge.Proxy()}static{this.paths=[["dlx"]]}static{this.usage=it.Usage({description:"run a package in a temporary environment",details:"\n This command will install a package within a temporary environment, and run its binary script if it contains any. The binary will run within the current cwd.\n\n By default Yarn will download the package named `command`, but this can be changed through the use of the `-p,--package` flag which will instruct Yarn to still run the same command but from a different package.\n\n Using `yarn dlx` as a replacement of `yarn add` isn't recommended, as it makes your project non-deterministic (Yarn doesn't keep track of the packages installed through `dlx` - neither their name, nor their version).\n ",examples:[["Use create-react-app to create a new React app","yarn dlx create-react-app ./my-app"],["Install multiple packages for a single command",`yarn dlx -p typescript -p ts-node ts-node --transpile-only -e "console.log('hello!')"`]]})}async execute(){return Ke.telemetry=null,await oe.mktempPromise(async r=>{let o=K.join(r,`dlx-${process.pid}`);await oe.mkdirPromise(o),await oe.writeFilePromise(K.join(o,"package.json"),`{} +`),await oe.writeFilePromise(K.join(o,"yarn.lock"),"");let a=K.join(o,".yarnrc.yml"),n=await Ke.findProjectCwd(this.context.cwd),A={enableGlobalCache:!(await Ke.find(this.context.cwd,null,{strict:!1})).get("enableGlobalCache"),enableTelemetry:!1,logFilters:[{code:Ku(68),level:pe.LogLevel.Discard}]},p=n!==null?K.join(n,".yarnrc.yml"):null;p!==null&&oe.existsSync(p)?(await oe.copyFilePromise(p,a),await Ke.updateConfiguration(o,L=>{let U=He.toMerged(L,A);return Array.isArray(L.plugins)&&(U.plugins=L.plugins.map(z=>{let te=typeof z=="string"?z:z.path,ae=ue.isAbsolute(te)?te:ue.resolve(ue.fromPortablePath(n),te);return typeof z=="string"?ae:{path:ae,spec:z.spec}})),U})):await oe.writeJsonPromise(a,A);let h=this.packages??[this.command],E=G.parseDescriptor(this.command).name,I=await this.cli.run(["add","--fixed","--",...h],{cwd:o,quiet:this.quiet});if(I!==0)return I;this.quiet||this.context.stdout.write(` +`);let v=await Ke.find(o,this.context.plugins),{project:x,workspace:C}=await kt.find(v,o);if(C===null)throw new sr(x.cwd,o);await x.restoreInstallState();let R=await An.getWorkspaceAccessibleBinaries(C);return R.has(E)===!1&&R.size===1&&typeof this.packages>"u"&&(E=Array.from(R)[0][0]),await An.executeWorkspaceAccessibleBinary(C,E,this.args,{packageAccessibleBinaries:R,cwd:this.context.cwd,stdin:this.context.stdin,stdout:this.context.stdout,stderr:this.context.stderr})})}};var ldt={commands:[UE,_E]},cdt=ldt;var CH={};Vt(CH,{ExecFetcher:()=>h2,ExecResolver:()=>g2,default:()=>fdt,execUtils:()=>lk});Ge();Ge();Pt();var fA="exec:";var lk={};Vt(lk,{loadGeneratorFile:()=>p2,makeLocator:()=>EH,makeSpec:()=>yme,parseSpec:()=>yH});Ge();Pt();function yH(t){let{params:e,selector:r}=G.parseRange(t),o=ue.toPortablePath(r);return{parentLocator:e&&typeof e.locator=="string"?G.parseLocator(e.locator):null,path:o}}function yme({parentLocator:t,path:e,generatorHash:r,protocol:o}){let a=t!==null?{locator:G.stringifyLocator(t)}:{},n=typeof r<"u"?{hash:r}:{};return G.makeRange({protocol:o,source:e,selector:e,params:{...n,...a}})}function EH(t,{parentLocator:e,path:r,generatorHash:o,protocol:a}){return G.makeLocator(t,yme({parentLocator:e,path:r,generatorHash:o,protocol:a}))}async function p2(t,e,r){let{parentLocator:o,path:a}=G.parseFileStyleRange(t,{protocol:e}),n=K.isAbsolute(a)?{packageFs:new 
gn(It.root),prefixPath:It.dot,localPath:It.root}:await r.fetcher.fetch(o,r),u=n.localPath?{packageFs:new gn(It.root),prefixPath:K.relative(It.root,n.localPath)}:n;n!==u&&n.releaseFs&&n.releaseFs();let A=u.packageFs,p=K.join(u.prefixPath,a);return await A.readFilePromise(p,"utf8")}var h2=class{supports(e,r){return!!e.reference.startsWith(fA)}getLocalPath(e,r){let{parentLocator:o,path:a}=G.parseFileStyleRange(e.reference,{protocol:fA});if(K.isAbsolute(a))return a;let n=r.fetcher.getLocalPath(o,r);return n===null?null:K.resolve(n,a)}async fetch(e,r){let o=r.checksums.get(e.locatorHash)||null,[a,n,u]=await r.cache.fetchPackageFromCache(e,o,{onHit:()=>r.report.reportCacheHit(e),onMiss:()=>r.report.reportCacheMiss(e),loader:()=>this.fetchFromDisk(e,r),...r.cacheOptions});return{packageFs:a,releaseFs:n,prefixPath:G.getIdentVendorPath(e),localPath:this.getLocalPath(e,r),checksum:u}}async fetchFromDisk(e,r){let o=await p2(e.reference,fA,r);return oe.mktempPromise(async a=>{let n=K.join(a,"generator.js");return await oe.writeFilePromise(n,o),oe.mktempPromise(async u=>{if(await this.generatePackage(u,e,n,r),!oe.existsSync(K.join(u,"build")))throw new Error("The script should have generated a build directory");return await $i.makeArchiveFromDirectory(K.join(u,"build"),{prefixPath:G.getIdentVendorPath(e),compressionLevel:r.project.configuration.get("compressionLevel")})})})}async generatePackage(e,r,o,a){return await oe.mktempPromise(async n=>{let u=await An.makeScriptEnv({project:a.project,binFolder:n}),A=K.join(e,"runtime.js");return await oe.mktempPromise(async p=>{let h=K.join(p,"buildfile.log"),E=K.join(e,"generator"),I=K.join(e,"build");await oe.mkdirPromise(E),await oe.mkdirPromise(I);let v={tempDir:ue.fromPortablePath(E),buildDir:ue.fromPortablePath(I),locator:G.stringifyLocator(r)};await oe.writeFilePromise(A,` + // Expose 'Module' as a global variable + Object.defineProperty(global, 'Module', { + get: () => require('module'), + configurable: true, + enumerable: false, + }); + + // Expose non-hidden built-in modules as global variables + for (const name of Module.builtinModules.filter((name) => name !== 'module' && !name.startsWith('_'))) { + Object.defineProperty(global, name, { + get: () => require(name), + configurable: true, + enumerable: false, + }); + } + + // Expose the 'execEnv' global variable + Object.defineProperty(global, 'execEnv', { + value: { + ...${JSON.stringify(v)}, + }, + enumerable: true, + }); + `);let x=u.NODE_OPTIONS||"",C=/\s*--require\s+\S*\.pnp\.c?js\s*/g;x=x.replace(C," ").trim(),u.NODE_OPTIONS=x;let{stdout:R,stderr:L}=a.project.configuration.getSubprocessStreams(h,{header:`# This file contains the result of Yarn generating a package (${G.stringifyLocator(r)}) +`,prefix:G.prettyLocator(a.project.configuration,r),report:a.report}),{code:U}=await Ur.pipevp(process.execPath,["--require",ue.fromPortablePath(A),ue.fromPortablePath(o),G.stringifyIdent(r)],{cwd:e,env:u,stdin:null,stdout:R,stderr:L});if(U!==0)throw oe.detachTemp(p),new Error(`Package generation failed (exit code ${U}, logs can be found here: ${pe.pretty(a.project.configuration,h,pe.Type.PATH)})`)})})}};Ge();Ge();var udt=2,g2=class{supportsDescriptor(e,r){return!!e.range.startsWith(fA)}supportsLocator(e,r){return!!e.reference.startsWith(fA)}shouldPersistResolution(e,r){return!1}bindDescriptor(e,r,o){return G.bindDescriptor(e,{locator:G.stringifyLocator(r)})}getResolutionDependencies(e,r){return{}}async getCandidates(e,r,o){if(!o.fetchOptions)throw new Error("Assertion failed: This resolver cannot be used 
unless a fetcher is configured");let{path:a,parentLocator:n}=yH(e.range);if(n===null)throw new Error("Assertion failed: The descriptor should have been bound");let u=await p2(G.makeRange({protocol:fA,source:a,selector:a,params:{locator:G.stringifyLocator(n)}}),fA,o.fetchOptions),A=wn.makeHash(`${udt}`,u).slice(0,6);return[EH(e,{parentLocator:n,path:a,generatorHash:A,protocol:fA})]}async getSatisfying(e,r,o,a){let[n]=await this.getCandidates(e,r,a);return{locators:o.filter(u=>u.locatorHash===n.locatorHash),sorted:!1}}async resolve(e,r){if(!r.fetchOptions)throw new Error("Assertion failed: This resolver cannot be used unless a fetcher is configured");let o=await r.fetchOptions.fetcher.fetch(e,r.fetchOptions),a=await He.releaseAfterUseAsync(async()=>await Ut.find(o.prefixPath,{baseFs:o.packageFs}),o.releaseFs);return{...e,version:a.version||"0.0.0",languageName:a.languageName||r.project.configuration.get("defaultLanguageName"),linkType:"HARD",conditions:a.getConditions(),dependencies:r.project.configuration.normalizeDependencyMap(a.dependencies),peerDependencies:a.peerDependencies,dependenciesMeta:a.dependenciesMeta,peerDependenciesMeta:a.peerDependenciesMeta,bin:a.bin}}};var Adt={fetchers:[h2],resolvers:[g2]},fdt=Adt;var IH={};Vt(IH,{FileFetcher:()=>E2,FileResolver:()=>C2,TarballFileFetcher:()=>w2,TarballFileResolver:()=>I2,default:()=>gdt,fileUtils:()=>Yg});Ge();Pt();var HE=/^(?:[a-zA-Z]:[\\/]|\.{0,2}\/)/,d2=/^[^?]*\.(?:tar\.gz|tgz)(?:::.*)?$/,Ui="file:";var Yg={};Vt(Yg,{fetchArchiveFromLocator:()=>y2,makeArchiveFromLocator:()=>ck,makeBufferFromLocator:()=>wH,makeLocator:()=>qE,makeSpec:()=>Eme,parseSpec:()=>m2});Ge();Pt();function m2(t){let{params:e,selector:r}=G.parseRange(t),o=ue.toPortablePath(r);return{parentLocator:e&&typeof e.locator=="string"?G.parseLocator(e.locator):null,path:o}}function Eme({parentLocator:t,path:e,hash:r,protocol:o}){let a=t!==null?{locator:G.stringifyLocator(t)}:{},n=typeof r<"u"?{hash:r}:{};return G.makeRange({protocol:o,source:e,selector:e,params:{...n,...a}})}function qE(t,{parentLocator:e,path:r,hash:o,protocol:a}){return G.makeLocator(t,Eme({parentLocator:e,path:r,hash:o,protocol:a}))}async function y2(t,e){let{parentLocator:r,path:o}=G.parseFileStyleRange(t.reference,{protocol:Ui}),a=K.isAbsolute(o)?{packageFs:new gn(It.root),prefixPath:It.dot,localPath:It.root}:await e.fetcher.fetch(r,e),n=a.localPath?{packageFs:new gn(It.root),prefixPath:K.relative(It.root,a.localPath)}:a;a!==n&&a.releaseFs&&a.releaseFs();let u=n.packageFs,A=K.join(n.prefixPath,o);return await He.releaseAfterUseAsync(async()=>await u.readFilePromise(A),n.releaseFs)}async function ck(t,{protocol:e,fetchOptions:r,inMemory:o=!1}){let{parentLocator:a,path:n}=G.parseFileStyleRange(t.reference,{protocol:e}),u=K.isAbsolute(n)?{packageFs:new gn(It.root),prefixPath:It.dot,localPath:It.root}:await r.fetcher.fetch(a,r),A=u.localPath?{packageFs:new gn(It.root),prefixPath:K.relative(It.root,u.localPath)}:u;u!==A&&u.releaseFs&&u.releaseFs();let p=A.packageFs,h=K.join(A.prefixPath,n);return await He.releaseAfterUseAsync(async()=>await $i.makeArchiveFromDirectory(h,{baseFs:p,prefixPath:G.getIdentVendorPath(t),compressionLevel:r.project.configuration.get("compressionLevel"),inMemory:o}),A.releaseFs)}async function wH(t,{protocol:e,fetchOptions:r}){return(await ck(t,{protocol:e,fetchOptions:r,inMemory:!0})).getBufferAndClose()}var 
E2=class{supports(e,r){return!!e.reference.startsWith(Ui)}getLocalPath(e,r){let{parentLocator:o,path:a}=G.parseFileStyleRange(e.reference,{protocol:Ui});if(K.isAbsolute(a))return a;let n=r.fetcher.getLocalPath(o,r);return n===null?null:K.resolve(n,a)}async fetch(e,r){let o=r.checksums.get(e.locatorHash)||null,[a,n,u]=await r.cache.fetchPackageFromCache(e,o,{onHit:()=>r.report.reportCacheHit(e),onMiss:()=>r.report.reportCacheMiss(e,`${G.prettyLocator(r.project.configuration,e)} can't be found in the cache and will be fetched from the disk`),loader:()=>this.fetchFromDisk(e,r),...r.cacheOptions});return{packageFs:a,releaseFs:n,prefixPath:G.getIdentVendorPath(e),localPath:this.getLocalPath(e,r),checksum:u}}async fetchFromDisk(e,r){return ck(e,{protocol:Ui,fetchOptions:r})}};Ge();Ge();var pdt=2,C2=class{supportsDescriptor(e,r){return e.range.match(HE)?!0:!!e.range.startsWith(Ui)}supportsLocator(e,r){return!!e.reference.startsWith(Ui)}shouldPersistResolution(e,r){return!1}bindDescriptor(e,r,o){return HE.test(e.range)&&(e=G.makeDescriptor(e,`${Ui}${e.range}`)),G.bindDescriptor(e,{locator:G.stringifyLocator(r)})}getResolutionDependencies(e,r){return{}}async getCandidates(e,r,o){if(!o.fetchOptions)throw new Error("Assertion failed: This resolver cannot be used unless a fetcher is configured");let{path:a,parentLocator:n}=m2(e.range);if(n===null)throw new Error("Assertion failed: The descriptor should have been bound");let u=await wH(G.makeLocator(e,G.makeRange({protocol:Ui,source:a,selector:a,params:{locator:G.stringifyLocator(n)}})),{protocol:Ui,fetchOptions:o.fetchOptions}),A=wn.makeHash(`${pdt}`,u).slice(0,6);return[qE(e,{parentLocator:n,path:a,hash:A,protocol:Ui})]}async getSatisfying(e,r,o,a){let[n]=await this.getCandidates(e,r,a);return{locators:o.filter(u=>u.locatorHash===n.locatorHash),sorted:!1}}async resolve(e,r){if(!r.fetchOptions)throw new Error("Assertion failed: This resolver cannot be used unless a fetcher is configured");let o=await r.fetchOptions.fetcher.fetch(e,r.fetchOptions),a=await He.releaseAfterUseAsync(async()=>await Ut.find(o.prefixPath,{baseFs:o.packageFs}),o.releaseFs);return{...e,version:a.version||"0.0.0",languageName:a.languageName||r.project.configuration.get("defaultLanguageName"),linkType:"HARD",conditions:a.getConditions(),dependencies:r.project.configuration.normalizeDependencyMap(a.dependencies),peerDependencies:a.peerDependencies,dependenciesMeta:a.dependenciesMeta,peerDependenciesMeta:a.peerDependenciesMeta,bin:a.bin}}};Ge();var w2=class{supports(e,r){return d2.test(e.reference)?!!e.reference.startsWith(Ui):!1}getLocalPath(e,r){return null}async fetch(e,r){let o=r.checksums.get(e.locatorHash)||null,[a,n,u]=await r.cache.fetchPackageFromCache(e,o,{onHit:()=>r.report.reportCacheHit(e),onMiss:()=>r.report.reportCacheMiss(e,`${G.prettyLocator(r.project.configuration,e)} can't be found in the cache and will be fetched from the disk`),loader:()=>this.fetchFromDisk(e,r),...r.cacheOptions});return{packageFs:a,releaseFs:n,prefixPath:G.getIdentVendorPath(e),checksum:u}}async fetchFromDisk(e,r){let o=await y2(e,r);return await $i.convertToZip(o,{configuration:r.project.configuration,prefixPath:G.getIdentVendorPath(e),stripComponents:1})}};Ge();Ge();Ge();var I2=class{supportsDescriptor(e,r){return d2.test(e.range)?!!(e.range.startsWith(Ui)||HE.test(e.range)):!1}supportsLocator(e,r){return d2.test(e.reference)?!!e.reference.startsWith(Ui):!1}shouldPersistResolution(e,r){return!1}bindDescriptor(e,r,o){return 
HE.test(e.range)&&(e=G.makeDescriptor(e,`${Ui}${e.range}`)),G.bindDescriptor(e,{locator:G.stringifyLocator(r)})}getResolutionDependencies(e,r){return{}}async getCandidates(e,r,o){if(!o.fetchOptions)throw new Error("Assertion failed: This resolver cannot be used unless a fetcher is configured");let{path:a,parentLocator:n}=m2(e.range);if(n===null)throw new Error("Assertion failed: The descriptor should have been bound");let u=qE(e,{parentLocator:n,path:a,hash:"",protocol:Ui}),A=await y2(u,o.fetchOptions),p=wn.makeHash(A).slice(0,6);return[qE(e,{parentLocator:n,path:a,hash:p,protocol:Ui})]}async getSatisfying(e,r,o,a){let[n]=await this.getCandidates(e,r,a);return{locators:o.filter(u=>u.locatorHash===n.locatorHash),sorted:!1}}async resolve(e,r){if(!r.fetchOptions)throw new Error("Assertion failed: This resolver cannot be used unless a fetcher is configured");let o=await r.fetchOptions.fetcher.fetch(e,r.fetchOptions),a=await He.releaseAfterUseAsync(async()=>await Ut.find(o.prefixPath,{baseFs:o.packageFs}),o.releaseFs);return{...e,version:a.version||"0.0.0",languageName:a.languageName||r.project.configuration.get("defaultLanguageName"),linkType:"HARD",conditions:a.getConditions(),dependencies:r.project.configuration.normalizeDependencyMap(a.dependencies),peerDependencies:a.peerDependencies,dependenciesMeta:a.dependenciesMeta,peerDependenciesMeta:a.peerDependenciesMeta,bin:a.bin}}};var hdt={fetchers:[w2,E2],resolvers:[I2,C2]},gdt=hdt;var DH={};Vt(DH,{GithubFetcher:()=>B2,default:()=>mdt,githubUtils:()=>uk});Ge();Pt();var uk={};Vt(uk,{invalidGithubUrlMessage:()=>Ime,isGithubUrl:()=>BH,parseGithubUrl:()=>vH});var Cme=Ze(ve("querystring")),wme=[/^https?:\/\/(?:([^/]+?)@)?github.com\/([^/#]+)\/([^/#]+)\/tarball\/([^/#]+)(?:#(.*))?$/,/^https?:\/\/(?:([^/]+?)@)?github.com\/([^/#]+)\/([^/#]+?)(?:\.git)?(?:#(.*))?$/];function BH(t){return t?wme.some(e=>!!t.match(e)):!1}function vH(t){let e;for(let A of wme)if(e=t.match(A),e)break;if(!e)throw new Error(Ime(t));let[,r,o,a,n="master"]=e,{commit:u}=Cme.default.parse(n);return n=u||n.replace(/[^:]*:/,""),{auth:r,username:o,reponame:a,treeish:n}}function Ime(t){return`Input cannot be parsed as a valid GitHub URL ('${t}').`}var B2=class{supports(e,r){return!!BH(e.reference)}getLocalPath(e,r){return null}async fetch(e,r){let o=r.checksums.get(e.locatorHash)||null,[a,n,u]=await r.cache.fetchPackageFromCache(e,o,{onHit:()=>r.report.reportCacheHit(e),onMiss:()=>r.report.reportCacheMiss(e,`${G.prettyLocator(r.project.configuration,e)} can't be found in the cache and will be fetched from GitHub`),loader:()=>this.fetchFromNetwork(e,r),...r.cacheOptions});return{packageFs:a,releaseFs:n,prefixPath:G.getIdentVendorPath(e),checksum:u}}async fetchFromNetwork(e,r){let o=await sn.get(this.getLocatorUrl(e,r),{configuration:r.project.configuration});return await oe.mktempPromise(async a=>{let n=new gn(a);await $i.extractArchiveTo(o,n,{stripComponents:1});let u=ia.splitRepoUrl(e.reference),A=K.join(a,"package.tgz");await An.prepareExternalProject(a,A,{configuration:r.project.configuration,report:r.report,workspace:u.extra.workspace,locator:e});let p=await oe.readFilePromise(A);return await $i.convertToZip(p,{configuration:r.project.configuration,prefixPath:G.getIdentVendorPath(e),stripComponents:1})})}getLocatorUrl(e,r){let{auth:o,username:a,reponame:n,treeish:u}=vH(e.reference);return`https://${o?`${o}@`:""}github.com/${a}/${n}/archive/${u}.tar.gz`}};var ddt={hooks:{async fetchHostedRepository(t,e,r){if(t!==null)return t;let o=new B2;if(!o.supports(e,r))return null;try{return 
await o.fetch(e,r)}catch{return null}}}},mdt=ddt;var PH={};Vt(PH,{TarballHttpFetcher:()=>D2,TarballHttpResolver:()=>P2,default:()=>Edt});Ge();function v2(t){let e;try{e=new URL(t)}catch{return!1}return!(e.protocol!=="http:"&&e.protocol!=="https:"||!e.pathname.match(/(\.tar\.gz|\.tgz|\/[^.]+)$/))}var D2=class{supports(e,r){return v2(e.reference)}getLocalPath(e,r){return null}async fetch(e,r){let o=r.checksums.get(e.locatorHash)||null,[a,n,u]=await r.cache.fetchPackageFromCache(e,o,{onHit:()=>r.report.reportCacheHit(e),onMiss:()=>r.report.reportCacheMiss(e,`${G.prettyLocator(r.project.configuration,e)} can't be found in the cache and will be fetched from the remote server`),loader:()=>this.fetchFromNetwork(e,r),...r.cacheOptions});return{packageFs:a,releaseFs:n,prefixPath:G.getIdentVendorPath(e),checksum:u}}async fetchFromNetwork(e,r){let o=await sn.get(e.reference,{configuration:r.project.configuration});return await $i.convertToZip(o,{configuration:r.project.configuration,prefixPath:G.getIdentVendorPath(e),stripComponents:1})}};Ge();Ge();var P2=class{supportsDescriptor(e,r){return v2(e.range)}supportsLocator(e,r){return v2(e.reference)}shouldPersistResolution(e,r){return!0}bindDescriptor(e,r,o){return e}getResolutionDependencies(e,r){return{}}async getCandidates(e,r,o){return[G.convertDescriptorToLocator(e)]}async getSatisfying(e,r,o,a){let[n]=await this.getCandidates(e,r,a);return{locators:o.filter(u=>u.locatorHash===n.locatorHash),sorted:!1}}async resolve(e,r){if(!r.fetchOptions)throw new Error("Assertion failed: This resolver cannot be used unless a fetcher is configured");let o=await r.fetchOptions.fetcher.fetch(e,r.fetchOptions),a=await He.releaseAfterUseAsync(async()=>await Ut.find(o.prefixPath,{baseFs:o.packageFs}),o.releaseFs);return{...e,version:a.version||"0.0.0",languageName:a.languageName||r.project.configuration.get("defaultLanguageName"),linkType:"HARD",conditions:a.getConditions(),dependencies:r.project.configuration.normalizeDependencyMap(a.dependencies),peerDependencies:a.peerDependencies,dependenciesMeta:a.dependenciesMeta,peerDependenciesMeta:a.peerDependenciesMeta,bin:a.bin}}};var ydt={fetchers:[D2],resolvers:[P2]},Edt=ydt;var SH={};Vt(SH,{InitCommand:()=>jE,default:()=>wdt});Ge();Ge();Pt();qt();var jE=class extends ut{constructor(){super(...arguments);this.private=ge.Boolean("-p,--private",!1,{description:"Initialize a private package"});this.workspace=ge.Boolean("-w,--workspace",!1,{description:"Initialize a workspace root with a `packages/` directory"});this.install=ge.String("-i,--install",!1,{tolerateBoolean:!0,description:"Initialize a package with a specific bundle that will be locked in the project"});this.name=ge.String("-n,--name",{description:"Initialize a package with the given name"});this.usev2=ge.Boolean("-2",!1,{hidden:!0});this.yes=ge.Boolean("-y,--yes",{hidden:!0})}static{this.paths=[["init"]]}static{this.usage=it.Usage({description:"create a new package",details:"\n This command will setup a new package in your local directory.\n\n If the `-p,--private` or `-w,--workspace` options are set, the package will be private by default.\n\n If the `-w,--workspace` option is set, the package will be configured to accept a set of workspaces in the `packages/` directory.\n\n If the `-i,--install` option is given a value, Yarn will first download it using `yarn set version` and only then forward the init call to the newly downloaded bundle. 
Without arguments, the downloaded bundle will be `latest`.\n\n The initial settings of the manifest can be changed by using the `initScope` and `initFields` configuration values. Additionally, Yarn will generate an EditorConfig file whose rules can be altered via `initEditorConfig`, and will initialize a Git repository in the current directory.\n ",examples:[["Create a new package in the local directory","yarn init"],["Create a new private package in the local directory","yarn init -p"],["Create a new package and store the Yarn release inside","yarn init -i=latest"],["Create a new private package and defines it as a workspace root","yarn init -w"]]})}async execute(){let r=await Ke.find(this.context.cwd,this.context.plugins),o=typeof this.install=="string"?this.install:this.usev2||this.install===!0?"latest":null;return o!==null?await this.executeProxy(r,o):await this.executeRegular(r)}async executeProxy(r,o){if(r.projectCwd!==null&&r.projectCwd!==this.context.cwd)throw new st("Cannot use the --install flag from within a project subdirectory");oe.existsSync(this.context.cwd)||await oe.mkdirPromise(this.context.cwd,{recursive:!0});let a=K.join(this.context.cwd,dr.lockfile);oe.existsSync(a)||await oe.writeFilePromise(a,"");let n=await this.cli.run(["set","version",o],{quiet:!0});if(n!==0)return n;let u=[];return this.private&&u.push("-p"),this.workspace&&u.push("-w"),this.name&&u.push(`-n=${this.name}`),this.yes&&u.push("-y"),await oe.mktempPromise(async A=>{let{code:p}=await Ur.pipevp("yarn",["init",...u],{cwd:this.context.cwd,stdin:this.context.stdin,stdout:this.context.stdout,stderr:this.context.stderr,env:await An.makeScriptEnv({binFolder:A})});return p})}async executeRegular(r){let o=null;try{o=(await kt.find(r,this.context.cwd)).project}catch{o=null}oe.existsSync(this.context.cwd)||await oe.mkdirPromise(this.context.cwd,{recursive:!0});let a=await Ut.tryFind(this.context.cwd),n=a??new Ut,u=Object.fromEntries(r.get("initFields").entries());n.load(u),n.name=n.name??G.makeIdent(r.get("initScope"),this.name??K.basename(this.context.cwd)),n.packageManager=nn&&He.isTaggedYarnVersion(nn)?`yarn@${nn}`:null,(!a&&this.workspace||this.private)&&(n.private=!0),this.workspace&&n.workspaceDefinitions.length===0&&(await oe.mkdirPromise(K.join(this.context.cwd,"packages"),{recursive:!0}),n.workspaceDefinitions=[{pattern:"packages/*"}]);let A={};n.exportTo(A);let p=K.join(this.context.cwd,Ut.fileName);await oe.changeFilePromise(p,`${JSON.stringify(A,null,2)} +`,{automaticNewlines:!0});let h=[p],E=K.join(this.context.cwd,"README.md");if(oe.existsSync(E)||(await oe.writeFilePromise(E,`# ${G.stringifyIdent(n.name)} +`),h.push(E)),!o||o.cwd===this.context.cwd){let I=K.join(this.context.cwd,dr.lockfile);oe.existsSync(I)||(await oe.writeFilePromise(I,""),h.push(I));let x=[".yarn/*","!.yarn/patches","!.yarn/plugins","!.yarn/releases","!.yarn/sdks","!.yarn/versions","","# Swap the comments on the following lines if you wish to use zero-installs","# In that case, don't forget to run `yarn config set enableGlobalCache false`!","# Documentation here: https://yarnpkg.com/features/caching#zero-installs","","#!.yarn/cache",".pnp.*"].map(le=>`${le} +`).join(""),C=K.join(this.context.cwd,".gitignore");oe.existsSync(C)||(await oe.writeFilePromise(C,x),h.push(C));let L=["/.yarn/** linguist-vendored","/.yarn/releases/* binary","/.yarn/plugins/**/* binary","/.pnp.* binary linguist-generated"].map(le=>`${le} +`).join(""),U=K.join(this.context.cwd,".gitattributes");oe.existsSync(U)||(await 
oe.writeFilePromise(U,L),h.push(U));let z={"*":{endOfLine:"lf",insertFinalNewline:!0},"*.{js,json,yml}":{charset:"utf-8",indentStyle:"space",indentSize:2}};He.mergeIntoTarget(z,r.get("initEditorConfig"));let te=`root = true +`;for(let[le,ce]of Object.entries(z)){te+=` +[${le}] +`;for(let[Ce,de]of Object.entries(ce)){let Be=Ce.replace(/[A-Z]/g,Ee=>`_${Ee.toLowerCase()}`);te+=`${Be} = ${de} +`}}let ae=K.join(this.context.cwd,".editorconfig");oe.existsSync(ae)||(await oe.writeFilePromise(ae,te),h.push(ae)),await this.cli.run(["install"],{quiet:!0}),oe.existsSync(K.join(this.context.cwd,".git"))||(await Ur.execvp("git",["init"],{cwd:this.context.cwd}),await Ur.execvp("git",["add","--",...h],{cwd:this.context.cwd}),await Ur.execvp("git",["commit","--allow-empty","-m","First commit"],{cwd:this.context.cwd}))}}};var Cdt={configuration:{initScope:{description:"Scope used when creating packages via the init command",type:"STRING",default:null},initFields:{description:"Additional fields to set when creating packages via the init command",type:"MAP",valueDefinition:{description:"",type:"ANY"}},initEditorConfig:{description:"Extra rules to define in the generator editorconfig",type:"MAP",valueDefinition:{description:"",type:"ANY"}}},commands:[jE]},wdt=Cdt;var kq={};Vt(kq,{SearchCommand:()=>oC,UpgradeInteractiveCommand:()=>aC,default:()=>cIt});Ge();var vme=Ze(ve("os"));function GE({stdout:t}){if(vme.default.endianness()==="BE")throw new Error("Interactive commands cannot be used on big-endian systems because ink depends on yoga-layout-prebuilt which only supports little-endian architectures");if(!t.isTTY)throw new Error("Interactive commands can only be used inside a TTY environment")}qt();var Lye=Ze(YH()),WH={appId:"OFCNCOG2CU",apiKey:"6fe4476ee5a1832882e326b506d14126",indexName:"npm-search"},yyt=(0,Lye.default)(WH.appId,WH.apiKey).initIndex(WH.indexName),KH=async(t,e=0)=>await yyt.search(t,{analyticsTags:["yarn-plugin-interactive-tools"],attributesToRetrieve:["name","version","owner","repository","humanDownloadsLast30Days"],page:e,hitsPerPage:10});var vB=["regular","dev","peer"],oC=class extends ut{static{this.paths=[["search"]]}static{this.usage=it.Usage({category:"Interactive commands",description:"open the search interface",details:` + This command opens a fullscreen terminal interface where you can search for and install packages from the npm registry. 
+ `,examples:[["Open the search window","yarn search"]]})}async execute(){GE(this.context);let{Gem:e}=await Promise.resolve().then(()=>(Zk(),Eq)),{ScrollableItems:r}=await Promise.resolve().then(()=>(rQ(),tQ)),{useKeypress:o}=await Promise.resolve().then(()=>(wB(),Xwe)),{useMinistore:a}=await Promise.resolve().then(()=>(Dq(),vq)),{renderForm:n}=await Promise.resolve().then(()=>(oQ(),sQ)),{default:u}=await Promise.resolve().then(()=>Ze(aIe())),{Box:A,Text:p}=await Promise.resolve().then(()=>Ze(ic())),{default:h,useEffect:E,useState:I}=await Promise.resolve().then(()=>Ze(an())),v=await Ke.find(this.context.cwd,this.context.plugins),x=()=>h.createElement(A,{flexDirection:"row"},h.createElement(A,{flexDirection:"column",width:48},h.createElement(A,null,h.createElement(p,null,"Press ",h.createElement(p,{bold:!0,color:"cyanBright"},""),"/",h.createElement(p,{bold:!0,color:"cyanBright"},"")," to move between packages.")),h.createElement(A,null,h.createElement(p,null,"Press ",h.createElement(p,{bold:!0,color:"cyanBright"},"")," to select a package.")),h.createElement(A,null,h.createElement(p,null,"Press ",h.createElement(p,{bold:!0,color:"cyanBright"},"")," again to change the target."))),h.createElement(A,{flexDirection:"column"},h.createElement(A,{marginLeft:1},h.createElement(p,null,"Press ",h.createElement(p,{bold:!0,color:"cyanBright"},"")," to install the selected packages.")),h.createElement(A,{marginLeft:1},h.createElement(p,null,"Press ",h.createElement(p,{bold:!0,color:"cyanBright"},"")," to abort.")))),C=()=>h.createElement(h.Fragment,null,h.createElement(A,{width:15},h.createElement(p,{bold:!0,underline:!0,color:"gray"},"Owner")),h.createElement(A,{width:11},h.createElement(p,{bold:!0,underline:!0,color:"gray"},"Version")),h.createElement(A,{width:10},h.createElement(p,{bold:!0,underline:!0,color:"gray"},"Downloads"))),R=()=>h.createElement(A,{width:17},h.createElement(p,{bold:!0,underline:!0,color:"gray"},"Target")),L=({hit:de,active:Be})=>{let[Ee,g]=a(de.name,null);o({active:Be},(Ae,ne)=>{if(ne.name!=="space")return;if(!Ee){g(vB[0]);return}let Z=vB.indexOf(Ee)+1;Z===vB.length?g(null):g(vB[Z])},[Ee,g]);let me=G.parseIdent(de.name),we=G.prettyIdent(v,me);return h.createElement(A,null,h.createElement(A,{width:45},h.createElement(p,{bold:!0,wrap:"wrap"},we)),h.createElement(A,{width:14,marginLeft:1},h.createElement(p,{bold:!0,wrap:"truncate"},de.owner.name)),h.createElement(A,{width:10,marginLeft:1},h.createElement(p,{italic:!0,wrap:"truncate"},de.version)),h.createElement(A,{width:16,marginLeft:1},h.createElement(p,null,de.humanDownloadsLast30Days)))},U=({name:de,active:Be})=>{let[Ee]=a(de,null),g=G.parseIdent(de);return h.createElement(A,null,h.createElement(A,{width:47},h.createElement(p,{bold:!0}," - ",G.prettyIdent(v,g))),vB.map(me=>h.createElement(A,{key:me,width:14,marginLeft:1},h.createElement(p,null," ",h.createElement(e,{active:Ee===me})," ",h.createElement(p,{bold:!0},me)))))},z=()=>h.createElement(A,{marginTop:1},h.createElement(p,null,"Powered by Algolia.")),ae=await n(({useSubmit:de})=>{let Be=a();de(Be);let Ee=Array.from(Be.keys()).filter(H=>Be.get(H)!==null),[g,me]=I(""),[we,Ae]=I(0),[ne,Z]=I([]),xe=H=>{H.match(/\t| /)||me(H)},Ne=async()=>{Ae(0);let H=await KH(g);H.query===g&&Z(H.hits)},ht=async()=>{let H=await KH(g,we+1);H.query===g&&H.page-1===we&&(Ae(H.page),Z([...ne,...H.hits]))};return E(()=>{g?Ne():Z([])},[g]),h.createElement(A,{flexDirection:"column"},h.createElement(x,null),h.createElement(A,{flexDirection:"row",marginTop:1},h.createElement(p,{bold:!0},"Search: 
"),h.createElement(A,{width:41},h.createElement(u,{value:g,onChange:xe,placeholder:"i.e. babel, webpack, react...",showCursor:!1})),h.createElement(C,null)),ne.length?h.createElement(r,{radius:2,loop:!1,children:ne.map(H=>h.createElement(L,{key:H.name,hit:H,active:!1})),willReachEnd:ht}):h.createElement(p,{color:"gray"},"Start typing..."),h.createElement(A,{flexDirection:"row",marginTop:1},h.createElement(A,{width:49},h.createElement(p,{bold:!0},"Selected:")),h.createElement(R,null)),Ee.length?Ee.map(H=>h.createElement(U,{key:H,name:H,active:!1})):h.createElement(p,{color:"gray"},"No selected packages..."),h.createElement(z,null))},{},{stdin:this.context.stdin,stdout:this.context.stdout,stderr:this.context.stderr});if(typeof ae>"u")return 1;let le=Array.from(ae.keys()).filter(de=>ae.get(de)==="regular"),ce=Array.from(ae.keys()).filter(de=>ae.get(de)==="dev"),Ce=Array.from(ae.keys()).filter(de=>ae.get(de)==="peer");return le.length&&await this.cli.run(["add",...le]),ce.length&&await this.cli.run(["add","--dev",...ce]),Ce&&await this.cli.run(["add","--peer",...Ce]),0}};Ge();qt();f_();var hIe=Ze(Jn()),pIe=/^((?:[\^~]|>=?)?)([0-9]+)(\.[0-9]+)(\.[0-9]+)((?:-\S+)?)$/,gIe=(t,e)=>t.length>0?[t.slice(0,e)].concat(gIe(t.slice(e),e)):[],aC=class extends ut{static{this.paths=[["upgrade-interactive"]]}static{this.usage=it.Usage({category:"Interactive commands",description:"open the upgrade interface",details:` + This command opens a fullscreen terminal interface where you can see any out of date packages used by your application, their status compared to the latest versions available on the remote registry, and select packages to upgrade. + `,examples:[["Open the upgrade window","yarn upgrade-interactive"]]})}async execute(){GE(this.context);let{ItemOptions:e}=await Promise.resolve().then(()=>(fIe(),AIe)),{Pad:r}=await Promise.resolve().then(()=>(xq(),uIe)),{ScrollableItems:o}=await Promise.resolve().then(()=>(rQ(),tQ)),{useMinistore:a}=await Promise.resolve().then(()=>(Dq(),vq)),{renderForm:n}=await Promise.resolve().then(()=>(oQ(),sQ)),{Box:u,Text:A}=await Promise.resolve().then(()=>Ze(ic())),{default:p,useEffect:h,useRef:E,useState:I}=await Promise.resolve().then(()=>Ze(an())),v=await Ke.find(this.context.cwd,this.context.plugins),{project:x,workspace:C}=await kt.find(v,this.context.cwd),R=await Gr.find(v);if(!C)throw new sr(x.cwd,this.context.cwd);await x.restoreInstallState({restoreResolutions:!1});let L=this.context.stdout.rows-7,U=(me,we)=>{let Ae=gpe(me,we),ne="";for(let Z of Ae)Z.added?ne+=pe.pretty(v,Z.value,"green"):Z.removed||(ne+=Z.value);return ne},z=(me,we)=>{if(me===we)return we;let Ae=G.parseRange(me),ne=G.parseRange(we),Z=Ae.selector.match(pIe),xe=ne.selector.match(pIe);if(!Z||!xe)return U(me,we);let Ne=["gray","red","yellow","green","magenta"],ht=null,H="";for(let rt=1;rt{let ne=await Zc.fetchDescriptorFrom(me,Ae,{project:x,cache:R,preserveModifier:we,workspace:C});return ne!==null?ne.range:me.range},ae=async me=>{let we=hIe.default.valid(me.range)?`^${me.range}`:me.range,[Ae,ne]=await Promise.all([te(me,me.range,we).catch(()=>null),te(me,me.range,"latest").catch(()=>null)]),Z=[{value:null,label:me.range}];return Ae&&Ae!==me.range?Z.push({value:Ae,label:z(me.range,Ae)}):Z.push({value:null,label:""}),ne&&ne!==Ae&&ne!==me.range?Z.push({value:ne,label:z(me.range,ne)}):Z.push({value:null,label:""}),Z},le=()=>p.createElement(u,{flexDirection:"row"},p.createElement(u,{flexDirection:"column",width:49},p.createElement(u,{marginLeft:1},p.createElement(A,null,"Press 
",p.createElement(A,{bold:!0,color:"cyanBright"},""),"/",p.createElement(A,{bold:!0,color:"cyanBright"},"")," to select packages.")),p.createElement(u,{marginLeft:1},p.createElement(A,null,"Press ",p.createElement(A,{bold:!0,color:"cyanBright"},""),"/",p.createElement(A,{bold:!0,color:"cyanBright"},"")," to select versions."))),p.createElement(u,{flexDirection:"column"},p.createElement(u,{marginLeft:1},p.createElement(A,null,"Press ",p.createElement(A,{bold:!0,color:"cyanBright"},"")," to install.")),p.createElement(u,{marginLeft:1},p.createElement(A,null,"Press ",p.createElement(A,{bold:!0,color:"cyanBright"},"")," to abort.")))),ce=()=>p.createElement(u,{flexDirection:"row",paddingTop:1,paddingBottom:1},p.createElement(u,{width:50},p.createElement(A,{bold:!0},p.createElement(A,{color:"greenBright"},"?")," Pick the packages you want to upgrade.")),p.createElement(u,{width:17},p.createElement(A,{bold:!0,underline:!0,color:"gray"},"Current")),p.createElement(u,{width:17},p.createElement(A,{bold:!0,underline:!0,color:"gray"},"Range")),p.createElement(u,{width:17},p.createElement(A,{bold:!0,underline:!0,color:"gray"},"Latest"))),Ce=({active:me,descriptor:we,suggestions:Ae})=>{let[ne,Z]=a(we.descriptorHash,null),xe=G.stringifyIdent(we),Ne=Math.max(0,45-xe.length);return p.createElement(p.Fragment,null,p.createElement(u,null,p.createElement(u,{width:45},p.createElement(A,{bold:!0},G.prettyIdent(v,we)),p.createElement(r,{active:me,length:Ne})),p.createElement(e,{active:me,options:Ae,value:ne,skewer:!0,onChange:Z,sizes:[17,17,17]})))},de=({dependencies:me})=>{let[we,Ae]=I(me.map(()=>null)),ne=E(!0),Z=async xe=>{let Ne=await ae(xe);return Ne.filter(ht=>ht.label!=="").length<=1?null:{descriptor:xe,suggestions:Ne}};return h(()=>()=>{ne.current=!1},[]),h(()=>{let xe=Math.trunc(L*1.75),Ne=me.slice(0,xe),ht=me.slice(xe),H=gIe(ht,L),rt=Ne.map(Z).reduce(async(Te,Fe)=>{await Te;let ke=await Fe;ke!==null&&ne.current&&Ae(Ye=>{let Se=Ye.findIndex(Ue=>Ue===null),et=[...Ye];return et[Se]=ke,et})},Promise.resolve());H.reduce((Te,Fe)=>Promise.all(Fe.map(ke=>Promise.resolve().then(()=>Z(ke)))).then(async ke=>{ke=ke.filter(Ye=>Ye!==null),await Te,ne.current&&Ae(Ye=>{let Se=Ye.findIndex(et=>et===null);return Ye.slice(0,Se).concat(ke).concat(Ye.slice(Se+ke.length))})}),rt).then(()=>{ne.current&&Ae(Te=>Te.filter(Fe=>Fe!==null))})},[]),we.length?p.createElement(o,{radius:L>>1,children:we.map((xe,Ne)=>xe!==null?p.createElement(Ce,{key:Ne,active:!1,descriptor:xe.descriptor,suggestions:xe.suggestions}):p.createElement(A,{key:Ne},"Loading..."))}):p.createElement(A,null,"No upgrades found")},Ee=await n(({useSubmit:me})=>{me(a());let we=new Map;for(let ne of x.workspaces)for(let Z of["dependencies","devDependencies"])for(let xe of ne.manifest[Z].values())x.tryWorkspaceByDescriptor(xe)===null&&(xe.range.startsWith("link:")||we.set(xe.descriptorHash,xe));let Ae=He.sortMap(we.values(),ne=>G.stringifyDescriptor(ne));return p.createElement(u,{flexDirection:"column"},p.createElement(le,null),p.createElement(ce,null),p.createElement(de,{dependencies:Ae}))},{},{stdin:this.context.stdin,stdout:this.context.stdout,stderr:this.context.stderr});if(typeof Ee>"u")return 1;let g=!1;for(let me of x.workspaces)for(let we of["dependencies","devDependencies"]){let Ae=me.manifest[we];for(let ne of Ae.values()){let Z=Ee.get(ne.descriptorHash);typeof Z<"u"&&Z!==null&&(Ae.set(ne.identHash,G.makeDescriptor(ne,Z)),g=!0)}}return g?await x.installWithNewReport({quiet:this.context.quiet,stdout:this.context.stdout},{cache:R}):0}};var 
lIt={commands:[oC,aC]},cIt=lIt;var Qq={};Vt(Qq,{LinkFetcher:()=>PB,LinkResolver:()=>SB,PortalFetcher:()=>bB,PortalResolver:()=>xB,default:()=>AIt});Ge();Pt();var Xf="portal:",Zf="link:";var PB=class{supports(e,r){return!!e.reference.startsWith(Zf)}getLocalPath(e,r){let{parentLocator:o,path:a}=G.parseFileStyleRange(e.reference,{protocol:Zf});if(K.isAbsolute(a))return a;let n=r.fetcher.getLocalPath(o,r);return n===null?null:K.resolve(n,a)}async fetch(e,r){let{parentLocator:o,path:a}=G.parseFileStyleRange(e.reference,{protocol:Zf}),n=K.isAbsolute(a)?{packageFs:new gn(It.root),prefixPath:It.dot,localPath:It.root}:await r.fetcher.fetch(o,r),u=n.localPath?{packageFs:new gn(It.root),prefixPath:K.relative(It.root,n.localPath),localPath:It.root}:n;n!==u&&n.releaseFs&&n.releaseFs();let A=u.packageFs,p=K.resolve(u.localPath??u.packageFs.getRealPath(),u.prefixPath,a);return n.localPath?{packageFs:new gn(p,{baseFs:A}),releaseFs:u.releaseFs,prefixPath:It.dot,discardFromLookup:!0,localPath:p}:{packageFs:new qu(p,{baseFs:A}),releaseFs:u.releaseFs,prefixPath:It.dot,discardFromLookup:!0}}};Ge();Pt();var SB=class{supportsDescriptor(e,r){return!!e.range.startsWith(Zf)}supportsLocator(e,r){return!!e.reference.startsWith(Zf)}shouldPersistResolution(e,r){return!1}bindDescriptor(e,r,o){return G.bindDescriptor(e,{locator:G.stringifyLocator(r)})}getResolutionDependencies(e,r){return{}}async getCandidates(e,r,o){let a=e.range.slice(Zf.length);return[G.makeLocator(e,`${Zf}${ue.toPortablePath(a)}`)]}async getSatisfying(e,r,o,a){let[n]=await this.getCandidates(e,r,a);return{locators:o.filter(u=>u.locatorHash===n.locatorHash),sorted:!1}}async resolve(e,r){return{...e,version:"0.0.0",languageName:r.project.configuration.get("defaultLanguageName"),linkType:"SOFT",conditions:null,dependencies:new Map,peerDependencies:new Map,dependenciesMeta:new Map,peerDependenciesMeta:new Map,bin:new Map}}};Ge();Pt();var bB=class{supports(e,r){return!!e.reference.startsWith(Xf)}getLocalPath(e,r){let{parentLocator:o,path:a}=G.parseFileStyleRange(e.reference,{protocol:Xf});if(K.isAbsolute(a))return a;let n=r.fetcher.getLocalPath(o,r);return n===null?null:K.resolve(n,a)}async fetch(e,r){let{parentLocator:o,path:a}=G.parseFileStyleRange(e.reference,{protocol:Xf}),n=K.isAbsolute(a)?{packageFs:new gn(It.root),prefixPath:It.dot,localPath:It.root}:await r.fetcher.fetch(o,r),u=n.localPath?{packageFs:new gn(It.root),prefixPath:K.relative(It.root,n.localPath),localPath:It.root}:n;n!==u&&n.releaseFs&&n.releaseFs();let A=u.packageFs,p=K.resolve(u.localPath??u.packageFs.getRealPath(),u.prefixPath,a);return n.localPath?{packageFs:new gn(p,{baseFs:A}),releaseFs:u.releaseFs,prefixPath:It.dot,localPath:p}:{packageFs:new qu(p,{baseFs:A}),releaseFs:u.releaseFs,prefixPath:It.dot}}};Ge();Ge();Pt();var xB=class{supportsDescriptor(e,r){return!!e.range.startsWith(Xf)}supportsLocator(e,r){return!!e.reference.startsWith(Xf)}shouldPersistResolution(e,r){return!1}bindDescriptor(e,r,o){return G.bindDescriptor(e,{locator:G.stringifyLocator(r)})}getResolutionDependencies(e,r){return{}}async getCandidates(e,r,o){let a=e.range.slice(Xf.length);return[G.makeLocator(e,`${Xf}${ue.toPortablePath(a)}`)]}async getSatisfying(e,r,o,a){let[n]=await this.getCandidates(e,r,a);return{locators:o.filter(u=>u.locatorHash===n.locatorHash),sorted:!1}}async resolve(e,r){if(!r.fetchOptions)throw new Error("Assertion failed: This resolver cannot be used unless a fetcher is configured");let o=await r.fetchOptions.fetcher.fetch(e,r.fetchOptions),a=await He.releaseAfterUseAsync(async()=>await 
Ut.find(o.prefixPath,{baseFs:o.packageFs}),o.releaseFs);return{...e,version:a.version||"0.0.0",languageName:a.languageName||r.project.configuration.get("defaultLanguageName"),linkType:"SOFT",conditions:a.getConditions(),dependencies:r.project.configuration.normalizeDependencyMap(a.dependencies),peerDependencies:a.peerDependencies,dependenciesMeta:a.dependenciesMeta,peerDependenciesMeta:a.peerDependenciesMeta,bin:a.bin}}};var uIt={fetchers:[PB,bB],resolvers:[SB,xB]},AIt=uIt;var hj={};Vt(hj,{NodeModulesLinker:()=>GB,NodeModulesMode:()=>uj,PnpLooseLinker:()=>YB,default:()=>S1t});Pt();Ge();Pt();Pt();var Rq=(t,e)=>`${t}@${e}`,dIe=(t,e)=>{let r=e.indexOf("#"),o=r>=0?e.substring(r+1):e;return Rq(t,o)};var yIe=(t,e={})=>{let r=e.debugLevel||Number(process.env.NM_DEBUG_LEVEL||-1),o=e.check||r>=9,a=e.hoistingLimits||new Map,n={check:o,debugLevel:r,hoistingLimits:a,fastLookupPossible:!0},u;n.debugLevel>=0&&(u=Date.now());let A=yIt(t,n),p=!1,h=0;do p=Tq(A,[A],new Set([A.locator]),new Map,n).anotherRoundNeeded,n.fastLookupPossible=!1,h++;while(p);if(n.debugLevel>=0&&console.log(`hoist time: ${Date.now()-u}ms, rounds: ${h}`),n.debugLevel>=1){let E=kB(A);if(Tq(A,[A],new Set([A.locator]),new Map,n).isGraphChanged)throw new Error(`The hoisting result is not terminal, prev tree: +${E}, next tree: +${kB(A)}`);let v=EIe(A);if(v)throw new Error(`${v}, after hoisting finished: +${kB(A)}`)}return n.debugLevel>=2&&console.log(kB(A)),EIt(A)},fIt=t=>{let e=t[t.length-1],r=new Map,o=new Set,a=n=>{if(!o.has(n)){o.add(n);for(let u of n.hoistedDependencies.values())r.set(u.name,u);for(let u of n.dependencies.values())n.peerNames.has(u.name)||a(u)}};return a(e),r},pIt=t=>{let e=t[t.length-1],r=new Map,o=new Set,a=new Set,n=(u,A)=>{if(o.has(u))return;o.add(u);for(let h of u.hoistedDependencies.values())if(!A.has(h.name)){let E;for(let I of t)E=I.dependencies.get(h.name),E&&r.set(E.name,E)}let p=new Set;for(let h of u.dependencies.values())p.add(h.name);for(let h of u.dependencies.values())u.peerNames.has(h.name)||n(h,p)};return n(e,a),r},mIe=(t,e)=>{if(e.decoupled)return e;let{name:r,references:o,ident:a,locator:n,dependencies:u,originalDependencies:A,hoistedDependencies:p,peerNames:h,reasons:E,isHoistBorder:I,hoistPriority:v,dependencyKind:x,hoistedFrom:C,hoistedTo:R}=e,L={name:r,references:new Set(o),ident:a,locator:n,dependencies:new Map(u),originalDependencies:new Map(A),hoistedDependencies:new Map(p),peerNames:new Set(h),reasons:new Map(E),decoupled:!0,isHoistBorder:I,hoistPriority:v,dependencyKind:x,hoistedFrom:new Map(C),hoistedTo:new Map(R)},U=L.dependencies.get(r);return U&&U.ident==L.ident&&L.dependencies.set(r,L),t.dependencies.set(L.name,L),L},hIt=(t,e)=>{let r=new Map([[t.name,[t.ident]]]);for(let a of t.dependencies.values())t.peerNames.has(a.name)||r.set(a.name,[a.ident]);let o=Array.from(e.keys());o.sort((a,n)=>{let u=e.get(a),A=e.get(n);return A.hoistPriority!==u.hoistPriority?A.hoistPriority-u.hoistPriority:A.peerDependents.size!==u.peerDependents.size?A.peerDependents.size-u.peerDependents.size:A.dependents.size-u.dependents.size});for(let a of o){let n=a.substring(0,a.indexOf("@",1)),u=a.substring(n.length+1);if(!t.peerNames.has(n)){let A=r.get(n);A||(A=[],r.set(n,A)),A.indexOf(u)<0&&A.push(u)}}return r},Fq=t=>{let e=new Set,r=(o,a=new Set)=>{if(!a.has(o)){a.add(o);for(let n of o.peerNames)if(!t.peerNames.has(n)){let u=t.dependencies.get(n);u&&!e.has(u)&&r(u,a)}e.add(o)}};for(let o of t.dependencies.values())t.peerNames.has(o.name)||r(o);return e},Tq=(t,e,r,o,a,n=new Set)=>{let 
u=e[e.length-1];if(n.has(u))return{anotherRoundNeeded:!1,isGraphChanged:!1};n.add(u);let A=CIt(u),p=hIt(u,A),h=t==u?new Map:a.fastLookupPossible?fIt(e):pIt(e),E,I=!1,v=!1,x=new Map(Array.from(p.entries()).map(([R,L])=>[R,L[0]])),C=new Map;do{let R=mIt(t,e,r,h,x,p,o,C,a);R.isGraphChanged&&(v=!0),R.anotherRoundNeeded&&(I=!0),E=!1;for(let[L,U]of p)U.length>1&&!u.dependencies.has(L)&&(x.delete(L),U.shift(),x.set(L,U[0]),E=!0)}while(E);for(let R of u.dependencies.values())if(!u.peerNames.has(R.name)&&!r.has(R.locator)){r.add(R.locator);let L=Tq(t,[...e,R],r,C,a);L.isGraphChanged&&(v=!0),L.anotherRoundNeeded&&(I=!0),r.delete(R.locator)}return{anotherRoundNeeded:I,isGraphChanged:v}},gIt=t=>{for(let[e,r]of t.dependencies)if(!t.peerNames.has(e)&&r.ident!==t.ident)return!0;return!1},dIt=(t,e,r,o,a,n,u,A,{outputReason:p,fastLookupPossible:h})=>{let E,I=null,v=new Set;p&&(E=`${Array.from(e).map(L=>no(L)).join("\u2192")}`);let x=r[r.length-1],R=!(o.ident===x.ident);if(p&&!R&&(I="- self-reference"),R&&(R=o.dependencyKind!==1,p&&!R&&(I="- workspace")),R&&o.dependencyKind===2&&(R=!gIt(o),p&&!R&&(I="- external soft link with unhoisted dependencies")),R&&(R=x.dependencyKind!==1||x.hoistedFrom.has(o.name)||e.size===1,p&&!R&&(I=x.reasons.get(o.name))),R&&(R=!t.peerNames.has(o.name),p&&!R&&(I=`- cannot shadow peer: ${no(t.originalDependencies.get(o.name).locator)} at ${E}`)),R){let L=!1,U=a.get(o.name);if(L=!U||U.ident===o.ident,p&&!L&&(I=`- filled by: ${no(U.locator)} at ${E}`),L)for(let z=r.length-1;z>=1;z--){let ae=r[z].dependencies.get(o.name);if(ae&&ae.ident!==o.ident){L=!1;let le=A.get(x);le||(le=new Set,A.set(x,le)),le.add(o.name),p&&(I=`- filled by ${no(ae.locator)} at ${r.slice(0,z).map(ce=>no(ce.locator)).join("\u2192")}`);break}}R=L}if(R&&(R=n.get(o.name)===o.ident,p&&!R&&(I=`- filled by: ${no(u.get(o.name)[0])} at ${E}`)),R){let L=!0,U=new Set(o.peerNames);for(let z=r.length-1;z>=1;z--){let te=r[z];for(let ae of U){if(te.peerNames.has(ae)&&te.originalDependencies.has(ae))continue;let le=te.dependencies.get(ae);le&&t.dependencies.get(ae)!==le&&(z===r.length-1?v.add(le):(v=null,L=!1,p&&(I=`- peer dependency ${no(le.locator)} from parent ${no(te.locator)} was not hoisted to ${E}`))),U.delete(ae)}if(!L)break}R=L}if(R&&!h)for(let L of o.hoistedDependencies.values()){let U=a.get(L.name)||t.dependencies.get(L.name);if(!U||L.ident!==U.ident){R=!1,p&&(I=`- previously hoisted dependency mismatch, needed: ${no(L.locator)}, available: ${no(U?.locator)}`);break}}return v!==null&&v.size>0?{isHoistable:2,dependsOn:v,reason:I}:{isHoistable:R?0:1,reason:I}},aQ=t=>`${t.name}@${t.locator}`,mIt=(t,e,r,o,a,n,u,A,p)=>{let h=e[e.length-1],E=new Set,I=!1,v=!1,x=(U,z,te,ae,le)=>{if(E.has(ae))return;let ce=[...z,aQ(ae)],Ce=[...te,aQ(ae)],de=new Map,Be=new Map;for(let Ae of Fq(ae)){let ne=dIt(h,r,[h,...U,ae],Ae,o,a,n,A,{outputReason:p.debugLevel>=2,fastLookupPossible:p.fastLookupPossible});if(Be.set(Ae,ne),ne.isHoistable===2)for(let Z of ne.dependsOn){let xe=de.get(Z.name)||new Set;xe.add(Ae.name),de.set(Z.name,xe)}}let Ee=new Set,g=(Ae,ne,Z)=>{if(!Ee.has(Ae)){Ee.add(Ae),Be.set(Ae,{isHoistable:1,reason:Z});for(let xe of de.get(Ae.name)||[])g(ae.dependencies.get(xe),ne,p.debugLevel>=2?`- peer dependency ${no(Ae.locator)} from parent ${no(ae.locator)} was not hoisted`:"")}};for(let[Ae,ne]of Be)ne.isHoistable===1&&g(Ae,ne,ne.reason);let me=!1;for(let Ae of Be.keys())if(!Ee.has(Ae)){v=!0;let 
ne=u.get(ae);ne&&ne.has(Ae.name)&&(I=!0),me=!0,ae.dependencies.delete(Ae.name),ae.hoistedDependencies.set(Ae.name,Ae),ae.reasons.delete(Ae.name);let Z=h.dependencies.get(Ae.name);if(p.debugLevel>=2){let xe=Array.from(z).concat([ae.locator]).map(ht=>no(ht)).join("\u2192"),Ne=h.hoistedFrom.get(Ae.name);Ne||(Ne=[],h.hoistedFrom.set(Ae.name,Ne)),Ne.push(xe),ae.hoistedTo.set(Ae.name,Array.from(e).map(ht=>no(ht.locator)).join("\u2192"))}if(!Z)h.ident!==Ae.ident&&(h.dependencies.set(Ae.name,Ae),le.add(Ae));else for(let xe of Ae.references)Z.references.add(xe)}if(ae.dependencyKind===2&&me&&(I=!0),p.check){let Ae=EIe(t);if(Ae)throw new Error(`${Ae}, after hoisting dependencies of ${[h,...U,ae].map(ne=>no(ne.locator)).join("\u2192")}: +${kB(t)}`)}let we=Fq(ae);for(let Ae of we)if(Ee.has(Ae)){let ne=Be.get(Ae);if((a.get(Ae.name)===Ae.ident||!ae.reasons.has(Ae.name))&&ne.isHoistable!==0&&ae.reasons.set(Ae.name,ne.reason),!Ae.isHoistBorder&&Ce.indexOf(aQ(Ae))<0){E.add(ae);let xe=mIe(ae,Ae);x([...U,ae],ce,Ce,xe,R),E.delete(ae)}}},C,R=new Set(Fq(h)),L=Array.from(e).map(U=>aQ(U));do{C=R,R=new Set;for(let U of C){if(U.locator===h.locator||U.isHoistBorder)continue;let z=mIe(h,U);x([],Array.from(r),L,z,R)}}while(R.size>0);return{anotherRoundNeeded:I,isGraphChanged:v}},EIe=t=>{let e=[],r=new Set,o=new Set,a=(n,u,A)=>{if(r.has(n)||(r.add(n),o.has(n)))return;let p=new Map(u);for(let h of n.dependencies.values())n.peerNames.has(h.name)||p.set(h.name,h);for(let h of n.originalDependencies.values()){let E=p.get(h.name),I=()=>`${Array.from(o).concat([n]).map(v=>no(v.locator)).join("\u2192")}`;if(n.peerNames.has(h.name)){let v=u.get(h.name);(v!==E||!v||v.ident!==h.ident)&&e.push(`${I()} - broken peer promise: expected ${h.ident} but found ${v&&v.ident}`)}else{let v=A.hoistedFrom.get(n.name),x=n.hoistedTo.get(h.name),C=`${v?` hoisted from ${v.join(", ")}`:""}`,R=`${x?` hoisted to ${x}`:""}`,L=`${I()}${C}`;E?E.ident!==h.ident&&e.push(`${L} - broken require promise for ${h.name}${R}: expected ${h.ident}, but found: ${E.ident}`):e.push(`${L} - broken require promise: no required dependency ${h.name}${R} found`)}}o.add(n);for(let h of n.dependencies.values())n.peerNames.has(h.name)||a(h,p,n);o.delete(n)};return a(t,t.dependencies,t),e.join(` +`)},yIt=(t,e)=>{let{identName:r,name:o,reference:a,peerNames:n}=t,u={name:o,references:new Set([a]),locator:Rq(r,a),ident:dIe(r,a),dependencies:new Map,originalDependencies:new Map,hoistedDependencies:new Map,peerNames:new Set(n),reasons:new Map,decoupled:!0,isHoistBorder:!0,hoistPriority:0,dependencyKind:1,hoistedFrom:new Map,hoistedTo:new Map},A=new Map([[t,u]]),p=(h,E)=>{let I=A.get(h),v=!!I;if(!I){let{name:x,identName:C,reference:R,peerNames:L,hoistPriority:U,dependencyKind:z}=h,te=e.hoistingLimits.get(E.locator);I={name:x,references:new Set([R]),locator:Rq(C,R),ident:dIe(C,R),dependencies:new Map,originalDependencies:new Map,hoistedDependencies:new Map,peerNames:new Set(L),reasons:new Map,decoupled:!0,isHoistBorder:te?te.has(x):!1,hoistPriority:U||0,dependencyKind:z||0,hoistedFrom:new Map,hoistedTo:new Map},A.set(h,I)}if(E.dependencies.set(h.name,I),E.originalDependencies.set(h.name,I),v){let x=new Set,C=R=>{if(!x.has(R)){x.add(R),R.decoupled=!1;for(let L of R.dependencies.values())R.peerNames.has(L.name)||C(L)}};C(I)}else for(let x of h.dependencies)p(x,I)};for(let h of t.dependencies)p(h,u);return u},Nq=t=>t.substring(0,t.indexOf("@",1)),EIt=t=>{let e={name:t.name,identName:Nq(t.locator),references:new Set(t.references),dependencies:new Set},r=new Set([t]),o=(a,n,u)=>{let 
A=r.has(a),p;if(n===a)p=u;else{let{name:h,references:E,locator:I}=a;p={name:h,identName:Nq(I),references:E,dependencies:new Set}}if(u.dependencies.add(p),!A){r.add(a);for(let h of a.dependencies.values())a.peerNames.has(h.name)||o(h,a,p);r.delete(a)}};for(let a of t.dependencies.values())o(a,t,e);return e},CIt=t=>{let e=new Map,r=new Set([t]),o=u=>`${u.name}@${u.ident}`,a=u=>{let A=o(u),p=e.get(A);return p||(p={dependents:new Set,peerDependents:new Set,hoistPriority:0},e.set(A,p)),p},n=(u,A)=>{let p=!!r.has(A);if(a(A).dependents.add(u.ident),!p){r.add(A);for(let E of A.dependencies.values()){let I=a(E);I.hoistPriority=Math.max(I.hoistPriority,E.hoistPriority),A.peerNames.has(E.name)?I.peerDependents.add(A.ident):n(A,E)}}};for(let u of t.dependencies.values())t.peerNames.has(u.name)||n(t,u);return e},no=t=>{if(!t)return"none";let e=t.indexOf("@",1),r=t.substring(0,e);r.endsWith("$wsroot$")&&(r=`wh:${r.replace("$wsroot$","")}`);let o=t.substring(e+1);if(o==="workspace:.")return".";if(o){let a=(o.indexOf("#")>0?o.split("#")[1]:o).replace("npm:","");return o.startsWith("virtual")&&(r=`v:${r}`),a.startsWith("workspace")&&(r=`w:${r}`,a=""),`${r}${a?`@${a}`:""}`}else return`${r}`};var kB=t=>{let e=0,r=(a,n,u="")=>{if(e>5e4||n.has(a))return"";e++;let A=Array.from(a.dependencies.values()).sort((h,E)=>h.name===E.name?0:h.name>E.name?1:-1),p="";n.add(a);for(let h=0;h<A.length;h++){let E=A[h],I=a.reasons.get(E.name),v=Nq(E.locator);p+=`${u}${h<A.length-1?"\u251C\u2500":"\u2514\u2500"}${(n.has(E)?">":"")+(v!==E.name?`a:${E.name}:`:"")+no(E.locator)+(I?` ${I}`:"")}
+`,p+=r(E,n,`${u}${h<A.length-1?"\u2502 ":"  "}`)}return n.delete(a),p};return r(t,new Set)+(e>5e4?`
+Tree is too large, part of the tree has been dumped
+`:"")};var QB=(o=>(o.WORKSPACES="workspaces",o.DEPENDENCIES="dependencies",o.NONE="none",o))(QB||{}),CIe="node_modules",Oh="$wsroot$";var FB=(t,e)=>{let{packageTree:r,hoistingLimits:o,errors:a,preserveSymlinksRequired:n}=IIt(t,e),u=null;if(a.length===0){let A=yIe(r,{hoistingLimits:o});u=vIt(t,A,e)}return{tree:u,errors:a,preserveSymlinksRequired:n}},gA=t=>`${t.name}@${t.reference}`,Mq=t=>{let e=new Map;for(let[r,o]of t.entries())if(!o.dirList){let a=e.get(o.locator);a||(a={target:o.target,linkType:o.linkType,locations:[],aliases:o.aliases},e.set(o.locator,a)),a.locations.push(r)}for(let r of e.values())r.locations=r.locations.sort((o,a)=>{let n=o.split(K.delimiter).length,u=a.split(K.delimiter).length;return a===o?0:n!==u?u-n:a>o?1:-1});return e},wIe=(t,e)=>{let r=G.isVirtualLocator(t)?G.devirtualizeLocator(t):t,o=G.isVirtualLocator(e)?G.devirtualizeLocator(e):e;return G.areLocatorsEqual(r,o)},Lq=(t,e,r,o)=>{if(t.linkType!=="SOFT")return!1;let a=ue.toPortablePath(r.resolveVirtual&&e.reference&&e.reference.startsWith("virtual:")?r.resolveVirtual(t.packageLocation):t.packageLocation);return K.contains(o,a)===null},wIt=t=>{let e=t.getPackageInformation(t.topLevel);if(e===null)throw new Error("Assertion failed: Expected the top-level package to have been registered");if(t.findPackageLocator(e.packageLocation)===null)throw new Error("Assertion failed: Expected the top-level package to have a physical locator");let o=ue.toPortablePath(e.packageLocation.slice(0,-1)),a=new Map,n={children:new Map},u=t.getDependencyTreeRoots(),A=new Map,p=new Set,h=(v,x)=>{let C=gA(v);if(p.has(C))return;p.add(C);let R=t.getPackageInformation(v);if(R){let L=x?gA(x):"";if(gA(v)!==L&&R.linkType==="SOFT"&&!v.reference.startsWith("link:")&&!Lq(R,v,t,o)){let U=IIe(R,v,t);(!A.get(U)||v.reference.startsWith("workspace:"))&&A.set(U,v)}for(let[U,z]of R.packageDependencies)z!==null&&(R.packagePeers.has(U)||h(t.getLocator(U,z),v))}};for(let v of u)h(v,null);let E=o.split(K.sep);for(let v of A.values()){let
x=t.getPackageInformation(v),R=ue.toPortablePath(x.packageLocation.slice(0,-1)).split(K.sep).slice(E.length),L=n;for(let U of R){let z=L.children.get(U);z||(z={children:new Map},L.children.set(U,z)),L=z}L.workspaceLocator=v}let I=(v,x)=>{if(v.workspaceLocator){let C=gA(x),R=a.get(C);R||(R=new Set,a.set(C,R)),R.add(v.workspaceLocator)}for(let C of v.children.values())I(C,v.workspaceLocator||x)};for(let v of n.children.values())I(v,n.workspaceLocator);return a},IIt=(t,e)=>{let r=[],o=!1,a=new Map,n=wIt(t),u=t.getPackageInformation(t.topLevel);if(u===null)throw new Error("Assertion failed: Expected the top-level package to have been registered");let A=t.findPackageLocator(u.packageLocation);if(A===null)throw new Error("Assertion failed: Expected the top-level package to have a physical locator");let p=ue.toPortablePath(u.packageLocation.slice(0,-1)),h={name:A.name,identName:A.name,reference:A.reference,peerNames:u.packagePeers,dependencies:new Set,dependencyKind:1},E=new Map,I=(x,C)=>`${gA(C)}:${x}`,v=(x,C,R,L,U,z,te,ae)=>{let le=I(x,R),ce=E.get(le),Ce=!!ce;!Ce&&R.name===A.name&&R.reference===A.reference&&(ce=h,E.set(le,h));let de=Lq(C,R,t,p);if(!ce){let Ae=0;de?Ae=2:C.linkType==="SOFT"&&R.name.endsWith(Oh)&&(Ae=1),ce={name:x,identName:R.name,reference:R.reference,dependencies:new Set,peerNames:Ae===1?new Set:C.packagePeers,dependencyKind:Ae},E.set(le,ce)}let Be;if(de?Be=2:U.linkType==="SOFT"?Be=1:Be=0,ce.hoistPriority=Math.max(ce.hoistPriority||0,Be),ae&&!de){let Ae=gA({name:L.identName,reference:L.reference}),ne=a.get(Ae)||new Set;a.set(Ae,ne),ne.add(ce.name)}let Ee=new Map(C.packageDependencies);if(e.project){let Ae=e.project.workspacesByCwd.get(ue.toPortablePath(C.packageLocation.slice(0,-1)));if(Ae){let ne=new Set([...Array.from(Ae.manifest.peerDependencies.values(),Z=>G.stringifyIdent(Z)),...Array.from(Ae.manifest.peerDependenciesMeta.keys())]);for(let Z of ne)Ee.has(Z)||(Ee.set(Z,z.get(Z)||null),ce.peerNames.add(Z))}}let g=gA({name:R.name.replace(Oh,""),reference:R.reference}),me=n.get(g);if(me)for(let Ae of me)Ee.set(`${Ae.name}${Oh}`,Ae.reference);(C!==U||C.linkType!=="SOFT"||!de&&(!e.selfReferencesByCwd||e.selfReferencesByCwd.get(te)))&&L.dependencies.add(ce);let we=R!==A&&C.linkType==="SOFT"&&!R.name.endsWith(Oh)&&!de;if(!Ce&&!we){let Ae=new Map;for(let[ne,Z]of Ee)if(Z!==null){let xe=t.getLocator(ne,Z),Ne=t.getLocator(ne.replace(Oh,""),Z),ht=t.getPackageInformation(Ne);if(ht===null)throw new Error("Assertion failed: Expected the package to have been registered");let H=Lq(ht,xe,t,p);if(e.validateExternalSoftLinks&&e.project&&H){ht.packageDependencies.size>0&&(o=!0);for(let[Ye,Se]of ht.packageDependencies)if(Se!==null){let et=G.parseLocator(Array.isArray(Se)?`${Se[0]}@${Se[1]}`:`${Ye}@${Se}`);if(gA(et)!==gA(xe)){let Ue=Ee.get(Ye);if(Ue){let b=G.parseLocator(Array.isArray(Ue)?`${Ue[0]}@${Ue[1]}`:`${Ye}@${Ue}`);wIe(b,et)||r.push({messageName:71,text:`Cannot link ${G.prettyIdent(e.project.configuration,G.parseIdent(xe.name))} into ${G.prettyLocator(e.project.configuration,G.parseLocator(`${R.name}@${R.reference}`))} dependency ${G.prettyLocator(e.project.configuration,et)} conflicts with parent dependency ${G.prettyLocator(e.project.configuration,b)}`})}else{let b=Ae.get(Ye);if(b){let w=b.target,S=G.parseLocator(Array.isArray(w)?`${w[0]}@${w[1]}`:`${Ye}@${w}`);wIe(S,et)||r.push({messageName:71,text:`Cannot link ${G.prettyIdent(e.project.configuration,G.parseIdent(xe.name))} into ${G.prettyLocator(e.project.configuration,G.parseLocator(`${R.name}@${R.reference}`))} dependency 
${G.prettyLocator(e.project.configuration,et)} conflicts with dependency ${G.prettyLocator(e.project.configuration,S)} from sibling portal ${G.prettyIdent(e.project.configuration,G.parseIdent(b.portal.name))}`})}else Ae.set(Ye,{target:et.reference,portal:xe})}}}}let rt=e.hoistingLimitsByCwd?.get(te),Te=H?te:K.relative(p,ue.toPortablePath(ht.packageLocation))||It.dot,Fe=e.hoistingLimitsByCwd?.get(Te);v(ne,ht,xe,ce,C,Ee,Te,rt==="dependencies"||Fe==="dependencies"||Fe==="workspaces")}}};return v(A.name,u,A,h,u,u.packageDependencies,It.dot,!1),{packageTree:h,hoistingLimits:a,errors:r,preserveSymlinksRequired:o}};function IIe(t,e,r){let o=r.resolveVirtual&&e.reference&&e.reference.startsWith("virtual:")?r.resolveVirtual(t.packageLocation):t.packageLocation;return ue.toPortablePath(o||t.packageLocation)}function BIt(t,e,r){let o=e.getLocator(t.name.replace(Oh,""),t.reference),a=e.getPackageInformation(o);if(a===null)throw new Error("Assertion failed: Expected the package to be registered");return r.pnpifyFs?{linkType:"SOFT",target:ue.toPortablePath(a.packageLocation)}:{linkType:a.linkType,target:IIe(a,t,e)}}var vIt=(t,e,r)=>{let o=new Map,a=(E,I,v)=>{let{linkType:x,target:C}=BIt(E,t,r);return{locator:gA(E),nodePath:I,target:C,linkType:x,aliases:v}},n=E=>{let[I,v]=E.split("/");return v?{scope:I,name:v}:{scope:null,name:I}},u=new Set,A=(E,I,v)=>{if(u.has(E))return;u.add(E);let x=Array.from(E.references).sort().join("#");for(let C of E.dependencies){let R=Array.from(C.references).sort().join("#");if(C.identName===E.identName.replace(Oh,"")&&R===x)continue;let L=Array.from(C.references).sort(),U={name:C.identName,reference:L[0]},{name:z,scope:te}=n(C.name),ae=te?[te,z]:[z],le=K.join(I,CIe),ce=K.join(le,...ae),Ce=`${v}/${U.name}`,de=a(U,v,L.slice(1)),Be=!1;if(de.linkType==="SOFT"&&r.project){let Ee=r.project.workspacesByCwd.get(de.target.slice(0,-1));Be=!!(Ee&&!Ee.manifest.name)}if(!C.name.endsWith(Oh)&&!Be){let Ee=o.get(ce);if(Ee){if(Ee.dirList)throw new Error(`Assertion failed: ${ce} cannot merge dir node with leaf node`);{let we=G.parseLocator(Ee.locator),Ae=G.parseLocator(de.locator);if(Ee.linkType!==de.linkType)throw new Error(`Assertion failed: ${ce} cannot merge nodes with different link types ${Ee.nodePath}/${G.stringifyLocator(we)} and ${v}/${G.stringifyLocator(Ae)}`);if(we.identHash!==Ae.identHash)throw new Error(`Assertion failed: ${ce} cannot merge nodes with different idents ${Ee.nodePath}/${G.stringifyLocator(we)} and ${v}/s${G.stringifyLocator(Ae)}`);de.aliases=[...de.aliases,...Ee.aliases,G.parseLocator(Ee.locator).reference]}}o.set(ce,de);let g=ce.split("/"),me=g.indexOf(CIe);for(let we=g.length-1;me>=0&&we>me;we--){let Ae=ue.toPortablePath(g.slice(0,we).join(K.sep)),ne=g[we],Z=o.get(Ae);if(!Z)o.set(Ae,{dirList:new Set([ne])});else if(Z.dirList){if(Z.dirList.has(ne))break;Z.dirList.add(ne)}}}A(C,de.linkType==="SOFT"?de.target:ce,Ce)}},p=a({name:e.name,reference:Array.from(e.references)[0]},"",[]),h=p.target;return o.set(h,p),A(e,h,""),o};Ge();Ge();Pt();Pt();nA();Nl();var rj={};Vt(rj,{PnpInstaller:()=>sd,PnpLinker:()=>Hh,UnplugCommand:()=>cC,default:()=>e1t,getPnpPath:()=>qh,jsInstallUtils:()=>mA,pnpUtils:()=>jB,quotePathIfNeeded:()=>o1e});Pt();var s1e=ve("url");Ge();Ge();Pt();Pt();var 
BIe={DEFAULT:{collapsed:!1,next:{"*":"DEFAULT"}},TOP_LEVEL:{collapsed:!1,next:{fallbackExclusionList:"FALLBACK_EXCLUSION_LIST",packageRegistryData:"PACKAGE_REGISTRY_DATA","*":"DEFAULT"}},FALLBACK_EXCLUSION_LIST:{collapsed:!1,next:{"*":"FALLBACK_EXCLUSION_ENTRIES"}},FALLBACK_EXCLUSION_ENTRIES:{collapsed:!0,next:{"*":"FALLBACK_EXCLUSION_DATA"}},FALLBACK_EXCLUSION_DATA:{collapsed:!0,next:{"*":"DEFAULT"}},PACKAGE_REGISTRY_DATA:{collapsed:!1,next:{"*":"PACKAGE_REGISTRY_ENTRIES"}},PACKAGE_REGISTRY_ENTRIES:{collapsed:!0,next:{"*":"PACKAGE_STORE_DATA"}},PACKAGE_STORE_DATA:{collapsed:!1,next:{"*":"PACKAGE_STORE_ENTRIES"}},PACKAGE_STORE_ENTRIES:{collapsed:!0,next:{"*":"PACKAGE_INFORMATION_DATA"}},PACKAGE_INFORMATION_DATA:{collapsed:!1,next:{packageDependencies:"PACKAGE_DEPENDENCIES","*":"DEFAULT"}},PACKAGE_DEPENDENCIES:{collapsed:!1,next:{"*":"PACKAGE_DEPENDENCY"}},PACKAGE_DEPENDENCY:{collapsed:!0,next:{"*":"DEFAULT"}}};function DIt(t,e,r){let o="";o+="[";for(let a=0,n=t.length;a<n;++a)a!==0&&(o+=", "),o+=lQ(String(a),t[a],e,r).replace(/^ +/g,"");return o+="]",o}function PIt(t,e,r){let o=`${r} `,a="";a+=r,a+=`[
+`;for(let n=0,u=t.length;n<u;++n)n!==0&&(a+=",",a+=`
+`),a+=o,a+=lQ(String(n),t[n],e,o).replace(/^ +/g,"");return t.length!==0&&(a+=`
+`),a+=r,a+="]",a}function SIt(t,e,r){let o=Object.keys(t),a="";a+="{";let A=0;for(let n=0,u=o.length;n<u;++n){let p=o[n],h=t[p];typeof h>"u"||(A!==0&&(a+=", "),a+=JSON.stringify(p),a+=": ",a+=lQ(p,h,e,r).replace(/^ +/g,""),A+=1)}return a+="}",a}function bIt(t,e,r){let o=Object.keys(t),a=`${r} `,n="";n+=r,n+=`{
+`;let u=0;for(let A=0,p=o.length;A<p;++A){let h=o[A],E=t[h];typeof E>"u"||(u!==0&&(n+=",",n+=`
+`),n+=a,n+=JSON.stringify(h),n+=": ",n+=lQ(h,E,e,a).replace(/^ +/g,""),u+=1)}return u!==0&&(n+=`
+`),n+=r,n+="}",n}function lQ(t,e,r,o){let{next:a}=BIe[r],n=a[t]||a["*"];return vIe(e,n,o)}function vIe(t,e,r){let{collapsed:o}=BIe[e];return Array.isArray(t)?o?DIt(t,e,r):PIt(t,e,r):typeof t=="object"&&t!==null?o?SIt(t,e,r):bIt(t,e,r):JSON.stringify(t)}function DIe(t){return vIe(t,"TOP_LEVEL","")}function RB(t,e){let r=Array.from(t);Array.isArray(e)||(e=[e]);let o=[];for(let n of e)o.push(r.map(u=>n(u)));let a=r.map((n,u)=>u);return a.sort((n,u)=>{for(let A of o){let p=A[n]<A[u]?-1:A[n]>A[u]?1:0;if(p!==0)return p}return 0}),a.map(n=>r[n])}function xIt(t){let e=new Map,r=RB(t.fallbackExclusionList||[],[({name:o,reference:a})=>o,({name:o,reference:a})=>a]);for(let{name:o,reference:a}of r){let n=e.get(o);typeof n>"u"&&e.set(o,n=new Set),n.add(a)}return Array.from(e).map(([o,a])=>[o,Array.from(a)])}function kIt(t){return RB(t.fallbackPool||[],([e])=>e)}function QIt(t){let e=[];for(let[r,o]of RB(t.packageRegistry,([a])=>a===null?"0":`1${a}`)){let a=[];e.push([r,a]);for(let[n,{packageLocation:u,packageDependencies:A,packagePeers:p,linkType:h,discardFromLookup:E}]of RB(o,([I])=>I===null?"0":`1${I}`)){let I=[];r!==null&&n!==null&&!A.has(r)&&I.push([r,n]);for(let[C,R]of RB(A.entries(),([L])=>L))I.push([C,R]);let v=p&&p.size>0?Array.from(p):void 0,x=E||void 0;a.push([n,{packageLocation:u,packageDependencies:I,packagePeers:v,linkType:h,discardFromLookup:x}])}}return e}function TB(t){return{__info:["This file is automatically generated. 
Do not touch it, or risk","your modifications being lost."],dependencyTreeRoots:t.dependencyTreeRoots,enableTopLevelFallback:t.enableTopLevelFallback||!1,ignorePatternData:t.ignorePattern||null,fallbackExclusionList:xIt(t),fallbackPool:kIt(t),packageRegistryData:QIt(t)}}var bIe=Ze(SIe());function xIe(t,e){return[t?`${t} +`:"",`/* eslint-disable */ +`,`// @ts-nocheck +`,`"use strict"; +`,` +`,e,` +`,(0,bIe.default)()].join("")}function FIt(t){return JSON.stringify(t,null,2)}function RIt(t){return`'${t.replace(/\\/g,"\\\\").replace(/'/g,"\\'").replace(/\n/g,`\\ +`)}'`}function TIt(t){return[`const RAW_RUNTIME_STATE = +`,`${RIt(DIe(t))}; + +`,`function $$SETUP_STATE(hydrateRuntimeState, basePath) { +`,` return hydrateRuntimeState(JSON.parse(RAW_RUNTIME_STATE), {basePath: basePath || __dirname}); +`,`} +`].join("")}function NIt(){return[`function $$SETUP_STATE(hydrateRuntimeState, basePath) { +`,` const fs = require('fs'); +`,` const path = require('path'); +`,` const pnpDataFilepath = path.resolve(__dirname, ${JSON.stringify(dr.pnpData)}); +`,` return hydrateRuntimeState(JSON.parse(fs.readFileSync(pnpDataFilepath, 'utf8')), {basePath: basePath || __dirname}); +`,`} +`].join("")}function kIe(t){let e=TB(t),r=TIt(e);return xIe(t.shebang,r)}function QIe(t){let e=TB(t),r=NIt(),o=xIe(t.shebang,r);return{dataFile:FIt(e),loaderFile:o}}Pt();function Uq(t,{basePath:e}){let r=ue.toPortablePath(e),o=K.resolve(r),a=t.ignorePatternData!==null?new RegExp(t.ignorePatternData):null,n=new Map,u=new Map(t.packageRegistryData.map(([I,v])=>[I,new Map(v.map(([x,C])=>{if(I===null!=(x===null))throw new Error("Assertion failed: The name and reference should be null, or neither should");let R=C.discardFromLookup??!1,L={name:I,reference:x},U=n.get(C.packageLocation);U?(U.discardFromLookup=U.discardFromLookup&&R,R||(U.locator=L)):n.set(C.packageLocation,{locator:L,discardFromLookup:R});let z=null;return[x,{packageDependencies:new Map(C.packageDependencies),packagePeers:new Set(C.packagePeers),linkType:C.linkType,discardFromLookup:R,get packageLocation(){return z||(z=K.join(o,C.packageLocation))}}]}))])),A=new Map(t.fallbackExclusionList.map(([I,v])=>[I,new Set(v)])),p=new Map(t.fallbackPool),h=t.dependencyTreeRoots,E=t.enableTopLevelFallback;return{basePath:r,dependencyTreeRoots:h,enableTopLevelFallback:E,fallbackExclusionList:A,fallbackPool:p,ignorePattern:a,packageLocatorsByLocations:n,packageRegistry:u}}Pt();Pt();var ep=ve("module"),id=ve("url"),zq=ve("util");var Oo=ve("url");var NIe=Ze(ve("assert"));var _q=Array.isArray,NB=JSON.stringify,LB=Object.getOwnPropertyNames,nd=(t,e)=>Object.prototype.hasOwnProperty.call(t,e),Hq=(t,e)=>RegExp.prototype.exec.call(t,e),qq=(t,...e)=>RegExp.prototype[Symbol.replace].apply(t,e),Uh=(t,...e)=>String.prototype.endsWith.apply(t,e),jq=(t,...e)=>String.prototype.includes.apply(t,e),Gq=(t,...e)=>String.prototype.lastIndexOf.apply(t,e),MB=(t,...e)=>String.prototype.indexOf.apply(t,e),FIe=(t,...e)=>String.prototype.replace.apply(t,e),_h=(t,...e)=>String.prototype.slice.apply(t,e),dA=(t,...e)=>String.prototype.startsWith.apply(t,e),RIe=Map,TIe=JSON.parse;function OB(t,e,r){return class extends r{constructor(...o){super(e(...o)),this.code=t,this.name=`${r.name} [${t}]`}}}var LIe=OB("ERR_PACKAGE_IMPORT_NOT_DEFINED",(t,e,r)=>`Package import specifier "${t}" is not defined${e?` in package ${e}package.json`:""} imported from ${r}`,TypeError),Yq=OB("ERR_INVALID_MODULE_SPECIFIER",(t,e,r=void 0)=>`Invalid module "${t}" ${e}${r?` imported from 
${r}`:""}`,TypeError),MIe=OB("ERR_INVALID_PACKAGE_TARGET",(t,e,r,o=!1,a=void 0)=>{let n=typeof r=="string"&&!o&&r.length&&!dA(r,"./");return e==="."?((0,NIe.default)(o===!1),`Invalid "exports" main target ${NB(r)} defined in the package config ${t}package.json${a?` imported from ${a}`:""}${n?'; targets must start with "./"':""}`):`Invalid "${o?"imports":"exports"}" target ${NB(r)} defined for '${e}' in the package config ${t}package.json${a?` imported from ${a}`:""}${n?'; targets must start with "./"':""}`},Error),UB=OB("ERR_INVALID_PACKAGE_CONFIG",(t,e,r)=>`Invalid package config ${t}${e?` while importing ${e}`:""}${r?`. ${r}`:""}`,Error),OIe=OB("ERR_PACKAGE_PATH_NOT_EXPORTED",(t,e,r=void 0)=>e==="."?`No "exports" main defined in ${t}package.json${r?` imported from ${r}`:""}`:`Package subpath '${e}' is not defined by "exports" in ${t}package.json${r?` imported from ${r}`:""}`,Error);var uQ=ve("url");function UIe(t,e){let r=Object.create(null);for(let o=0;oe):t+e}_B(r,t,o,u,a)}Hq(HIe,_h(t,2))!==null&&_B(r,t,o,u,a);let p=new URL(t,o),h=p.pathname,E=new URL(".",o).pathname;if(dA(h,E)||_B(r,t,o,u,a),e==="")return p;if(Hq(HIe,e)!==null){let I=n?FIe(r,"*",()=>e):r+e;OIt(I,o,u,a)}return n?new URL(qq(qIe,p.href,()=>e)):new URL(e,p)}function _It(t){let e=+t;return`${e}`!==t?!1:e>=0&&e<4294967295}function lC(t,e,r,o,a,n,u,A){if(typeof e=="string")return UIt(e,r,o,t,a,n,u,A);if(_q(e)){if(e.length===0)return null;let p;for(let h=0;hn?-1:n>a||r===-1?1:o===-1||t.length>e.length?-1:e.length>t.length?1:0}function HIt(t,e,r){if(typeof t=="string"||_q(t))return!0;if(typeof t!="object"||t===null)return!1;let o=LB(t),a=!1,n=0;for(let u=0;u=h.length&&Uh(e,I)&&GIe(n,h)===1&&Gq(h,"*")===E&&(n=h,u=_h(e,E,e.length-I.length))}}if(n){let p=r[n],h=lC(t,p,u,n,o,!0,!1,a);return h==null&&Wq(e,t,o),h}Wq(e,t,o)}function WIe({name:t,base:e,conditions:r,readFileSyncFn:o}){if(t==="#"||dA(t,"#/")||Uh(t,"/")){let u="is not a valid internal imports specifier name";throw new Yq(t,u,(0,Oo.fileURLToPath)(e))}let a,n=_Ie(e,o);if(n.exists){a=(0,Oo.pathToFileURL)(n.pjsonPath);let u=n.imports;if(u)if(nd(u,t)&&!jq(t,"*")){let A=lC(a,u[t],"",t,e,!1,!0,r);if(A!=null)return A}else{let A="",p,h=LB(u);for(let E=0;E=I.length&&Uh(t,x)&&GIe(A,I)===1&&Gq(I,"*")===v&&(A=I,p=_h(t,v,t.length-x.length))}}if(A){let E=u[A],I=lC(a,E,p,A,e,!0,!0,r);if(I!=null)return I}}}MIt(t,a,e)}Pt();var jIt=new Set(["BUILTIN_NODE_RESOLUTION_FAILED","MISSING_DEPENDENCY","MISSING_PEER_DEPENDENCY","QUALIFIED_PATH_RESOLUTION_FAILED","UNDECLARED_DEPENDENCY"]);function ts(t,e,r={},o){o??=jIt.has(t)?"MODULE_NOT_FOUND":t;let a={configurable:!0,writable:!0,enumerable:!1};return Object.defineProperties(new Error(e),{code:{...a,value:o},pnpCode:{...a,value:t},data:{...a,value:r}})}function cu(t){return ue.normalize(ue.fromPortablePath(t))}var JIe=Ze(VIe());function XIe(t){return GIt(),Vq[t]}var Vq;function GIt(){Vq||(Vq={"--conditions":[],...zIe(YIt()),...zIe(process.execArgv)})}function zIe(t){return(0,JIe.default)({"--conditions":[String],"-C":"--conditions"},{argv:t,permissive:!0})}function YIt(){let t=[],e=WIt(process.env.NODE_OPTIONS||"",t);return t.length,e}function WIt(t,e){let r=[],o=!1,a=!0;for(let n=0;nparseInt(t,10)),ZIe=Ua>19||Ua===19&&$f>=2||Ua===18&&$f>=13,xJt=Ua===20&&$f<6||Ua===19&&$f>=3,kJt=Ua>19||Ua===19&&$f>=6,QJt=Ua>=21||Ua===20&&$f>=10||Ua===18&&$f>=19,FJt=Ua>=21||Ua===20&&$f>=10||Ua===18&&$f>=20,RJt=Ua>=22;function 
$Ie(t){if(process.env.WATCH_REPORT_DEPENDENCIES&&process.send)if(t=t.map(e=>ue.fromPortablePath(zs.resolveVirtual(ue.toPortablePath(e)))),ZIe)process.send({"watch:require":t});else for(let e of t)process.send({"watch:require":e})}function Jq(t,e){let r=Number(process.env.PNP_ALWAYS_WARN_ON_FALLBACK)>0,o=Number(process.env.PNP_DEBUG_LEVEL),a=/^(?![a-zA-Z]:[\\/]|\\\\|\.{0,2}(?:\/|$))((?:node:)?(?:@[^/]+\/)?[^/]+)\/*(.*|)$/,n=/^(\/|\.{1,2}(\/|$))/,u=/\/$/,A=/^\.{0,2}\//,p={name:null,reference:null},h=[],E=new Set;if(t.enableTopLevelFallback===!0&&h.push(p),e.compatibilityMode!==!1)for(let Te of["react-scripts","gatsby"]){let Fe=t.packageRegistry.get(Te);if(Fe)for(let ke of Fe.keys()){if(ke===null)throw new Error("Assertion failed: This reference shouldn't be null");h.push({name:Te,reference:ke})}}let{ignorePattern:I,packageRegistry:v,packageLocatorsByLocations:x}=t;function C(Te,Fe){return{fn:Te,args:Fe,error:null,result:null}}function R(Te){let Fe=process.stderr?.hasColors?.()??process.stdout.isTTY,ke=(et,Ue)=>`\x1B[${et}m${Ue}\x1B[0m`,Ye=Te.error;console.error(Ye?ke("31;1",`\u2716 ${Te.error?.message.replace(/\n.*/s,"")}`):ke("33;1","\u203C Resolution")),Te.args.length>0&&console.error();for(let et of Te.args)console.error(` ${ke("37;1","In \u2190")} ${(0,zq.inspect)(et,{colors:Fe,compact:!0})}`);Te.result&&(console.error(),console.error(` ${ke("37;1","Out \u2192")} ${(0,zq.inspect)(Te.result,{colors:Fe,compact:!0})}`));let Se=new Error().stack.match(/(?<=^ +)at.*/gm)?.slice(2)??[];if(Se.length>0){console.error();for(let et of Se)console.error(` ${ke("38;5;244",et)}`)}console.error()}function L(Te,Fe){if(e.allowDebug===!1)return Fe;if(Number.isFinite(o)){if(o>=2)return(...ke)=>{let Ye=C(Te,ke);try{return Ye.result=Fe(...ke)}catch(Se){throw Ye.error=Se}finally{R(Ye)}};if(o>=1)return(...ke)=>{try{return Fe(...ke)}catch(Ye){let Se=C(Te,ke);throw Se.error=Ye,R(Se),Ye}}}return Fe}function U(Te){let Fe=g(Te);if(!Fe)throw ts("INTERNAL","Couldn't find a matching entry in the dependency tree for the specified parent (this is probably an internal error)");return Fe}function z(Te){if(Te.name===null)return!0;for(let Fe of t.dependencyTreeRoots)if(Fe.name===Te.name&&Fe.reference===Te.reference)return!0;return!1}let te=new Set(["node","require",...XIe("--conditions")]);function ae(Te,Fe=te,ke){let Ye=Ae(K.join(Te,"internal.js"),{resolveIgnored:!0,includeDiscardFromLookup:!0});if(Ye===null)throw ts("INTERNAL",`The locator that owns the "${Te}" path can't be found inside the dependency tree (this is probably an internal error)`);let{packageLocation:Se}=U(Ye),et=K.join(Se,dr.manifest);if(!e.fakeFs.existsSync(et))return null;let Ue=JSON.parse(e.fakeFs.readFileSync(et,"utf8"));if(Ue.exports==null)return null;let b=K.contains(Se,Te);if(b===null)throw ts("INTERNAL","unqualifiedPath doesn't contain the packageLocation (this is probably an internal error)");b!=="."&&!A.test(b)&&(b=`./${b}`);try{let w=YIe({packageJSONUrl:(0,id.pathToFileURL)(ue.fromPortablePath(et)),packageSubpath:b,exports:Ue.exports,base:ke?(0,id.pathToFileURL)(ue.fromPortablePath(ke)):null,conditions:Fe});return ue.toPortablePath((0,id.fileURLToPath)(w))}catch(w){throw ts("EXPORTS_RESOLUTION_FAILED",w.message,{unqualifiedPath:cu(Te),locator:Ye,pkgJson:Ue,subpath:cu(b),conditions:Fe},w.code)}}function le(Te,Fe,{extensions:ke}){let Ye;try{Fe.push(Te),Ye=e.fakeFs.statSync(Te)}catch{}if(Ye&&!Ye.isDirectory())return e.fakeFs.realpathSync(Te);if(Ye&&Ye.isDirectory()){let 
Se;try{Se=JSON.parse(e.fakeFs.readFileSync(K.join(Te,dr.manifest),"utf8"))}catch{}let et;if(Se&&Se.main&&(et=K.resolve(Te,Se.main)),et&&et!==Te){let Ue=le(et,Fe,{extensions:ke});if(Ue!==null)return Ue}}for(let Se=0,et=ke.length;Se{let b=JSON.stringify(Ue.name);if(Ye.has(b))return;Ye.add(b);let w=me(Ue);for(let S of w)if(U(S).packagePeers.has(Te))Se(S);else{let F=ke.get(S.name);typeof F>"u"&&ke.set(S.name,F=new Set),F.add(S.reference)}};Se(Fe);let et=[];for(let Ue of[...ke.keys()].sort())for(let b of[...ke.get(Ue)].sort())et.push({name:Ue,reference:b});return et}function Ae(Te,{resolveIgnored:Fe=!1,includeDiscardFromLookup:ke=!1}={}){if(de(Te)&&!Fe)return null;let Ye=K.relative(t.basePath,Te);Ye.match(n)||(Ye=`./${Ye}`),Ye.endsWith("/")||(Ye=`${Ye}/`);do{let Se=x.get(Ye);if(typeof Se>"u"||Se.discardFromLookup&&!ke){Ye=Ye.substring(0,Ye.lastIndexOf("/",Ye.length-2)+1);continue}return Se.locator}while(Ye!=="");return null}function ne(Te){try{return e.fakeFs.readFileSync(ue.toPortablePath(Te),"utf8")}catch(Fe){if(Fe.code==="ENOENT")return;throw Fe}}function Z(Te,Fe,{considerBuiltins:ke=!0}={}){if(Te.startsWith("#"))throw new Error("resolveToUnqualified can not handle private import mappings");if(Te==="pnpapi")return ue.toPortablePath(e.pnpapiResolution);if(ke&&(0,ep.isBuiltin)(Te))return null;let Ye=cu(Te),Se=Fe&&cu(Fe);if(Fe&&de(Fe)&&(!K.isAbsolute(Te)||Ae(Te)===null)){let b=Ce(Te,Fe);if(b===!1)throw ts("BUILTIN_NODE_RESOLUTION_FAILED",`The builtin node resolution algorithm was unable to resolve the requested module (it didn't go through the pnp resolver because the issuer was explicitely ignored by the regexp) + +Require request: "${Ye}" +Required by: ${Se} +`,{request:Ye,issuer:Se});return ue.toPortablePath(b)}let et,Ue=Te.match(a);if(Ue){if(!Fe)throw ts("API_ERROR","The resolveToUnqualified function must be called with a valid issuer when the path isn't a builtin nor absolute",{request:Ye,issuer:Se});let[,b,w]=Ue,S=Ae(Fe);if(!S){let Re=Ce(Te,Fe);if(Re===!1)throw ts("BUILTIN_NODE_RESOLUTION_FAILED",`The builtin node resolution algorithm was unable to resolve the requested module (it didn't go through the pnp resolver because the issuer doesn't seem to be part of the Yarn-managed dependency tree). + +Require path: "${Ye}" +Required by: ${Se} +`,{request:Ye,issuer:Se});return ue.toPortablePath(Re)}let F=U(S).packageDependencies.get(b),J=null;if(F==null&&S.name!==null){let Re=t.fallbackExclusionList.get(S.name);if(!Re||!Re.has(S.reference)){for(let dt=0,jt=h.length;dtz(at))?X=ts("MISSING_PEER_DEPENDENCY",`${S.name} tried to access ${b} (a peer dependency) but it isn't provided by your application; this makes the require call ambiguous and unsound. + +Required package: ${b}${b!==Ye?` (via "${Ye}")`:""} +Required by: ${S.name}@${S.reference} (via ${Se}) +${Re.map(at=>`Ancestor breaking the chain: ${at.name}@${at.reference} +`).join("")} +`,{request:Ye,issuer:Se,issuerLocator:Object.assign({},S),dependencyName:b,brokenAncestors:Re}):X=ts("MISSING_PEER_DEPENDENCY",`${S.name} tried to access ${b} (a peer dependency) but it isn't provided by its ancestors; this makes the require call ambiguous and unsound. 
+ +Required package: ${b}${b!==Ye?` (via "${Ye}")`:""} +Required by: ${S.name}@${S.reference} (via ${Se}) + +${Re.map(at=>`Ancestor breaking the chain: ${at.name}@${at.reference} +`).join("")} +`,{request:Ye,issuer:Se,issuerLocator:Object.assign({},S),dependencyName:b,brokenAncestors:Re})}else F===void 0&&(!ke&&(0,ep.isBuiltin)(Te)?z(S)?X=ts("UNDECLARED_DEPENDENCY",`Your application tried to access ${b}. While this module is usually interpreted as a Node builtin, your resolver is running inside a non-Node resolution context where such builtins are ignored. Since ${b} isn't otherwise declared in your dependencies, this makes the require call ambiguous and unsound. + +Required package: ${b}${b!==Ye?` (via "${Ye}")`:""} +Required by: ${Se} +`,{request:Ye,issuer:Se,dependencyName:b}):X=ts("UNDECLARED_DEPENDENCY",`${S.name} tried to access ${b}. While this module is usually interpreted as a Node builtin, your resolver is running inside a non-Node resolution context where such builtins are ignored. Since ${b} isn't otherwise declared in ${S.name}'s dependencies, this makes the require call ambiguous and unsound. + +Required package: ${b}${b!==Ye?` (via "${Ye}")`:""} +Required by: ${Se} +`,{request:Ye,issuer:Se,issuerLocator:Object.assign({},S),dependencyName:b}):z(S)?X=ts("UNDECLARED_DEPENDENCY",`Your application tried to access ${b}, but it isn't declared in your dependencies; this makes the require call ambiguous and unsound. + +Required package: ${b}${b!==Ye?` (via "${Ye}")`:""} +Required by: ${Se} +`,{request:Ye,issuer:Se,dependencyName:b}):X=ts("UNDECLARED_DEPENDENCY",`${S.name} tried to access ${b}, but it isn't declared in its dependencies; this makes the require call ambiguous and unsound. + +Required package: ${b}${b!==Ye?` (via "${Ye}")`:""} +Required by: ${S.name}@${S.reference} (via ${Se}) +`,{request:Ye,issuer:Se,issuerLocator:Object.assign({},S),dependencyName:b}));if(F==null){if(J===null||X===null)throw X||new Error("Assertion failed: Expected an error to have been set");F=J;let Re=X.message.replace(/\n.*/g,"");X.message=Re,!E.has(Re)&&o!==0&&(E.add(Re),process.emitWarning(X))}let $=Array.isArray(F)?{name:F[0],reference:F[1]}:{name:b,reference:F},ie=U($);if(!ie.packageLocation)throw ts("MISSING_DEPENDENCY",`A dependency seems valid but didn't get installed for some reason. This might be caused by a partial install, such as dev vs prod. 
+ +Required package: ${$.name}@${$.reference}${$.name!==Ye?` (via "${Ye}")`:""} +Required by: ${S.name}@${S.reference} (via ${Se}) +`,{request:Ye,issuer:Se,dependencyLocator:Object.assign({},$)});let be=ie.packageLocation;w?et=K.join(be,w):et=be}else if(K.isAbsolute(Te))et=K.normalize(Te);else{if(!Fe)throw ts("API_ERROR","The resolveToUnqualified function must be called with a valid issuer when the path isn't a builtin nor absolute",{request:Ye,issuer:Se});let b=K.resolve(Fe);Fe.match(u)?et=K.normalize(K.join(b,Te)):et=K.normalize(K.join(K.dirname(b),Te))}return K.normalize(et)}function xe(Te,Fe,ke=te,Ye){if(n.test(Te))return Fe;let Se=ae(Fe,ke,Ye);return Se?K.normalize(Se):Fe}function Ne(Te,{extensions:Fe=Object.keys(ep.Module._extensions)}={}){let ke=[],Ye=le(Te,ke,{extensions:Fe});if(Ye)return K.normalize(Ye);{$Ie(ke.map(Ue=>ue.fromPortablePath(Ue)));let Se=cu(Te),et=Ae(Te);if(et){let{packageLocation:Ue}=U(et),b=!0;try{e.fakeFs.accessSync(Ue)}catch(w){if(w?.code==="ENOENT")b=!1;else{let S=(w?.message??w??"empty exception thrown").replace(/^[A-Z]/,y=>y.toLowerCase());throw ts("QUALIFIED_PATH_RESOLUTION_FAILED",`Required package exists but could not be accessed (${S}). + +Missing package: ${et.name}@${et.reference} +Expected package location: ${cu(Ue)} +`,{unqualifiedPath:Se,extensions:Fe})}}if(!b){let w=Ue.includes("/unplugged/")?"Required unplugged package missing from disk. This may happen when switching branches without running installs (unplugged packages must be fully materialized on disk to work).":"Required package missing from disk. If you keep your packages inside your repository then restarting the Node process may be enough. Otherwise, try to run an install first.";throw ts("QUALIFIED_PATH_RESOLUTION_FAILED",`${w} + +Missing package: ${et.name}@${et.reference} +Expected package location: ${cu(Ue)} +`,{unqualifiedPath:Se,extensions:Fe})}}throw ts("QUALIFIED_PATH_RESOLUTION_FAILED",`Qualified path resolution failed: we looked for the following paths, but none could be accessed. 
+ +Source path: ${Se} +${ke.map(Ue=>`Not found: ${cu(Ue)} +`).join("")}`,{unqualifiedPath:Se,extensions:Fe})}}function ht(Te,Fe,ke){if(!Fe)throw new Error("Assertion failed: An issuer is required to resolve private import mappings");let Ye=WIe({name:Te,base:(0,id.pathToFileURL)(ue.fromPortablePath(Fe)),conditions:ke.conditions??te,readFileSyncFn:ne});if(Ye instanceof URL)return Ne(ue.toPortablePath((0,id.fileURLToPath)(Ye)),{extensions:ke.extensions});if(Ye.startsWith("#"))throw new Error("Mapping from one private import to another isn't allowed");return H(Ye,Fe,ke)}function H(Te,Fe,ke={}){try{if(Te.startsWith("#"))return ht(Te,Fe,ke);let{considerBuiltins:Ye,extensions:Se,conditions:et}=ke,Ue=Z(Te,Fe,{considerBuiltins:Ye});if(Te==="pnpapi")return Ue;if(Ue===null)return null;let b=()=>Fe!==null?de(Fe):!1,w=(!Ye||!(0,ep.isBuiltin)(Te))&&!b()?xe(Te,Ue,et,Fe):Ue;return Ne(w,{extensions:Se})}catch(Ye){throw Object.hasOwn(Ye,"pnpCode")&&Object.assign(Ye.data,{request:cu(Te),issuer:Fe&&cu(Fe)}),Ye}}function rt(Te){let Fe=K.normalize(Te),ke=zs.resolveVirtual(Fe);return ke!==Fe?ke:null}return{VERSIONS:Be,topLevel:Ee,getLocator:(Te,Fe)=>Array.isArray(Fe)?{name:Fe[0],reference:Fe[1]}:{name:Te,reference:Fe},getDependencyTreeRoots:()=>[...t.dependencyTreeRoots],getAllLocators(){let Te=[];for(let[Fe,ke]of v)for(let Ye of ke.keys())Fe!==null&&Ye!==null&&Te.push({name:Fe,reference:Ye});return Te},getPackageInformation:Te=>{let Fe=g(Te);if(Fe===null)return null;let ke=ue.fromPortablePath(Fe.packageLocation);return{...Fe,packageLocation:ke}},findPackageLocator:Te=>Ae(ue.toPortablePath(Te)),resolveToUnqualified:L("resolveToUnqualified",(Te,Fe,ke)=>{let Ye=Fe!==null?ue.toPortablePath(Fe):null,Se=Z(ue.toPortablePath(Te),Ye,ke);return Se===null?null:ue.fromPortablePath(Se)}),resolveUnqualified:L("resolveUnqualified",(Te,Fe)=>ue.fromPortablePath(Ne(ue.toPortablePath(Te),Fe))),resolveRequest:L("resolveRequest",(Te,Fe,ke)=>{let Ye=Fe!==null?ue.toPortablePath(Fe):null,Se=H(ue.toPortablePath(Te),Ye,ke);return Se===null?null:ue.fromPortablePath(Se)}),resolveVirtual:L("resolveVirtual",Te=>{let Fe=rt(ue.toPortablePath(Te));return Fe!==null?ue.fromPortablePath(Fe):null})}}Pt();var e1e=(t,e,r)=>{let o=TB(t),a=Uq(o,{basePath:e}),n=ue.join(e,dr.pnpCjs);return Jq(a,{fakeFs:r,pnpapiResolution:n})};var Zq=Ze(r1e());qt();var mA={};Vt(mA,{checkManifestCompatibility:()=>n1e,extractBuildRequest:()=>AQ,getExtractHint:()=>$q,hasBindingGyp:()=>ej});Ge();Pt();function n1e(t){return G.isPackageCompatible(t,Xi.getArchitectureSet())}function AQ(t,e,r,{configuration:o}){let a=[];for(let n of["preinstall","install","postinstall"])e.manifest.scripts.has(n)&&a.push({type:0,script:n});return!e.manifest.scripts.has("install")&&e.misc.hasBindingGyp&&a.push({type:1,script:"node-gyp rebuild"}),a.length===0?null:t.linkType!=="HARD"?{skipped:!0,explain:n=>n.reportWarningOnce(6,`${G.prettyLocator(o,t)} lists build scripts, but is referenced through a soft link. 
Soft links don't support build scripts, so they'll be ignored.`)}:r&&r.built===!1?{skipped:!0,explain:n=>n.reportInfoOnce(5,`${G.prettyLocator(o,t)} lists build scripts, but its build has been explicitly disabled through configuration.`)}:!o.get("enableScripts")&&!r.built?{skipped:!0,explain:n=>n.reportWarningOnce(4,`${G.prettyLocator(o,t)} lists build scripts, but all build scripts have been disabled.`)}:n1e(t)?{skipped:!1,directives:a}:{skipped:!0,explain:n=>n.reportWarningOnce(76,`${G.prettyLocator(o,t)} The ${Xi.getArchitectureName()} architecture is incompatible with this package, build skipped.`)}}var VIt=new Set([".exe",".bin",".h",".hh",".hpp",".c",".cc",".cpp",".java",".jar",".node"]);function $q(t){return t.packageFs.getExtractHint({relevantExtensions:VIt})}function ej(t){let e=K.join(t.prefixPath,"binding.gyp");return t.packageFs.existsSync(e)}var jB={};Vt(jB,{getUnpluggedPath:()=>qB});Ge();Pt();function qB(t,{configuration:e}){return K.resolve(e.get("pnpUnpluggedFolder"),G.slugifyLocator(t))}var zIt=new Set([G.makeIdent(null,"open").identHash,G.makeIdent(null,"opn").identHash]),Hh=class{constructor(){this.mode="strict";this.pnpCache=new Map}getCustomDataKey(){return JSON.stringify({name:"PnpLinker",version:2})}supportsPackage(e,r){return this.isEnabled(r)}async findPackageLocation(e,r){if(!this.isEnabled(r))throw new Error("Assertion failed: Expected the PnP linker to be enabled");let o=qh(r.project).cjs;if(!oe.existsSync(o))throw new st(`The project in ${pe.pretty(r.project.configuration,`${r.project.cwd}/package.json`,pe.Type.PATH)} doesn't seem to have been installed - running an install there might help`);let a=He.getFactoryWithDefault(this.pnpCache,o,()=>He.dynamicRequire(o,{cachingStrategy:He.CachingStrategy.FsTime})),n={name:G.stringifyIdent(e),reference:e.reference},u=a.getPackageInformation(n);if(!u)throw new st(`Couldn't find ${G.prettyLocator(r.project.configuration,e)} in the currently installed PnP map - running an install might help`);return ue.toPortablePath(u.packageLocation)}async findPackageLocator(e,r){if(!this.isEnabled(r))return null;let o=qh(r.project).cjs;if(!oe.existsSync(o))return null;let n=He.getFactoryWithDefault(this.pnpCache,o,()=>He.dynamicRequire(o,{cachingStrategy:He.CachingStrategy.FsTime})).findPackageLocator(ue.fromPortablePath(e));return n?G.makeLocator(G.parseIdent(n.name),n.reference):null}makeInstaller(e){return new sd(e)}isEnabled(e){return!(e.project.configuration.get("nodeLinker")!=="pnp"||e.project.configuration.get("pnpMode")!==this.mode)}},sd=class{constructor(e){this.opts=e;this.mode="strict";this.asyncActions=new He.AsyncActions(10);this.packageRegistry=new Map;this.virtualTemplates=new Map;this.isESMLoaderRequired=!1;this.customData={store:new Map};this.unpluggedPaths=new Set;this.opts=e}attachCustomData(e){this.customData=e}async installPackage(e,r,o){let a=G.stringifyIdent(e),n=e.reference,u=!!this.opts.project.tryWorkspaceByLocator(e),A=G.isVirtualLocator(e),p=e.peerDependencies.size>0&&!A,h=!p&&!u,E=!p&&e.linkType!=="SOFT",I,v;if(h||E){let te=A?G.devirtualizeLocator(e):e;I=this.customData.store.get(te.locatorHash),typeof I>"u"&&(I=await JIt(r),e.linkType==="HARD"&&this.customData.store.set(te.locatorHash,I)),I.manifest.type==="module"&&(this.isESMLoaderRequired=!0),v=this.opts.project.getDependencyMeta(te,e.version)}let x=h?AQ(e,I,v,{configuration:this.opts.project.configuration}):null,C=E?await this.unplugPackageIfNeeded(e,I,r,v,o):r.packageFs;if(K.isAbsolute(r.prefixPath))throw new Error(`Assertion failed: Expected the 
prefix path (${r.prefixPath}) to be relative to the parent`);let R=K.resolve(C.getRealPath(),r.prefixPath),L=tj(this.opts.project.cwd,R),U=new Map,z=new Set;if(A){for(let te of e.peerDependencies.values())U.set(G.stringifyIdent(te),null),z.add(G.stringifyIdent(te));if(!u){let te=G.devirtualizeLocator(e);this.virtualTemplates.set(te.locatorHash,{location:tj(this.opts.project.cwd,zs.resolveVirtual(R)),locator:te})}}return He.getMapWithDefault(this.packageRegistry,a).set(n,{packageLocation:L,packageDependencies:U,packagePeers:z,linkType:e.linkType,discardFromLookup:r.discardFromLookup||!1}),{packageLocation:R,buildRequest:x}}async attachInternalDependencies(e,r){let o=this.getPackageInformation(e);for(let[a,n]of r){let u=G.areIdentsEqual(a,n)?n.reference:[G.stringifyIdent(n),n.reference];o.packageDependencies.set(G.stringifyIdent(a),u)}}async attachExternalDependents(e,r){for(let o of r)this.getDiskInformation(o).packageDependencies.set(G.stringifyIdent(e),e.reference)}async finalizeInstall(){if(this.opts.project.configuration.get("pnpMode")!==this.mode)return;let e=qh(this.opts.project);if(this.isEsmEnabled()||await oe.removePromise(e.esmLoader),this.opts.project.configuration.get("nodeLinker")!=="pnp"){await oe.removePromise(e.cjs),await oe.removePromise(e.data),await oe.removePromise(e.esmLoader),await oe.removePromise(this.opts.project.configuration.get("pnpUnpluggedFolder"));return}for(let{locator:E,location:I}of this.virtualTemplates.values())He.getMapWithDefault(this.packageRegistry,G.stringifyIdent(E)).set(E.reference,{packageLocation:I,packageDependencies:new Map,packagePeers:new Set,linkType:"SOFT",discardFromLookup:!1});this.packageRegistry.set(null,new Map([[null,this.getPackageInformation(this.opts.project.topLevelWorkspace.anchoredLocator)]]));let r=this.opts.project.configuration.get("pnpFallbackMode"),o=this.opts.project.workspaces.map(({anchoredLocator:E})=>({name:G.stringifyIdent(E),reference:E.reference})),a=r!=="none",n=[],u=new Map,A=He.buildIgnorePattern([".yarn/sdks/**",...this.opts.project.configuration.get("pnpIgnorePatterns")]),p=this.packageRegistry,h=this.opts.project.configuration.get("pnpShebang");if(r==="dependencies-only")for(let E of this.opts.project.storedPackages.values())this.opts.project.tryWorkspaceByLocator(E)&&n.push({name:G.stringifyIdent(E),reference:E.reference});return await this.asyncActions.wait(),await this.finalizeInstallWithPnp({dependencyTreeRoots:o,enableTopLevelFallback:a,fallbackExclusionList:n,fallbackPool:u,ignorePattern:A,packageRegistry:p,shebang:h}),{customData:this.customData}}async transformPnpSettings(e){}isEsmEnabled(){if(this.opts.project.configuration.sources.has("pnpEnableEsmLoader"))return this.opts.project.configuration.get("pnpEnableEsmLoader");if(this.isESMLoaderRequired)return!0;for(let e of this.opts.project.workspaces)if(e.manifest.type==="module")return!0;return!1}async finalizeInstallWithPnp(e){let r=qh(this.opts.project),o=await this.locateNodeModules(e.ignorePattern);if(o.length>0){this.opts.report.reportWarning(31,"One or more node_modules have been detected and will be removed. 
This operation may take some time.");for(let n of o)await oe.removePromise(n)}if(await this.transformPnpSettings(e),this.opts.project.configuration.get("pnpEnableInlining")){let n=kIe(e);await oe.changeFilePromise(r.cjs,n,{automaticNewlines:!0,mode:493}),await oe.removePromise(r.data)}else{let{dataFile:n,loaderFile:u}=QIe(e);await oe.changeFilePromise(r.cjs,u,{automaticNewlines:!0,mode:493}),await oe.changeFilePromise(r.data,n,{automaticNewlines:!0,mode:420})}this.isEsmEnabled()&&(this.opts.report.reportWarning(0,"ESM support for PnP uses the experimental loader API and is therefore experimental"),await oe.changeFilePromise(r.esmLoader,(0,Zq.default)(),{automaticNewlines:!0,mode:420}));let a=this.opts.project.configuration.get("pnpUnpluggedFolder");if(this.unpluggedPaths.size===0)await oe.removePromise(a);else for(let n of await oe.readdirPromise(a)){let u=K.resolve(a,n);this.unpluggedPaths.has(u)||await oe.removePromise(u)}}async locateNodeModules(e){let r=[],o=e?new RegExp(e):null;for(let a of this.opts.project.workspaces){let n=K.join(a.cwd,"node_modules");if(o&&o.test(K.relative(this.opts.project.cwd,a.cwd))||!oe.existsSync(n))continue;let u=await oe.readdirPromise(n,{withFileTypes:!0}),A=u.filter(p=>!p.isDirectory()||p.name===".bin"||!p.name.startsWith("."));if(A.length===u.length)r.push(n);else for(let p of A)r.push(K.join(n,p.name))}return r}async unplugPackageIfNeeded(e,r,o,a,n){return this.shouldBeUnplugged(e,r,a)?this.unplugPackage(e,o,n):o.packageFs}shouldBeUnplugged(e,r,o){return typeof o.unplugged<"u"?o.unplugged:zIt.has(e.identHash)||e.conditions!=null?!0:r.manifest.preferUnplugged!==null?r.manifest.preferUnplugged:!!(AQ(e,r,o,{configuration:this.opts.project.configuration})?.skipped===!1||r.misc.extractHint)}async unplugPackage(e,r,o){let a=qB(e,{configuration:this.opts.project.configuration});return this.opts.project.disabledLocators.has(e.locatorHash)?new Hu(a,{baseFs:r.packageFs,pathUtils:K}):(this.unpluggedPaths.add(a),o.holdFetchResult(this.asyncActions.set(e.locatorHash,async()=>{let n=K.join(a,r.prefixPath,".ready");await oe.existsPromise(n)||(this.opts.project.storedBuildState.delete(e.locatorHash),await oe.mkdirPromise(a,{recursive:!0}),await oe.copyPromise(a,It.dot,{baseFs:r.packageFs,overwrite:!1}),await oe.writeFilePromise(n,""))})),new gn(a))}getPackageInformation(e){let r=G.stringifyIdent(e),o=e.reference,a=this.packageRegistry.get(r);if(!a)throw new Error(`Assertion failed: The package information store should have been available (for ${G.prettyIdent(this.opts.project.configuration,e)})`);let n=a.get(o);if(!n)throw new Error(`Assertion failed: The package information should have been available (for ${G.prettyLocator(this.opts.project.configuration,e)})`);return n}getDiskInformation(e){let r=He.getMapWithDefault(this.packageRegistry,"@@disk"),o=tj(this.opts.project.cwd,e);return He.getFactoryWithDefault(r,o,()=>({packageLocation:o,packageDependencies:new Map,packagePeers:new Set,linkType:"SOFT",discardFromLookup:!1}))}};function tj(t,e){let r=K.relative(t,e);return r.match(/^\.{0,2}\//)||(r=`./${r}`),r.replace(/\/?$/,"/")}async function JIt(t){let e=await Ut.tryFind(t.prefixPath,{baseFs:t.packageFs})??new Ut,r=new Set(["preinstall","install","postinstall"]);for(let o of e.scripts.keys())r.has(o)||e.scripts.delete(o);return{manifest:{scripts:e.scripts,preferUnplugged:e.preferUnplugged,type:e.type},misc:{extractHint:$q(t),hasBindingGyp:ej(t)}}}Ge();Ge();qt();var i1e=Ze($o());var cC=class extends 
ut{constructor(){super(...arguments);this.all=ge.Boolean("-A,--all",!1,{description:"Unplug direct dependencies from the entire project"});this.recursive=ge.Boolean("-R,--recursive",!1,{description:"Unplug both direct and transitive dependencies"});this.json=ge.Boolean("--json",!1,{description:"Format the output as an NDJSON stream"});this.patterns=ge.Rest()}static{this.paths=[["unplug"]]}static{this.usage=it.Usage({description:"force the unpacking of a list of packages",details:"\n This command will add the selectors matching the specified patterns to the list of packages that must be unplugged when installed.\n\n A package being unplugged means that instead of being referenced directly through its archive, it will be unpacked at install time in the directory configured via `pnpUnpluggedFolder`. Note that unpacking packages this way is generally not recommended because it'll make it harder to store your packages within the repository. However, it's a good approach to quickly and safely debug some packages, and can even sometimes be required depending on the context (for example when the package contains shellscripts).\n\n Running the command will set a persistent flag inside your top-level `package.json`, in the `dependenciesMeta` field. As such, to undo its effects, you'll need to revert the changes made to the manifest and run `yarn install` to apply the modification.\n\n By default, only direct dependencies from the current workspace are affected. If `-A,--all` is set, direct dependencies from the entire project are affected. Using the `-R,--recursive` flag will affect transitive dependencies as well as direct ones.\n\n This command accepts glob patterns inside the scope and name components (not the range). Make sure to escape the patterns to prevent your own shell from trying to expand them.\n ",examples:[["Unplug the lodash dependency from the active workspace","yarn unplug lodash"],["Unplug all instances of lodash referenced by any workspace","yarn unplug lodash -A"],["Unplug all instances of lodash referenced by the active workspace and its dependencies","yarn unplug lodash -R"],["Unplug all instances of lodash, anywhere","yarn unplug lodash -AR"],["Unplug one specific version of lodash","yarn unplug lodash@1.2.3"],["Unplug all packages with the `@babel` scope","yarn unplug '@babel/*'"],["Unplug all packages (only for testing, not recommended)","yarn unplug -R '*'"]]})}async execute(){let r=await Ke.find(this.context.cwd,this.context.plugins),{project:o,workspace:a}=await kt.find(r,this.context.cwd),n=await Gr.find(r);if(!a)throw new sr(o.cwd,this.context.cwd);if(r.get("nodeLinker")!=="pnp")throw new st("This command can only be used if the `nodeLinker` option is set to `pnp`");await o.restoreInstallState();let u=new Set(this.patterns),A=this.patterns.map(x=>{let C=G.parseDescriptor(x),R=C.range!=="unknown"?C:G.makeDescriptor(C,"*");if(!Lr.validRange(R.range))throw new st(`The range of the descriptor patterns must be a valid semver range (${G.prettyDescriptor(r,R)})`);return L=>{let U=G.stringifyIdent(L);return!i1e.default.isMatch(U,G.stringifyIdent(R))||L.version&&!Lr.satisfiesWithPrereleases(L.version,R.range)?!1:(u.delete(x),!0)}}),p=()=>{let x=[];for(let C of o.storedPackages.values())!o.tryWorkspaceByLocator(C)&&!G.isVirtualLocator(C)&&A.some(R=>R(C))&&x.push(C);return x},h=x=>{let C=new Set,R=[],L=(U,z)=>{if(C.has(U.locatorHash))return;let 
te=!!o.tryWorkspaceByLocator(U);if(!(z>0&&!this.recursive&&te)&&(C.add(U.locatorHash),!o.tryWorkspaceByLocator(U)&&A.some(ae=>ae(U))&&R.push(U),!(z>0&&!this.recursive)))for(let ae of U.dependencies.values()){let le=o.storedResolutions.get(ae.descriptorHash);if(!le)throw new Error("Assertion failed: The resolution should have been registered");let ce=o.storedPackages.get(le);if(!ce)throw new Error("Assertion failed: The package should have been registered");L(ce,z+1)}};for(let U of x)L(U.anchoredPackage,0);return R},E,I;if(this.all&&this.recursive?(E=p(),I="the project"):this.all?(E=h(o.workspaces),I="any workspace"):(E=h([a]),I="this workspace"),u.size>1)throw new st(`Patterns ${pe.prettyList(r,u,pe.Type.CODE)} don't match any packages referenced by ${I}`);if(u.size>0)throw new st(`Pattern ${pe.prettyList(r,u,pe.Type.CODE)} doesn't match any packages referenced by ${I}`);E=He.sortMap(E,x=>G.stringifyLocator(x));let v=await Rt.start({configuration:r,stdout:this.context.stdout,json:this.json},async x=>{for(let C of E){let R=C.version??"unknown",L=o.topLevelWorkspace.manifest.ensureDependencyMeta(G.makeDescriptor(C,R));L.unplugged=!0,x.reportInfo(0,`Will unpack ${G.prettyLocator(r,C)} to ${pe.pretty(r,qB(C,{configuration:r}),pe.Type.PATH)}`),x.reportJson({locator:G.stringifyLocator(C),version:R})}await o.topLevelWorkspace.persistManifest(),this.json||x.reportSeparator()});return v.hasErrors()?v.exitCode():await o.installWithNewReport({json:this.json,stdout:this.context.stdout},{cache:n})}};var qh=t=>({cjs:K.join(t.cwd,dr.pnpCjs),data:K.join(t.cwd,dr.pnpData),esmLoader:K.join(t.cwd,dr.pnpEsmLoader)}),o1e=t=>/\s/.test(t)?JSON.stringify(t):t;async function XIt(t,e,r){let o=/\s*--require\s+\S*\.pnp\.c?js\s*/g,a=/\s*--experimental-loader\s+\S*\.pnp\.loader\.mjs\s*/,n=(e.NODE_OPTIONS??"").replace(o," ").replace(a," ").trim();if(t.configuration.get("nodeLinker")!=="pnp"){e.NODE_OPTIONS=n||void 0;return}let u=qh(t),A=`--require ${o1e(ue.fromPortablePath(u.cjs))}`;oe.existsSync(u.esmLoader)&&(A=`${A} --experimental-loader ${(0,s1e.pathToFileURL)(ue.fromPortablePath(u.esmLoader)).href}`),oe.existsSync(u.cjs)&&(e.NODE_OPTIONS=n?`${A} ${n}`:A)}async function ZIt(t,e){let r=qh(t);e(r.cjs),e(r.data),e(r.esmLoader),e(t.configuration.get("pnpUnpluggedFolder"))}var $It={hooks:{populateYarnPaths:ZIt,setupScriptEnvironment:XIt},configuration:{nodeLinker:{description:'The linker used for installing Node packages, one of: "pnp", "pnpm", or "node-modules"',type:"STRING",default:"pnp"},winLinkType:{description:"Whether Yarn should use Windows Junctions or symlinks when creating links on Windows.",type:"STRING",values:["junctions","symlinks"],default:"junctions"},pnpMode:{description:"If 'strict', generates standard PnP maps. If 'loose', merges them with the n_m resolution.",type:"STRING",default:"strict"},pnpShebang:{description:"String to prepend to the generated PnP script",type:"STRING",default:"#!/usr/bin/env node"},pnpIgnorePatterns:{description:"Array of glob patterns; files matching them will use the classic resolution",type:"STRING",default:[],isArray:!0},pnpEnableEsmLoader:{description:"If true, Yarn will generate an ESM loader (`.pnp.loader.mjs`). 
If this is not explicitly set Yarn tries to automatically detect whether ESM support is required.",type:"BOOLEAN",default:!1},pnpEnableInlining:{description:"If true, the PnP data will be inlined along with the generated loader",type:"BOOLEAN",default:!0},pnpFallbackMode:{description:"If true, the generated PnP loader will follow the top-level fallback rule",type:"STRING",default:"dependencies-only"},pnpUnpluggedFolder:{description:"Folder where the unplugged packages must be stored",type:"ABSOLUTE_PATH",default:"./.yarn/unplugged"}},linkers:[Hh],commands:[cC]},e1t=$It;var h1e=Ze(A1e());qt();var cj=Ze(ve("crypto")),g1e=Ze(ve("fs")),d1e=1,Di="node_modules",fQ=".bin",m1e=".yarn-state.yml",m1t=1e3,uj=(o=>(o.CLASSIC="classic",o.HARDLINKS_LOCAL="hardlinks-local",o.HARDLINKS_GLOBAL="hardlinks-global",o))(uj||{}),GB=class{constructor(){this.installStateCache=new Map}getCustomDataKey(){return JSON.stringify({name:"NodeModulesLinker",version:3})}supportsPackage(e,r){return this.isEnabled(r)}async findPackageLocation(e,r){if(!this.isEnabled(r))throw new Error("Assertion failed: Expected the node-modules linker to be enabled");let o=r.project.tryWorkspaceByLocator(e);if(o)return o.cwd;let a=await He.getFactoryWithDefault(this.installStateCache,r.project.cwd,async()=>await lj(r.project,{unrollAliases:!0}));if(a===null)throw new st("Couldn't find the node_modules state file - running an install might help (findPackageLocation)");let n=a.locatorMap.get(G.stringifyLocator(e));if(!n){let p=new st(`Couldn't find ${G.prettyLocator(r.project.configuration,e)} in the currently installed node_modules map - running an install might help`);throw p.code="LOCATOR_NOT_INSTALLED",p}let u=n.locations.sort((p,h)=>p.split(K.sep).length-h.split(K.sep).length),A=K.join(r.project.configuration.startingCwd,Di);return u.find(p=>K.contains(A,p))||n.locations[0]}async findPackageLocator(e,r){if(!this.isEnabled(r))return null;let o=await He.getFactoryWithDefault(this.installStateCache,r.project.cwd,async()=>await lj(r.project,{unrollAliases:!0}));if(o===null)return null;let{locationRoot:a,segments:n}=pQ(K.resolve(e),{skipPrefix:r.project.cwd}),u=o.locationTree.get(a);if(!u)return null;let A=u.locator;for(let p of n){if(u=u.children.get(p),!u)break;A=u.locator||A}return G.parseLocator(A)}makeInstaller(e){return new aj(e)}isEnabled(e){return e.project.configuration.get("nodeLinker")==="node-modules"}},aj=class{constructor(e){this.opts=e;this.localStore=new Map;this.realLocatorChecksums=new Map;this.customData={store:new Map}}attachCustomData(e){this.customData=e}async installPackage(e,r){let o=K.resolve(r.packageFs.getRealPath(),r.prefixPath),a=this.customData.store.get(e.locatorHash);if(typeof a>"u"&&(a=await y1t(e,r),e.linkType==="HARD"&&this.customData.store.set(e.locatorHash,a)),!G.isPackageCompatible(e,this.opts.project.configuration.getSupportedArchitectures()))return{packageLocation:null,buildRequest:null};let n=new Map,u=new Set;n.has(G.stringifyIdent(e))||n.set(G.stringifyIdent(e),e.reference);let A=e;if(G.isVirtualLocator(e)){A=G.devirtualizeLocator(e);for(let E of e.peerDependencies.values())n.set(G.stringifyIdent(E),null),u.add(G.stringifyIdent(E))}let p={packageLocation:`${ue.fromPortablePath(o)}/`,packageDependencies:n,packagePeers:u,linkType:e.linkType,discardFromLookup:r.discardFromLookup??!1};this.localStore.set(e.locatorHash,{pkg:e,customPackageData:a,dependencyMeta:this.opts.project.getDependencyMeta(e,e.version),pnpNode:p});let h=r.checksum?r.checksum.substring(r.checksum.indexOf("/")+1):null;return 
this.realLocatorChecksums.set(A.locatorHash,h),{packageLocation:o,buildRequest:null}}async attachInternalDependencies(e,r){let o=this.localStore.get(e.locatorHash);if(typeof o>"u")throw new Error("Assertion failed: Expected information object to have been registered");for(let[a,n]of r){let u=G.areIdentsEqual(a,n)?n.reference:[G.stringifyIdent(n),n.reference];o.pnpNode.packageDependencies.set(G.stringifyIdent(a),u)}}async attachExternalDependents(e,r){throw new Error("External dependencies haven't been implemented for the node-modules linker")}async finalizeInstall(){if(this.opts.project.configuration.get("nodeLinker")!=="node-modules")return;let e=new zs({baseFs:new rA({maxOpenFiles:80,readOnlyArchives:!0})}),r=await lj(this.opts.project),o=this.opts.project.configuration.get("nmMode");(r===null||o!==r.nmMode)&&(this.opts.project.storedBuildState.clear(),r={locatorMap:new Map,binSymlinks:new Map,locationTree:new Map,nmMode:o,mtimeMs:0});let a=new Map(this.opts.project.workspaces.map(v=>{let x=this.opts.project.configuration.get("nmHoistingLimits");try{x=He.validateEnum(QB,v.manifest.installConfig?.hoistingLimits??x)}catch{let R=G.prettyWorkspace(this.opts.project.configuration,v);this.opts.report.reportWarning(57,`${R}: Invalid 'installConfig.hoistingLimits' value. Expected one of ${Object.values(QB).join(", ")}, using default: "${x}"`)}return[v.relativeCwd,x]})),n=new Map(this.opts.project.workspaces.map(v=>{let x=this.opts.project.configuration.get("nmSelfReferences");return x=v.manifest.installConfig?.selfReferences??x,[v.relativeCwd,x]})),u={VERSIONS:{std:1},topLevel:{name:null,reference:null},getLocator:(v,x)=>Array.isArray(x)?{name:x[0],reference:x[1]}:{name:v,reference:x},getDependencyTreeRoots:()=>this.opts.project.workspaces.map(v=>{let x=v.anchoredLocator;return{name:G.stringifyIdent(x),reference:x.reference}}),getPackageInformation:v=>{let x=v.reference===null?this.opts.project.topLevelWorkspace.anchoredLocator:G.makeLocator(G.parseIdent(v.name),v.reference),C=this.localStore.get(x.locatorHash);if(typeof C>"u")throw new Error("Assertion failed: Expected the package reference to have been registered");return C.pnpNode},findPackageLocator:v=>{let x=this.opts.project.tryWorkspaceByCwd(ue.toPortablePath(v));if(x!==null){let C=x.anchoredLocator;return{name:G.stringifyIdent(C),reference:C.reference}}throw new Error("Assertion failed: Unimplemented")},resolveToUnqualified:()=>{throw new Error("Assertion failed: Unimplemented")},resolveUnqualified:()=>{throw new Error("Assertion failed: Unimplemented")},resolveRequest:()=>{throw new Error("Assertion failed: Unimplemented")},resolveVirtual:v=>ue.fromPortablePath(zs.resolveVirtual(ue.toPortablePath(v)))},{tree:A,errors:p,preserveSymlinksRequired:h}=FB(u,{pnpifyFs:!1,validateExternalSoftLinks:!0,hoistingLimitsByCwd:a,project:this.opts.project,selfReferencesByCwd:n});if(!A){for(let{messageName:v,text:x}of p)this.opts.report.reportError(v,x);return}let E=Mq(A);await v1t(r,E,{baseFs:e,project:this.opts.project,report:this.opts.report,realLocatorChecksums:this.realLocatorChecksums,loadManifest:async v=>{let x=G.parseLocator(v),C=this.localStore.get(x.locatorHash);if(typeof C>"u")throw new Error("Assertion failed: Expected the slot to exist");return C.customPackageData.manifest}});let I=[];for(let[v,x]of E.entries()){if(C1e(v))continue;let C=G.parseLocator(v),R=this.localStore.get(C.locatorHash);if(typeof R>"u")throw new Error("Assertion failed: Expected the slot to exist");if(this.opts.project.tryWorkspaceByLocator(R.pkg))continue;let 
L=mA.extractBuildRequest(R.pkg,R.customPackageData,R.dependencyMeta,{configuration:this.opts.project.configuration});L&&I.push({buildLocations:x.locations,locator:C,buildRequest:L})}return h&&this.opts.report.reportWarning(72,`The application uses portals and that's why ${pe.pretty(this.opts.project.configuration,"--preserve-symlinks",pe.Type.CODE)} Node option is required for launching it`),{customData:this.customData,records:I}}};async function y1t(t,e){let r=await Ut.tryFind(e.prefixPath,{baseFs:e.packageFs})??new Ut,o=new Set(["preinstall","install","postinstall"]);for(let a of r.scripts.keys())o.has(a)||r.scripts.delete(a);return{manifest:{bin:r.bin,scripts:r.scripts},misc:{hasBindingGyp:mA.hasBindingGyp(e)}}}async function E1t(t,e,r,o,{installChangedByUser:a}){let n="";n+=`# Warning: This file is automatically generated. Removing it is fine, but will +`,n+=`# cause your node_modules installation to become invalidated. +`,n+=` +`,n+=`__metadata: +`,n+=` version: ${d1e} +`,n+=` nmMode: ${o.value} +`;let u=Array.from(e.keys()).sort(),A=G.stringifyLocator(t.topLevelWorkspace.anchoredLocator);for(let E of u){let I=e.get(E);n+=` +`,n+=`${JSON.stringify(E)}: +`,n+=` locations: +`;for(let v of I.locations){let x=K.contains(t.cwd,v);if(x===null)throw new Error(`Assertion failed: Expected the path to be within the project (${v})`);n+=` - ${JSON.stringify(x)} +`}if(I.aliases.length>0){n+=` aliases: +`;for(let v of I.aliases)n+=` - ${JSON.stringify(v)} +`}if(E===A&&r.size>0){n+=` bin: +`;for(let[v,x]of r){let C=K.contains(t.cwd,v);if(C===null)throw new Error(`Assertion failed: Expected the path to be within the project (${v})`);n+=` ${JSON.stringify(C)}: +`;for(let[R,L]of x){let U=K.relative(K.join(v,Di),L);n+=` ${JSON.stringify(R)}: ${JSON.stringify(U)} +`}}}}let p=t.cwd,h=K.join(p,Di,m1e);a&&await oe.removePromise(h),await oe.changeFilePromise(h,n,{automaticNewlines:!0})}async function lj(t,{unrollAliases:e=!1}={}){let r=t.cwd,o=K.join(r,Di,m1e),a;try{a=await oe.statPromise(o)}catch{}if(!a)return null;let n=Ki(await oe.readFilePromise(o,"utf8"));if(n.__metadata.version>d1e)return null;let u=n.__metadata.nmMode||"classic",A=new Map,p=new Map;delete n.__metadata;for(let[h,E]of Object.entries(n)){let I=E.locations.map(x=>K.join(r,x)),v=E.bin;if(v)for(let[x,C]of Object.entries(v)){let R=K.join(r,ue.toPortablePath(x)),L=He.getMapWithDefault(p,R);for(let[U,z]of Object.entries(C))L.set(U,ue.toPortablePath([R,Di,z].join(K.sep)))}if(A.set(h,{target:It.dot,linkType:"HARD",locations:I,aliases:E.aliases||[]}),e&&E.aliases)for(let x of E.aliases){let{scope:C,name:R}=G.parseLocator(h),L=G.makeLocator(G.makeIdent(C,R),x),U=G.stringifyLocator(L);A.set(U,{target:It.dot,linkType:"HARD",locations:I,aliases:[]})}}return{locatorMap:A,binSymlinks:p,locationTree:y1e(A,{skipPrefix:t.cwd}),nmMode:u,mtimeMs:a.mtimeMs}}var AC=async(t,e)=>{if(t.split(K.sep).indexOf(Di)<0)throw new Error(`Assertion failed: trying to remove dir that doesn't contain node_modules: ${t}`);try{if(!e.innerLoop){let o=e.allowSymlink?await oe.statPromise(t):await oe.lstatPromise(t);if(e.allowSymlink&&!o.isDirectory()||!e.allowSymlink&&o.isSymbolicLink()){await oe.unlinkPromise(t);return}}let r=await oe.readdirPromise(t,{withFileTypes:!0});for(let o of r){let a=K.join(t,o.name);o.isDirectory()?(o.name!==Di||e&&e.innerLoop)&&await AC(a,{innerLoop:!0,contentsOnly:!1}):await oe.unlinkPromise(a)}e.contentsOnly||await oe.rmdirPromise(t)}catch(r){if(r.code!=="ENOENT"&&r.code!=="ENOTEMPTY")throw r}},f1e=4,pQ=(t,{skipPrefix:e})=>{let 
r=K.contains(e,t);if(r===null)throw new Error(`Assertion failed: Writing attempt prevented to ${t} which is outside project root: ${e}`);let o=r.split(K.sep).filter(p=>p!==""),a=o.indexOf(Di),n=o.slice(0,a).join(K.sep),u=K.join(e,n),A=o.slice(a);return{locationRoot:u,segments:A}},y1e=(t,{skipPrefix:e})=>{let r=new Map;if(t===null)return r;let o=()=>({children:new Map,linkType:"HARD"});for(let[a,n]of t.entries()){if(n.linkType==="SOFT"&&K.contains(e,n.target)!==null){let A=He.getFactoryWithDefault(r,n.target,o);A.locator=a,A.linkType=n.linkType}for(let u of n.locations){let{locationRoot:A,segments:p}=pQ(u,{skipPrefix:e}),h=He.getFactoryWithDefault(r,A,o);for(let E=0;E{if(process.platform==="win32"&&r==="junctions"){let o;try{o=await oe.lstatPromise(t)}catch{}if(!o||o.isDirectory()){await oe.symlinkPromise(t,e,"junction");return}}await oe.symlinkPromise(K.relative(K.dirname(e),t),e)};async function E1e(t,e,r){let o=K.join(t,`${cj.default.randomBytes(16).toString("hex")}.tmp`);try{await oe.writeFilePromise(o,r);try{await oe.linkPromise(o,e)}catch{}}finally{await oe.unlinkPromise(o)}}async function C1t({srcPath:t,dstPath:e,entry:r,globalHardlinksStore:o,baseFs:a,nmMode:n}){if(r.kind==="file"){if(n.value==="hardlinks-global"&&o&&r.digest){let A=K.join(o,r.digest.substring(0,2),`${r.digest.substring(2)}.dat`),p;try{let h=await oe.statPromise(A);if(h&&(!r.mtimeMs||h.mtimeMs>r.mtimeMs||h.mtimeMs{await oe.mkdirPromise(t,{recursive:!0});let A=async(E=It.dot)=>{let I=K.join(e,E),v=await r.readdirPromise(I,{withFileTypes:!0}),x=new Map;for(let C of v){let R=K.join(E,C.name),L,U=K.join(I,C.name);if(C.isFile()){if(L={kind:"file",mode:(await r.lstatPromise(U)).mode},a.value==="hardlinks-global"){let z=await wn.checksumFile(U,{baseFs:r,algorithm:"sha1"});L.digest=z}}else if(C.isDirectory())L={kind:"directory"};else if(C.isSymbolicLink())L={kind:"symlink",symlinkTo:await r.readlinkPromise(U)};else throw new Error(`Unsupported file type (file: ${U}, mode: 0o${await r.statSync(U).mode.toString(8).padStart(6,"0")})`);if(x.set(R,L),C.isDirectory()&&R!==Di){let z=await A(R);for(let[te,ae]of z)x.set(te,ae)}}return x},p;if(a.value==="hardlinks-global"&&o&&u){let E=K.join(o,u.substring(0,2),`${u.substring(2)}.json`);try{p=new Map(Object.entries(JSON.parse(await oe.readFilePromise(E,"utf8"))))}catch{p=await A()}}else p=await A();let h=!1;for(let[E,I]of p){let v=K.join(e,E),x=K.join(t,E);if(I.kind==="directory")await oe.mkdirPromise(x,{recursive:!0});else if(I.kind==="file"){let C=I.mtimeMs;await C1t({srcPath:v,dstPath:x,entry:I,nmMode:a,baseFs:r,globalHardlinksStore:o}),I.mtimeMs!==C&&(h=!0)}else I.kind==="symlink"&&await Aj(K.resolve(K.dirname(x),I.symlinkTo),x,n)}if(a.value==="hardlinks-global"&&o&&h&&u){let E=K.join(o,u.substring(0,2),`${u.substring(2)}.json`);await oe.removePromise(E),await E1e(o,E,Buffer.from(JSON.stringify(Object.fromEntries(p))))}};function I1t(t,e,r,o){let a=new Map,n=new Map,u=new Map,A=!1,p=(h,E,I,v,x)=>{let C=!0,R=K.join(h,E),L=new Set;if(E===Di||E.startsWith("@")){let z;try{z=oe.statSync(R)}catch{}C=!!z,z?z.mtimeMs>r?(A=!0,L=new Set(oe.readdirSync(R))):L=new Set(I.children.get(E).children.keys()):A=!0;let te=e.get(h);if(te){let ae=K.join(h,Di,fQ),le;try{le=oe.statSync(ae)}catch{}if(!le)A=!0;else if(le.mtimeMs>r){A=!0;let ce=new Set(oe.readdirSync(ae)),Ce=new Map;n.set(h,Ce);for(let[de,Be]of te)ce.has(de)&&Ce.set(de,Be)}else n.set(h,te)}}else C=x.has(E);let U=I.children.get(E);if(C){let{linkType:z,locator:te}=U,ae={children:new Map,linkType:z,locator:te};if(v.children.set(E,ae),te){let 
le=He.getSetWithDefault(u,te);le.add(R),u.set(te,le)}for(let le of U.children.keys())p(R,le,U,ae,L)}else U.locator&&o.storedBuildState.delete(G.parseLocator(U.locator).locatorHash)};for(let[h,E]of t){let{linkType:I,locator:v}=E,x={children:new Map,linkType:I,locator:v};if(a.set(h,x),v){let C=He.getSetWithDefault(u,E.locator);C.add(h),u.set(E.locator,C)}E.children.has(Di)&&p(h,Di,E,x,new Set)}return{locationTree:a,binSymlinks:n,locatorLocations:u,installChangedByUser:A}}function C1e(t){let e=G.parseDescriptor(t);return G.isVirtualDescriptor(e)&&(e=G.devirtualizeDescriptor(e)),e.range.startsWith("link:")}async function B1t(t,e,r,{loadManifest:o}){let a=new Map;for(let[A,{locations:p}]of t){let h=C1e(A)?null:await o(A,p[0]),E=new Map;if(h)for(let[I,v]of h.bin){let x=K.join(p[0],v);v!==""&&oe.existsSync(x)&&E.set(I,v)}a.set(A,E)}let n=new Map,u=(A,p,h)=>{let E=new Map,I=K.contains(r,A);if(h.locator&&I!==null){let v=a.get(h.locator);for(let[x,C]of v){let R=K.join(A,ue.toPortablePath(C));E.set(x,R)}for(let[x,C]of h.children){let R=K.join(A,x),L=u(R,R,C);L.size>0&&n.set(A,new Map([...n.get(A)||new Map,...L]))}}else for(let[v,x]of h.children){let C=u(K.join(A,v),p,x);for(let[R,L]of C)E.set(R,L)}return E};for(let[A,p]of e){let h=u(A,A,p);h.size>0&&n.set(A,new Map([...n.get(A)||new Map,...h]))}return n}var p1e=(t,e)=>{if(!t||!e)return t===e;let r=G.parseLocator(t);G.isVirtualLocator(r)&&(r=G.devirtualizeLocator(r));let o=G.parseLocator(e);return G.isVirtualLocator(o)&&(o=G.devirtualizeLocator(o)),G.areLocatorsEqual(r,o)};function fj(t){return K.join(t.get("globalFolder"),"store")}async function v1t(t,e,{baseFs:r,project:o,report:a,loadManifest:n,realLocatorChecksums:u}){let A=K.join(o.cwd,Di),{locationTree:p,binSymlinks:h,locatorLocations:E,installChangedByUser:I}=I1t(t.locationTree,t.binSymlinks,t.mtimeMs,o),v=y1e(e,{skipPrefix:o.cwd}),x=[],C=async({srcDir:Be,dstDir:Ee,linkType:g,globalHardlinksStore:me,nmMode:we,windowsLinkType:Ae,packageChecksum:ne})=>{let Z=(async()=>{try{g==="SOFT"?(await oe.mkdirPromise(K.dirname(Ee),{recursive:!0}),await Aj(K.resolve(Be),Ee,Ae)):await w1t(Ee,Be,{baseFs:r,globalHardlinksStore:me,nmMode:we,windowsLinkType:Ae,packageChecksum:ne})}catch(xe){throw xe.message=`While persisting ${Be} -> ${Ee} ${xe.message}`,xe}finally{ae.tick()}})().then(()=>x.splice(x.indexOf(Z),1));x.push(Z),x.length>f1e&&await Promise.race(x)},R=async(Be,Ee,g)=>{let me=(async()=>{let we=async(Ae,ne,Z)=>{try{Z.innerLoop||await oe.mkdirPromise(ne,{recursive:!0});let xe=await oe.readdirPromise(Ae,{withFileTypes:!0});for(let Ne of xe){if(!Z.innerLoop&&Ne.name===fQ)continue;let ht=K.join(Ae,Ne.name),H=K.join(ne,Ne.name);Ne.isDirectory()?(Ne.name!==Di||Z&&Z.innerLoop)&&(await oe.mkdirPromise(H,{recursive:!0}),await we(ht,H,{...Z,innerLoop:!0})):Ce.value==="hardlinks-local"||Ce.value==="hardlinks-global"?await oe.linkPromise(ht,H):await oe.copyFilePromise(ht,H,g1e.default.constants.COPYFILE_FICLONE)}}catch(xe){throw Z.innerLoop||(xe.message=`While cloning ${Ae} -> ${ne} ${xe.message}`),xe}finally{Z.innerLoop||ae.tick()}};await we(Be,Ee,g)})().then(()=>x.splice(x.indexOf(me),1));x.push(me),x.length>f1e&&await Promise.race(x)},L=async(Be,Ee,g)=>{if(g)for(let[me,we]of Ee.children){let Ae=g.children.get(me);await L(K.join(Be,me),we,Ae)}else{Ee.children.has(Di)&&await AC(K.join(Be,Di),{contentsOnly:!1});let me=K.basename(Be)===Di&&v.has(K.join(K.dirname(Be),K.sep));await AC(Be,{contentsOnly:Be===A,allowSymlink:me})}};for(let[Be,Ee]of p){let g=v.get(Be);for(let[me,we]of Ee.children){if(me===".")continue;let 
Ae=g&&g.children.get(me),ne=K.join(Be,me);await L(ne,we,Ae)}}let U=async(Be,Ee,g)=>{if(g){p1e(Ee.locator,g.locator)||await AC(Be,{contentsOnly:Ee.linkType==="HARD"});for(let[me,we]of Ee.children){let Ae=g.children.get(me);await U(K.join(Be,me),we,Ae)}}else{Ee.children.has(Di)&&await AC(K.join(Be,Di),{contentsOnly:!0});let me=K.basename(Be)===Di&&v.has(K.join(K.dirname(Be),K.sep));await AC(Be,{contentsOnly:Ee.linkType==="HARD",allowSymlink:me})}};for(let[Be,Ee]of v){let g=p.get(Be);for(let[me,we]of Ee.children){if(me===".")continue;let Ae=g&&g.children.get(me);await U(K.join(Be,me),we,Ae)}}let z=new Map,te=[];for(let[Be,Ee]of E)for(let g of Ee){let{locationRoot:me,segments:we}=pQ(g,{skipPrefix:o.cwd}),Ae=v.get(me),ne=me;if(Ae){for(let Z of we)if(ne=K.join(ne,Z),Ae=Ae.children.get(Z),!Ae)break;if(Ae){let Z=p1e(Ae.locator,Be),xe=e.get(Ae.locator),Ne=xe.target,ht=ne,H=xe.linkType;if(Z)z.has(Ne)||z.set(Ne,ht);else if(Ne!==ht){let rt=G.parseLocator(Ae.locator);G.isVirtualLocator(rt)&&(rt=G.devirtualizeLocator(rt)),te.push({srcDir:Ne,dstDir:ht,linkType:H,realLocatorHash:rt.locatorHash})}}}}for(let[Be,{locations:Ee}]of e.entries())for(let g of Ee){let{locationRoot:me,segments:we}=pQ(g,{skipPrefix:o.cwd}),Ae=p.get(me),ne=v.get(me),Z=me,xe=e.get(Be),Ne=G.parseLocator(Be);G.isVirtualLocator(Ne)&&(Ne=G.devirtualizeLocator(Ne));let ht=Ne.locatorHash,H=xe.target,rt=g;if(H===rt)continue;let Te=xe.linkType;for(let Fe of we)ne=ne.children.get(Fe);if(!Ae)te.push({srcDir:H,dstDir:rt,linkType:Te,realLocatorHash:ht});else for(let Fe of we)if(Z=K.join(Z,Fe),Ae=Ae.children.get(Fe),!Ae){te.push({srcDir:H,dstDir:rt,linkType:Te,realLocatorHash:ht});break}}let ae=Zs.progressViaCounter(te.length),le=a.reportProgress(ae),ce=o.configuration.get("nmMode"),Ce={value:ce},de=o.configuration.get("winLinkType");try{let Be=Ce.value==="hardlinks-global"?`${fj(o.configuration)}/v1`:null;if(Be&&!await oe.existsPromise(Be)){await oe.mkdirpPromise(Be);for(let g=0;g<256;g++)await oe.mkdirPromise(K.join(Be,g.toString(16).padStart(2,"0")))}for(let g of te)(g.linkType==="SOFT"||!z.has(g.srcDir))&&(z.set(g.srcDir,g.dstDir),await C({...g,globalHardlinksStore:Be,nmMode:Ce,windowsLinkType:de,packageChecksum:u.get(g.realLocatorHash)||null}));await Promise.all(x),x.length=0;for(let g of te){let me=z.get(g.srcDir);g.linkType!=="SOFT"&&g.dstDir!==me&&await R(me,g.dstDir,{nmMode:Ce})}await Promise.all(x),await oe.mkdirPromise(A,{recursive:!0});let Ee=await B1t(e,v,o.cwd,{loadManifest:n});await D1t(h,Ee,o.cwd,de),await E1t(o,e,Ee,Ce,{installChangedByUser:I}),ce=="hardlinks-global"&&Ce.value=="hardlinks-local"&&a.reportWarningOnce(74,"'nmMode' has been downgraded to 'hardlinks-local' due to global cache and install folder being on different devices")}finally{le.stop()}}async function D1t(t,e,r,o){for(let a of t.keys()){if(K.contains(r,a)===null)throw new Error(`Assertion failed. Excepted bin symlink location to be inside project dir, instead it was at ${a}`);if(!e.has(a)){let n=K.join(a,Di,fQ);await oe.removePromise(n)}}for(let[a,n]of e){if(K.contains(r,a)===null)throw new Error(`Assertion failed. 
Excepted bin symlink location to be inside project dir, instead it was at ${a}`);let u=K.join(a,Di,fQ),A=t.get(a)||new Map;await oe.mkdirPromise(u,{recursive:!0});for(let p of A.keys())n.has(p)||(await oe.removePromise(K.join(u,p)),process.platform==="win32"&&await oe.removePromise(K.join(u,`${p}.cmd`)));for(let[p,h]of n){let E=A.get(p),I=K.join(u,p);E!==h&&(process.platform==="win32"?await(0,h1e.default)(ue.fromPortablePath(h),ue.fromPortablePath(I),{createPwshFile:!1}):(await oe.removePromise(I),await Aj(h,I,o),K.contains(r,await oe.realpathPromise(h))!==null&&await oe.chmodPromise(h,493)))}}}Ge();Pt();nA();var YB=class extends Hh{constructor(){super(...arguments);this.mode="loose"}makeInstaller(r){return new pj(r)}},pj=class extends sd{constructor(){super(...arguments);this.mode="loose"}async transformPnpSettings(r){let o=new zs({baseFs:new rA({maxOpenFiles:80,readOnlyArchives:!0})}),a=e1e(r,this.opts.project.cwd,o),{tree:n,errors:u}=FB(a,{pnpifyFs:!1,project:this.opts.project});if(!n){for(let{messageName:I,text:v}of u)this.opts.report.reportError(I,v);return}let A=new Map;r.fallbackPool=A;let p=(I,v)=>{let x=G.parseLocator(v.locator),C=G.stringifyIdent(x);C===I?A.set(I,x.reference):A.set(I,[C,x.reference])},h=K.join(this.opts.project.cwd,dr.nodeModules),E=n.get(h);if(!(typeof E>"u")){if("target"in E)throw new Error("Assertion failed: Expected the root junction point to be a directory");for(let I of E.dirList){let v=K.join(h,I),x=n.get(v);if(typeof x>"u")throw new Error("Assertion failed: Expected the child to have been registered");if("target"in x)p(I,x);else for(let C of x.dirList){let R=K.join(v,C),L=n.get(R);if(typeof L>"u")throw new Error("Assertion failed: Expected the subchild to have been registered");if("target"in L)p(`${I}/${C}`,L);else throw new Error("Assertion failed: Expected the leaf junction to be a package")}}}}};var P1t={hooks:{cleanGlobalArtifacts:async t=>{let e=fj(t);await oe.removePromise(e)}},configuration:{nmHoistingLimits:{description:"Prevents packages to be hoisted past specific levels",type:"STRING",values:["workspaces","dependencies","none"],default:"none"},nmMode:{description:"Defines in which measure Yarn must use hardlinks and symlinks when generated `node_modules` directories.",type:"STRING",values:["classic","hardlinks-local","hardlinks-global"],default:"classic"},nmSelfReferences:{description:"Defines whether the linker should generate self-referencing symlinks for workspaces.",type:"BOOLEAN",default:!0}},linkers:[GB,YB]},S1t=P1t;var f5={};Vt(f5,{NpmHttpFetcher:()=>VB,NpmRemapResolver:()=>zB,NpmSemverFetcher:()=>tp,NpmSemverResolver:()=>JB,NpmTagResolver:()=>XB,default:()=>qvt,npmConfigUtils:()=>Zn,npmHttpUtils:()=>Zr,npmPublishUtils:()=>PC});Ge();var b1e=Ze(Jn());var Wn="npm:";var Zr={};Vt(Zr,{AuthType:()=>D1e,customPackageError:()=>od,del:()=>U1t,get:()=>ad,getIdentUrl:()=>hQ,getPackageMetadata:()=>hC,handleInvalidAuthenticationError:()=>jh,post:()=>M1t,put:()=>O1t});Ge();Ge();Pt();var mj=Ze(J1()),B1e=Ze(y_()),v1e=Ze(Jn());var Zn={};Vt(Zn,{RegistryType:()=>w1e,getAuditRegistry:()=>b1t,getAuthConfiguration:()=>dj,getDefaultRegistry:()=>WB,getPublishRegistry:()=>x1t,getRegistryConfiguration:()=>I1e,getScopeConfiguration:()=>gj,getScopeRegistry:()=>fC,normalizeRegistry:()=>ac});var w1e=(o=>(o.AUDIT_REGISTRY="npmAuditRegistry",o.FETCH_REGISTRY="npmRegistryServer",o.PUBLISH_REGISTRY="npmPublishRegistry",o))(w1e||{});function ac(t){return t.replace(/\/$/,"")}function b1t({configuration:t}){return WB({configuration:t,type:"npmAuditRegistry"})}function 
x1t(t,{configuration:e}){return t.publishConfig?.registry?ac(t.publishConfig.registry):t.name?fC(t.name.scope,{configuration:e,type:"npmPublishRegistry"}):WB({configuration:e,type:"npmPublishRegistry"})}function fC(t,{configuration:e,type:r="npmRegistryServer"}){let o=gj(t,{configuration:e});if(o===null)return WB({configuration:e,type:r});let a=o.get(r);return a===null?WB({configuration:e,type:r}):ac(a)}function WB({configuration:t,type:e="npmRegistryServer"}){let r=t.get(e);return ac(r!==null?r:t.get("npmRegistryServer"))}function I1e(t,{configuration:e}){let r=e.get("npmRegistries"),o=ac(t),a=r.get(o);if(typeof a<"u")return a;let n=r.get(o.replace(/^[a-z]+:/,""));return typeof n<"u"?n:null}function gj(t,{configuration:e}){if(t===null)return null;let o=e.get("npmScopes").get(t);return o||null}function dj(t,{configuration:e,ident:r}){let o=r&&gj(r.scope,{configuration:e});return o?.get("npmAuthIdent")||o?.get("npmAuthToken")?o:I1e(t,{configuration:e})||e}var D1e=(a=>(a[a.NO_AUTH=0]="NO_AUTH",a[a.BEST_EFFORT=1]="BEST_EFFORT",a[a.CONFIGURATION=2]="CONFIGURATION",a[a.ALWAYS_AUTH=3]="ALWAYS_AUTH",a))(D1e||{});async function jh(t,{attemptedAs:e,registry:r,headers:o,configuration:a}){if(dQ(t))throw new Jt(41,"Invalid OTP token");if(t.originalError?.name==="HTTPError"&&t.originalError?.response.statusCode===401)throw new Jt(41,`Invalid authentication (${typeof e!="string"?`as ${await H1t(r,o,{configuration:a})}`:`attempted as ${e}`})`)}function od(t,e){let r=t.response?.statusCode;return r?r===404?"Package not found":r>=500&&r<600?`The registry appears to be down (using a ${pe.applyHyperlink(e,"local cache","https://yarnpkg.com/advanced/lexicon#local-cache")} might have protected you against such outages)`:null:null}function hQ(t){return t.scope?`/@${t.scope}%2f${t.name}`:`/${t.name}`}var P1e=new Map,k1t=new Map;async function Q1t(t){return await He.getFactoryWithDefault(P1e,t,async()=>{let e=null;try{e=await oe.readJsonPromise(t)}catch{}return e})}async function F1t(t,e,{configuration:r,cached:o,registry:a,headers:n,version:u,...A}){return await He.getFactoryWithDefault(k1t,t,async()=>await ad(hQ(e),{...A,customErrorMessage:od,configuration:r,registry:a,ident:e,headers:{...n,"If-None-Match":o?.etag,"If-Modified-Since":o?.lastModified},wrapNetworkRequest:async p=>async()=>{let h=await p();if(h.statusCode===304){if(o===null)throw new Error("Assertion failed: cachedMetadata should not be null");return{...h,body:o.metadata}}let E=R1t(JSON.parse(h.body.toString())),I={metadata:E,etag:h.headers.etag,lastModified:h.headers["last-modified"]};return P1e.set(t,Promise.resolve(I)),Promise.resolve().then(async()=>{let v=`${t}-${process.pid}.tmp`;await oe.mkdirPromise(K.dirname(v),{recursive:!0}),await oe.writeJsonPromise(v,I,{compact:!0}),await oe.renamePromise(v,t)}).catch(()=>{}),{...h,body:E}}}))}async function hC(t,{cache:e,project:r,registry:o,headers:a,version:n,...u}){let{configuration:A}=r;o=KB(A,{ident:t,registry:o});let p=N1t(A,o),h=K.join(p,`${G.slugifyIdent(t)}.json`),E=null;if(!r.lockfileNeedsRefresh&&(E=await Q1t(h),E)){if(typeof n<"u"&&typeof E.metadata.versions[n]<"u")return E.metadata;if(A.get("enableOfflineMode")){let I=structuredClone(E.metadata),v=new Set;if(e){for(let C of Object.keys(I.versions)){let R=G.makeLocator(t,`npm:${C}`),L=e.getLocatorMirrorPath(R);(!L||!oe.existsSync(L))&&(delete I.versions[C],v.add(C))}let x=I["dist-tags"].latest;if(v.has(x)){let 
C=Object.keys(E.metadata.versions).sort(v1e.default.compare),R=C.indexOf(x);for(;v.has(C[R])&&R>=0;)R-=1;R>=0?I["dist-tags"].latest=C[R]:delete I["dist-tags"].latest}}return I}}return await F1t(h,t,{...u,configuration:A,cached:E,registry:o,headers:a,version:n})}var S1e=["name","dist.tarball","bin","scripts","os","cpu","libc","dependencies","dependenciesMeta","optionalDependencies","peerDependencies","peerDependenciesMeta","deprecated"];function R1t(t){return{"dist-tags":t["dist-tags"],versions:Object.fromEntries(Object.entries(t.versions).map(([e,r])=>[e,(0,B1e.default)(r,S1e)]))}}var T1t=wn.makeHash(...S1e).slice(0,6);function N1t(t,e){let r=L1t(t),o=new URL(e);return K.join(r,T1t,o.hostname)}function L1t(t){return K.join(t.get("globalFolder"),"metadata/npm")}async function ad(t,{configuration:e,headers:r,ident:o,authType:a,registry:n,...u}){n=KB(e,{ident:o,registry:n}),o&&o.scope&&typeof a>"u"&&(a=1);let A=await gQ(n,{authType:a,configuration:e,ident:o});A&&(r={...r,authorization:A});try{return await sn.get(t.charAt(0)==="/"?`${n}${t}`:t,{configuration:e,headers:r,...u})}catch(p){throw await jh(p,{registry:n,configuration:e,headers:r}),p}}async function M1t(t,e,{attemptedAs:r,configuration:o,headers:a,ident:n,authType:u=3,registry:A,otp:p,...h}){A=KB(o,{ident:n,registry:A});let E=await gQ(A,{authType:u,configuration:o,ident:n});E&&(a={...a,authorization:E}),p&&(a={...a,...pC(p)});try{return await sn.post(A+t,e,{configuration:o,headers:a,...h})}catch(I){if(!dQ(I)||p)throw await jh(I,{attemptedAs:r,registry:A,configuration:o,headers:a}),I;p=await yj(I,{configuration:o});let v={...a,...pC(p)};try{return await sn.post(`${A}${t}`,e,{configuration:o,headers:v,...h})}catch(x){throw await jh(x,{attemptedAs:r,registry:A,configuration:o,headers:a}),x}}}async function O1t(t,e,{attemptedAs:r,configuration:o,headers:a,ident:n,authType:u=3,registry:A,otp:p,...h}){A=KB(o,{ident:n,registry:A});let E=await gQ(A,{authType:u,configuration:o,ident:n});E&&(a={...a,authorization:E}),p&&(a={...a,...pC(p)});try{return await sn.put(A+t,e,{configuration:o,headers:a,...h})}catch(I){if(!dQ(I))throw await jh(I,{attemptedAs:r,registry:A,configuration:o,headers:a}),I;p=await yj(I,{configuration:o});let v={...a,...pC(p)};try{return await sn.put(`${A}${t}`,e,{configuration:o,headers:v,...h})}catch(x){throw await jh(x,{attemptedAs:r,registry:A,configuration:o,headers:a}),x}}}async function U1t(t,{attemptedAs:e,configuration:r,headers:o,ident:a,authType:n=3,registry:u,otp:A,...p}){u=KB(r,{ident:a,registry:u});let h=await gQ(u,{authType:n,configuration:r,ident:a});h&&(o={...o,authorization:h}),A&&(o={...o,...pC(A)});try{return await sn.del(u+t,{configuration:r,headers:o,...p})}catch(E){if(!dQ(E)||A)throw await jh(E,{attemptedAs:e,registry:u,configuration:r,headers:o}),E;A=await yj(E,{configuration:r});let I={...o,...pC(A)};try{return await sn.del(`${u}${t}`,{configuration:r,headers:I,...p})}catch(v){throw await jh(v,{attemptedAs:e,registry:u,configuration:r,headers:o}),v}}}function KB(t,{ident:e,registry:r}){if(typeof r>"u"&&e)return fC(e.scope,{configuration:t});if(typeof r!="string")throw new Error("Assertion failed: The registry should be a string");return ac(r)}async function gQ(t,{authType:e=2,configuration:r,ident:o}){let a=dj(t,{configuration:r,ident:o}),n=_1t(a,e);if(!n)return null;let u=await r.reduceHook(A=>A.getNpmAuthenticationHeader,void 0,t,{configuration:r,ident:o});if(u)return u;if(a.get("npmAuthToken"))return`Bearer ${a.get("npmAuthToken")}`;if(a.get("npmAuthIdent")){let A=a.get("npmAuthIdent");return 
A.includes(":")?`Basic ${Buffer.from(A).toString("base64")}`:`Basic ${A}`}if(n&&e!==1)throw new Jt(33,"No authentication configured for request");return null}function _1t(t,e){switch(e){case 2:return t.get("npmAlwaysAuth");case 1:case 3:return!0;case 0:return!1;default:throw new Error("Unreachable")}}async function H1t(t,e,{configuration:r}){if(typeof e>"u"||typeof e.authorization>"u")return"an anonymous user";try{return(await sn.get(new URL(`${t}/-/whoami`).href,{configuration:r,headers:e,jsonResponse:!0})).username??"an unknown user"}catch{return"an unknown user"}}async function yj(t,{configuration:e}){let r=t.originalError?.response.headers["npm-notice"];if(r&&(await Rt.start({configuration:e,stdout:process.stdout,includeFooter:!1},async a=>{if(a.reportInfo(0,r.replace(/(https?:\/\/\S+)/g,pe.pretty(e,"$1",pe.Type.URL))),!process.env.YARN_IS_TEST_ENV){let n=r.match(/open (https?:\/\/\S+)/i);if(n&&Xi.openUrl){let{openNow:u}=await(0,mj.prompt)({type:"confirm",name:"openNow",message:"Do you want to try to open this url now?",required:!0,initial:!0,onCancel:()=>process.exit(130)});u&&(await Xi.openUrl(n[1])||(a.reportSeparator(),a.reportWarning(0,"We failed to automatically open the url; you'll have to open it yourself in your browser of choice.")))}}}),process.stdout.write(` +`)),process.env.YARN_IS_TEST_ENV)return process.env.YARN_INJECT_NPM_2FA_TOKEN||"";let{otp:o}=await(0,mj.prompt)({type:"password",name:"otp",message:"One-time password:",required:!0,onCancel:()=>process.exit(130)});return process.stdout.write(` +`),o}function dQ(t){if(t.originalError?.name!=="HTTPError")return!1;try{return(t.originalError?.response.headers["www-authenticate"].split(/,\s*/).map(r=>r.toLowerCase())).includes("otp")}catch{return!1}}function pC(t){return{"npm-otp":t}}var VB=class{supports(e,r){if(!e.reference.startsWith(Wn))return!1;let{selector:o,params:a}=G.parseRange(e.reference);return!(!b1e.default.valid(o)||a===null||typeof a.__archiveUrl!="string")}getLocalPath(e,r){return null}async fetch(e,r){let o=r.checksums.get(e.locatorHash)||null,[a,n,u]=await r.cache.fetchPackageFromCache(e,o,{onHit:()=>r.report.reportCacheHit(e),onMiss:()=>r.report.reportCacheMiss(e,`${G.prettyLocator(r.project.configuration,e)} can't be found in the cache and will be fetched from the remote server`),loader:()=>this.fetchFromNetwork(e,r),...r.cacheOptions});return{packageFs:a,releaseFs:n,prefixPath:G.getIdentVendorPath(e),checksum:u}}async fetchFromNetwork(e,r){let{params:o}=G.parseRange(e.reference);if(o===null||typeof o.__archiveUrl!="string")throw new Error("Assertion failed: The archiveUrl querystring parameter should have been available");let a=await ad(o.__archiveUrl,{customErrorMessage:od,configuration:r.project.configuration,ident:e});return await $i.convertToZip(a,{configuration:r.project.configuration,prefixPath:G.getIdentVendorPath(e),stripComponents:1})}};Ge();var zB=class{supportsDescriptor(e,r){return!(!e.range.startsWith(Wn)||!G.tryParseDescriptor(e.range.slice(Wn.length),!0))}supportsLocator(e,r){return!1}shouldPersistResolution(e,r){throw new Error("Unreachable")}bindDescriptor(e,r,o){return e}getResolutionDependencies(e,r){let o=r.project.configuration.normalizeDependency(G.parseDescriptor(e.range.slice(Wn.length),!0));return r.resolver.getResolutionDependencies(o,r)}async getCandidates(e,r,o){let a=o.project.configuration.normalizeDependency(G.parseDescriptor(e.range.slice(Wn.length),!0));return await o.resolver.getCandidates(a,r,o)}async getSatisfying(e,r,o,a){let 
n=a.project.configuration.normalizeDependency(G.parseDescriptor(e.range.slice(Wn.length),!0));return a.resolver.getSatisfying(n,r,o,a)}resolve(e,r){throw new Error("Unreachable")}};Ge();Ge();var x1e=Ze(Jn());var tp=class t{supports(e,r){if(!e.reference.startsWith(Wn))return!1;let o=new URL(e.reference);return!(!x1e.default.valid(o.pathname)||o.searchParams.has("__archiveUrl"))}getLocalPath(e,r){return null}async fetch(e,r){let o=r.checksums.get(e.locatorHash)||null,[a,n,u]=await r.cache.fetchPackageFromCache(e,o,{onHit:()=>r.report.reportCacheHit(e),onMiss:()=>r.report.reportCacheMiss(e,`${G.prettyLocator(r.project.configuration,e)} can't be found in the cache and will be fetched from the remote registry`),loader:()=>this.fetchFromNetwork(e,r),...r.cacheOptions});return{packageFs:a,releaseFs:n,prefixPath:G.getIdentVendorPath(e),checksum:u}}async fetchFromNetwork(e,r){let o;try{o=await ad(t.getLocatorUrl(e),{customErrorMessage:od,configuration:r.project.configuration,ident:e})}catch{o=await ad(t.getLocatorUrl(e).replace(/%2f/g,"/"),{customErrorMessage:od,configuration:r.project.configuration,ident:e})}return await $i.convertToZip(o,{configuration:r.project.configuration,prefixPath:G.getIdentVendorPath(e),stripComponents:1})}static isConventionalTarballUrl(e,r,{configuration:o}){let a=fC(e.scope,{configuration:o}),n=t.getLocatorUrl(e);return r=r.replace(/^https?:(\/\/(?:[^/]+\.)?npmjs.org(?:$|\/))/,"https:$1"),a=a.replace(/^https:\/\/registry\.npmjs\.org($|\/)/,"https://registry.yarnpkg.com$1"),r=r.replace(/^https:\/\/registry\.npmjs\.org($|\/)/,"https://registry.yarnpkg.com$1"),r===a+n||r===a+n.replace(/%2f/g,"/")}static getLocatorUrl(e){let r=Lr.clean(e.reference.slice(Wn.length));if(r===null)throw new Jt(10,"The npm semver resolver got selected, but the version isn't semver");return`${hQ(e)}/-/${e.name}-${r}.tgz`}};Ge();Ge();Ge();var Ej=Ze(Jn());var mQ=G.makeIdent(null,"node-gyp"),q1t=/\b(node-gyp|prebuild-install)\b/,JB=class{supportsDescriptor(e,r){return e.range.startsWith(Wn)?!!Lr.validRange(e.range.slice(Wn.length)):!1}supportsLocator(e,r){if(!e.reference.startsWith(Wn))return!1;let{selector:o}=G.parseRange(e.reference);return!!Ej.default.valid(o)}shouldPersistResolution(e,r){return!0}bindDescriptor(e,r,o){return e}getResolutionDependencies(e,r){return{}}async getCandidates(e,r,o){let a=Lr.validRange(e.range.slice(Wn.length));if(a===null)throw new Error(`Expected a valid range, got ${e.range.slice(Wn.length)}`);let n=await hC(e,{cache:o.fetchOptions?.cache,project:o.project,version:Ej.default.valid(a.raw)?a.raw:void 0}),u=He.mapAndFilter(Object.keys(n.versions),h=>{try{let E=new Lr.SemVer(h);if(a.test(E))return E}catch{}return He.mapAndFilter.skip}),A=u.filter(h=>!n.versions[h.raw].deprecated),p=A.length>0?A:u;return p.sort((h,E)=>-h.compare(E)),p.map(h=>{let E=G.makeLocator(e,`${Wn}${h.raw}`),I=n.versions[h.raw].dist.tarball;return tp.isConventionalTarballUrl(E,I,{configuration:o.project.configuration})?E:G.bindLocator(E,{__archiveUrl:I})})}async getSatisfying(e,r,o,a){let n=Lr.validRange(e.range.slice(Wn.length));if(n===null)throw new Error(`Expected a valid range, got ${e.range.slice(Wn.length)}`);return{locators:He.mapAndFilter(o,p=>{if(p.identHash!==e.identHash)return He.mapAndFilter.skip;let h=G.tryParseRange(p.reference,{requireProtocol:Wn});if(!h)return He.mapAndFilter.skip;let E=new Lr.SemVer(h.selector);return n.test(E)?{locator:p,version:E}:He.mapAndFilter.skip}).sort((p,h)=>-p.version.compare(h.version)).map(({locator:p})=>p),sorted:!0}}async 
resolve(e,r){let{selector:o}=G.parseRange(e.reference),a=Lr.clean(o);if(a===null)throw new Jt(10,"The npm semver resolver got selected, but the version isn't semver");let n=await hC(e,{cache:r.fetchOptions?.cache,project:r.project,version:a});if(!Object.hasOwn(n,"versions"))throw new Jt(15,'Registry returned invalid data for - missing "versions" field');if(!Object.hasOwn(n.versions,a))throw new Jt(16,`Registry failed to return reference "${a}"`);let u=new Ut;if(u.load(n.versions[a]),!u.dependencies.has(mQ.identHash)&&!u.peerDependencies.has(mQ.identHash)){for(let A of u.scripts.values())if(A.match(q1t)){u.dependencies.set(mQ.identHash,G.makeDescriptor(mQ,"latest"));break}}return{...e,version:a,languageName:"node",linkType:"HARD",conditions:u.getConditions(),dependencies:r.project.configuration.normalizeDependencyMap(u.dependencies),peerDependencies:u.peerDependencies,dependenciesMeta:u.dependenciesMeta,peerDependenciesMeta:u.peerDependenciesMeta,bin:u.bin}}};Ge();Ge();var k1e=Ze(Jn());var XB=class{supportsDescriptor(e,r){return!(!e.range.startsWith(Wn)||!ly.test(e.range.slice(Wn.length)))}supportsLocator(e,r){return!1}shouldPersistResolution(e,r){throw new Error("Unreachable")}bindDescriptor(e,r,o){return e}getResolutionDependencies(e,r){return{}}async getCandidates(e,r,o){let a=e.range.slice(Wn.length),n=await hC(e,{cache:o.fetchOptions?.cache,project:o.project});if(!Object.hasOwn(n,"dist-tags"))throw new Jt(15,'Registry returned invalid data - missing "dist-tags" field');let u=n["dist-tags"];if(!Object.hasOwn(u,a))throw new Jt(16,`Registry failed to return tag "${a}"`);let A=u[a],p=G.makeLocator(e,`${Wn}${A}`),h=n.versions[A].dist.tarball;return tp.isConventionalTarballUrl(p,h,{configuration:o.project.configuration})?[p]:[G.bindLocator(p,{__archiveUrl:h})]}async getSatisfying(e,r,o,a){let n=[];for(let u of o){if(u.identHash!==e.identHash)continue;let A=G.tryParseRange(u.reference,{requireProtocol:Wn});if(!(!A||!k1e.default.valid(A.selector))){if(A.params?.__archiveUrl){let p=G.makeRange({protocol:Wn,selector:A.selector,source:null,params:null}),[h]=await a.resolver.getCandidates(G.makeDescriptor(e,p),r,a);if(u.reference!==h.reference)continue}n.push(u)}}return{locators:n,sorted:!1}}async resolve(e,r){throw new Error("Unreachable")}};var PC={};Vt(PC,{getGitHead:()=>_vt,getPublishAccess:()=>EBe,getReadmeContent:()=>CBe,makePublishBody:()=>Uvt});Ge();Ge();Pt();var a5={};Vt(a5,{PackCommand:()=>DC,default:()=>wvt,packUtils:()=>CA});Ge();Ge();Ge();Pt();qt();var CA={};Vt(CA,{genPackList:()=>_Q,genPackStream:()=>o5,genPackageManifest:()=>aBe,hasPackScripts:()=>i5,prepareForPack:()=>s5});Ge();Pt();var n5=Ze($o()),sBe=Ze(tBe()),oBe=ve("zlib"),uvt=["/package.json","/readme","/readme.*","/license","/license.*","/licence","/licence.*","/changelog","/changelog.*"],Avt=["/package.tgz",".github",".git",".hg","node_modules",".npmignore",".gitignore",".#*",".DS_Store"];async function i5(t){return!!(An.hasWorkspaceScript(t,"prepack")||An.hasWorkspaceScript(t,"postpack"))}async function s5(t,{report:e},r){await An.maybeExecuteWorkspaceLifecycleScript(t,"prepack",{report:e});try{let o=K.join(t.cwd,Ut.fileName);await oe.existsPromise(o)&&await t.manifest.loadFile(o,{baseFs:oe}),await r()}finally{await An.maybeExecuteWorkspaceLifecycleScript(t,"postpack",{report:e})}}async function o5(t,e){typeof e>"u"&&(e=await _Q(t));let r=new Set;for(let n of t.manifest.publishConfig?.executableFiles??new Set)r.add(K.normalize(n));for(let n of t.manifest.bin.values())r.add(K.normalize(n));let 
o=sBe.default.pack();process.nextTick(async()=>{for(let n of e){let u=K.normalize(n),A=K.resolve(t.cwd,u),p=K.join("package",u),h=await oe.lstatPromise(A),E={name:p,mtime:new Date(Bi.SAFE_TIME*1e3)},I=r.has(u)?493:420,v,x,C=new Promise((L,U)=>{v=L,x=U}),R=L=>{L?x(L):v()};if(h.isFile()){let L;u==="package.json"?L=Buffer.from(JSON.stringify(await aBe(t),null,2)):L=await oe.readFilePromise(A),o.entry({...E,mode:I,type:"file"},L,R)}else h.isSymbolicLink()?o.entry({...E,mode:I,type:"symlink",linkname:await oe.readlinkPromise(A)},R):R(new Error(`Unsupported file type ${h.mode} for ${ue.fromPortablePath(u)}`));await C}o.finalize()});let a=(0,oBe.createGzip)();return o.pipe(a),a}async function aBe(t){let e=JSON.parse(JSON.stringify(t.manifest.raw));return await t.project.configuration.triggerHook(r=>r.beforeWorkspacePacking,t,e),e}async function _Q(t){let e=t.project,r=e.configuration,o={accept:[],reject:[]};for(let I of Avt)o.reject.push(I);for(let I of uvt)o.accept.push(I);o.reject.push(r.get("rcFilename"));let a=I=>{if(I===null||!I.startsWith(`${t.cwd}/`))return;let v=K.relative(t.cwd,I),x=K.resolve(It.root,v);o.reject.push(x)};a(K.resolve(e.cwd,dr.lockfile)),a(r.get("cacheFolder")),a(r.get("globalFolder")),a(r.get("installStatePath")),a(r.get("virtualFolder")),a(r.get("yarnPath")),await r.triggerHook(I=>I.populateYarnPaths,e,I=>{a(I)});for(let I of e.workspaces){let v=K.relative(t.cwd,I.cwd);v!==""&&!v.match(/^(\.\.)?\//)&&o.reject.push(`/${v}`)}let n={accept:[],reject:[]},u=t.manifest.publishConfig?.main??t.manifest.main,A=t.manifest.publishConfig?.module??t.manifest.module,p=t.manifest.publishConfig?.browser??t.manifest.browser,h=t.manifest.publishConfig?.bin??t.manifest.bin;u!=null&&n.accept.push(K.resolve(It.root,u)),A!=null&&n.accept.push(K.resolve(It.root,A)),typeof p=="string"&&n.accept.push(K.resolve(It.root,p));for(let I of h.values())n.accept.push(K.resolve(It.root,I));if(p instanceof Map)for(let[I,v]of p.entries())n.accept.push(K.resolve(It.root,I)),typeof v=="string"&&n.accept.push(K.resolve(It.root,v));let E=t.manifest.files!==null;if(E){n.reject.push("/*");for(let I of t.manifest.files)lBe(n.accept,I,{cwd:It.root})}return await fvt(t.cwd,{hasExplicitFileList:E,globalList:o,ignoreList:n})}async function fvt(t,{hasExplicitFileList:e,globalList:r,ignoreList:o}){let a=[],n=new qu(t),u=[[It.root,[o]]];for(;u.length>0;){let[A,p]=u.pop(),h=await n.lstatPromise(A);if(!nBe(A,{globalList:r,ignoreLists:h.isDirectory()?null:p}))if(h.isDirectory()){let E=await n.readdirPromise(A),I=!1,v=!1;if(!e||A!==It.root)for(let R of E)I=I||R===".gitignore",v=v||R===".npmignore";let x=v?await rBe(n,A,".npmignore"):I?await rBe(n,A,".gitignore"):null,C=x!==null?[x].concat(p):p;nBe(A,{globalList:r,ignoreLists:p})&&(C=[...p,{accept:[],reject:["**/*"]}]);for(let R of E)u.push([K.resolve(A,R),C])}else(h.isFile()||h.isSymbolicLink())&&a.push(K.relative(It.root,A))}return a.sort()}async function rBe(t,e,r){let o={accept:[],reject:[]},a=await t.readFilePromise(K.join(e,r),"utf8");for(let n of a.split(/\n/g))lBe(o.reject,n,{cwd:e});return o}function pvt(t,{cwd:e}){let r=t[0]==="!";return r&&(t=t.slice(1)),t.match(/\.{0,1}\//)&&(t=K.resolve(e,t)),r&&(t=`!${t}`),t}function lBe(t,e,{cwd:r}){let o=e.trim();o===""||o[0]==="#"||t.push(pvt(o,{cwd:r}))}function nBe(t,{globalList:e,ignoreLists:r}){let o=UQ(t,e.accept);if(o!==0)return o===2;let a=UQ(t,e.reject);if(a!==0)return a===1;if(r!==null)for(let n of r){let u=UQ(t,n.accept);if(u!==0)return u===2;let A=UQ(t,n.reject);if(A!==0)return A===1}return!1}function UQ(t,e){let 
r=e,o=[];for(let a=0;a{await s5(a,{report:p},async()=>{p.reportJson({base:ue.fromPortablePath(a.cwd)});let h=await _Q(a);for(let E of h)p.reportInfo(null,ue.fromPortablePath(E)),p.reportJson({location:ue.fromPortablePath(E)});if(!this.dryRun){let E=await o5(a,h),I=oe.createWriteStream(u);E.pipe(I),await new Promise(v=>{I.on("finish",v)})}}),this.dryRun||(p.reportInfo(0,`Package archive generated in ${pe.pretty(r,u,pe.Type.PATH)}`),p.reportJson({output:ue.fromPortablePath(u)}))})).exitCode()}};function hvt(t,{workspace:e}){let r=t.replace("%s",gvt(e)).replace("%v",dvt(e));return ue.toPortablePath(r)}function gvt(t){return t.manifest.name!==null?G.slugifyIdent(t.manifest.name):"package"}function dvt(t){return t.manifest.version!==null?t.manifest.version:"unknown"}var mvt=["dependencies","devDependencies","peerDependencies"],yvt="workspace:",Evt=(t,e)=>{e.publishConfig&&(e.publishConfig.type&&(e.type=e.publishConfig.type),e.publishConfig.main&&(e.main=e.publishConfig.main),e.publishConfig.browser&&(e.browser=e.publishConfig.browser),e.publishConfig.module&&(e.module=e.publishConfig.module),e.publishConfig.exports&&(e.exports=e.publishConfig.exports),e.publishConfig.imports&&(e.imports=e.publishConfig.imports),e.publishConfig.bin&&(e.bin=e.publishConfig.bin));let r=t.project;for(let o of mvt)for(let a of t.manifest.getForScope(o).values()){let n=r.tryWorkspaceByDescriptor(a),u=G.parseRange(a.range);if(u.protocol===yvt)if(n===null){if(r.tryWorkspaceByIdent(a)===null)throw new Jt(21,`${G.prettyDescriptor(r.configuration,a)}: No local workspace found for this range`)}else{let A;G.areDescriptorsEqual(a,n.anchoredDescriptor)||u.selector==="*"?A=n.manifest.version??"0.0.0":u.selector==="~"||u.selector==="^"?A=`${u.selector}${n.manifest.version??"0.0.0"}`:A=u.selector;let p=o==="dependencies"?G.makeDescriptor(a,"unknown"):null,h=p!==null&&t.manifest.ensureDependencyMeta(p).optional?"optionalDependencies":o;e[h][G.stringifyIdent(a)]=A}}},Cvt={hooks:{beforeWorkspacePacking:Evt},commands:[DC]},wvt=Cvt;var mBe=ve("crypto"),yBe=Ze(dBe());async function Uvt(t,e,{access:r,tag:o,registry:a,gitHead:n}){let u=t.manifest.name,A=t.manifest.version,p=G.stringifyIdent(u),h=(0,mBe.createHash)("sha1").update(e).digest("hex"),E=yBe.default.fromData(e).toString(),I=r??EBe(t,u),v=await CBe(t),x=await CA.genPackageManifest(t),C=`${p}-${A}.tgz`,R=new URL(`${ac(a)}/${p}/-/${C}`);return{_id:p,_attachments:{[C]:{content_type:"application/octet-stream",data:e.toString("base64"),length:e.length}},name:p,access:I,"dist-tags":{[o]:A},versions:{[A]:{...x,_id:`${p}@${A}`,name:p,version:A,gitHead:n,dist:{shasum:h,integrity:E,tarball:R.toString()}}},readme:v}}async function _vt(t){try{let{stdout:e}=await Ur.execvp("git",["rev-parse","--revs-only","HEAD"],{cwd:t});return e.trim()===""?void 0:e.trim()}catch{return}}function EBe(t,e){let r=t.project.configuration;return t.manifest.publishConfig&&typeof t.manifest.publishConfig.access=="string"?t.manifest.publishConfig.access:r.get("npmPublishAccess")!==null?r.get("npmPublishAccess"):e.scope?"restricted":"public"}async function CBe(t){let e=ue.toPortablePath(`${t.cwd}/README.md`),r=t.manifest.name,a=`# ${G.stringifyIdent(r)} +`;try{a=await oe.readFilePromise(e,"utf8")}catch(n){if(n.code==="ENOENT")return a;throw n}return a}var A5={npmAlwaysAuth:{description:"URL of the selected npm registry (note: npm enterprise isn't supported)",type:"BOOLEAN",default:!1},npmAuthIdent:{description:"Authentication identity for the npm registry (_auth in npm and yarn 
v1)",type:"SECRET",default:null},npmAuthToken:{description:"Authentication token for the npm registry (_authToken in npm and yarn v1)",type:"SECRET",default:null}},wBe={npmAuditRegistry:{description:"Registry to query for audit reports",type:"STRING",default:null},npmPublishRegistry:{description:"Registry to push packages to",type:"STRING",default:null},npmRegistryServer:{description:"URL of the selected npm registry (note: npm enterprise isn't supported)",type:"STRING",default:"https://registry.yarnpkg.com"}},Hvt={configuration:{...A5,...wBe,npmScopes:{description:"Settings per package scope",type:"MAP",valueDefinition:{description:"",type:"SHAPE",properties:{...A5,...wBe}}},npmRegistries:{description:"Settings per registry",type:"MAP",normalizeKeys:ac,valueDefinition:{description:"",type:"SHAPE",properties:{...A5}}}},fetchers:[VB,tp],resolvers:[zB,JB,XB]},qvt=Hvt;var w5={};Vt(w5,{NpmAuditCommand:()=>bC,NpmInfoCommand:()=>xC,NpmLoginCommand:()=>kC,NpmLogoutCommand:()=>FC,NpmPublishCommand:()=>RC,NpmTagAddCommand:()=>NC,NpmTagListCommand:()=>TC,NpmTagRemoveCommand:()=>LC,NpmWhoamiCommand:()=>MC,default:()=>zvt,npmAuditTypes:()=>dv,npmAuditUtils:()=>HQ});Ge();Ge();qt();var m5=Ze($o());el();var dv={};Vt(dv,{Environment:()=>hv,Severity:()=>gv});var hv=(o=>(o.All="all",o.Production="production",o.Development="development",o))(hv||{}),gv=(n=>(n.Info="info",n.Low="low",n.Moderate="moderate",n.High="high",n.Critical="critical",n))(gv||{});var HQ={};Vt(HQ,{allSeverities:()=>SC,getPackages:()=>d5,getReportTree:()=>h5,getSeverityInclusions:()=>p5,getTopLevelDependencies:()=>g5});Ge();var IBe=Ze(Jn());var SC=["info","low","moderate","high","critical"];function p5(t){if(typeof t>"u")return new Set(SC);let e=SC.indexOf(t),r=SC.slice(e);return new Set(r)}function h5(t){let e={},r={children:e};for(let[o,a]of He.sortMap(Object.entries(t),n=>n[0]))for(let n of He.sortMap(a,u=>`${u.id}`))e[`${o}/${n.id}`]={value:pe.tuple(pe.Type.IDENT,G.parseIdent(o)),children:{ID:typeof n.id<"u"&&{label:"ID",value:pe.tuple(pe.Type.ID,n.id)},Issue:{label:"Issue",value:pe.tuple(pe.Type.NO_HINT,n.title)},URL:typeof n.url<"u"&&{label:"URL",value:pe.tuple(pe.Type.URL,n.url)},Severity:{label:"Severity",value:pe.tuple(pe.Type.NO_HINT,n.severity)},"Vulnerable Versions":{label:"Vulnerable Versions",value:pe.tuple(pe.Type.RANGE,n.vulnerable_versions)},"Tree Versions":{label:"Tree Versions",children:[...n.versions].sort(IBe.default.compare).map(u=>({value:pe.tuple(pe.Type.REFERENCE,u)}))},Dependents:{label:"Dependents",children:He.sortMap(n.dependents,u=>G.stringifyLocator(u)).map(u=>({value:pe.tuple(pe.Type.LOCATOR,u)}))}}};return r}function g5(t,e,{all:r,environment:o}){let a=[],n=r?t.workspaces:[e],u=["all","production"].includes(o),A=["all","development"].includes(o);for(let p of n)for(let h of p.anchoredPackage.dependencies.values())(p.manifest.devDependencies.has(h.identHash)?!A:!u)||a.push({workspace:p,dependency:h});return a}function d5(t,e,{recursive:r}){let o=new Map,a=new Set,n=[],u=(A,p)=>{let h=t.storedResolutions.get(p.descriptorHash);if(typeof h>"u")throw new Error("Assertion failed: The resolution should have been registered");if(!a.has(h))a.add(h);else return;let E=t.storedPackages.get(h);if(typeof E>"u")throw new Error("Assertion failed: The package should have been registered");if(G.ensureDevirtualizedLocator(E).reference.startsWith("npm:")&&E.version!==null){let v=G.stringifyIdent(E),x=He.getMapWithDefault(o,v);He.getArrayWithDefault(x,E.version).push(A)}if(r)for(let v of 
E.dependencies.values())n.push([E,v])};for(let{workspace:A,dependency:p}of e)n.push([A.anchoredLocator,p]);for(;n.length>0;){let[A,p]=n.shift();u(A,p)}return o}var bC=class extends ut{constructor(){super(...arguments);this.all=ge.Boolean("-A,--all",!1,{description:"Audit dependencies from all workspaces"});this.recursive=ge.Boolean("-R,--recursive",!1,{description:"Audit transitive dependencies as well"});this.environment=ge.String("--environment","all",{description:"Which environments to cover",validator:Js(hv)});this.json=ge.Boolean("--json",!1,{description:"Format the output as an NDJSON stream"});this.noDeprecations=ge.Boolean("--no-deprecations",!1,{description:"Don't warn about deprecated packages"});this.severity=ge.String("--severity","info",{description:"Minimal severity requested for packages to be displayed",validator:Js(gv)});this.excludes=ge.Array("--exclude",[],{description:"Array of glob patterns of packages to exclude from audit"});this.ignores=ge.Array("--ignore",[],{description:"Array of glob patterns of advisory ID's to ignore in the audit report"})}static{this.paths=[["npm","audit"]]}static{this.usage=it.Usage({description:"perform a vulnerability audit against the installed packages",details:` + This command checks for known security reports on the packages you use. The reports are by default extracted from the npm registry, and may or may not be relevant to your actual program (not all vulnerabilities affect all code paths). + + For consistency with our other commands the default is to only check the direct dependencies for the active workspace. To extend this search to all workspaces, use \`-A,--all\`. To extend this search to both direct and transitive dependencies, use \`-R,--recursive\`. + + Applying the \`--severity\` flag will limit the audit table to vulnerabilities of the corresponding severity and above. Valid values are ${SC.map(r=>`\`${r}\``).join(", ")}. + + If the \`--json\` flag is set, Yarn will print the output exactly as received from the registry. Regardless of this flag, the process will exit with a non-zero exit code if a report is found for the selected packages. + + If certain packages produce false positives for a particular environment, the \`--exclude\` flag can be used to exclude any number of packages from the audit. This can also be set in the configuration file with the \`npmAuditExcludePackages\` option. + + If particular advisories are needed to be ignored, the \`--ignore\` flag can be used with Advisory ID's to ignore any number of advisories in the audit report. This can also be set in the configuration file with the \`npmAuditIgnoreAdvisories\` option. + + To understand the dependency tree requiring vulnerable packages, check the raw report with the \`--json\` flag or use \`yarn why package\` to get more information as to who depends on them. + `,examples:[["Checks for known security issues with the installed packages. 
The output is a list of known issues.","yarn npm audit"],["Audit dependencies in all workspaces","yarn npm audit --all"],["Limit auditing to `dependencies` (excludes `devDependencies`)","yarn npm audit --environment production"],["Show audit report as valid JSON","yarn npm audit --json"],["Audit all direct and transitive dependencies","yarn npm audit --recursive"],["Output moderate (or more severe) vulnerabilities","yarn npm audit --severity moderate"],["Exclude certain packages","yarn npm audit --exclude package1 --exclude package2"],["Ignore specific advisories","yarn npm audit --ignore 1234567 --ignore 7654321"]]})}async execute(){let r=await Ke.find(this.context.cwd,this.context.plugins),{project:o,workspace:a}=await kt.find(r,this.context.cwd);if(!a)throw new sr(o.cwd,this.context.cwd);await o.restoreInstallState();let n=g5(o,a,{all:this.all,environment:this.environment}),u=d5(o,n,{recursive:this.recursive}),A=Array.from(new Set([...r.get("npmAuditExcludePackages"),...this.excludes])),p=Object.create(null);for(let[L,U]of u)A.some(z=>m5.default.isMatch(L,z))||(p[L]=[...U.keys()]);let h=Zn.getAuditRegistry({configuration:r}),E,I=await AA.start({configuration:r,stdout:this.context.stdout},async()=>{let L=Zr.post("/-/npm/v1/security/advisories/bulk",p,{authType:Zr.AuthType.BEST_EFFORT,configuration:r,jsonResponse:!0,registry:h}),U=this.noDeprecations?[]:await Promise.all(Array.from(Object.entries(p),async([te,ae])=>{let le=await Zr.getPackageMetadata(G.parseIdent(te),{project:o});return He.mapAndFilter(ae,ce=>{let{deprecated:Ce}=le.versions[ce];return Ce?[te,ce,Ce]:He.mapAndFilter.skip})})),z=await L;for(let[te,ae,le]of U.flat(1))Object.hasOwn(z,te)&&z[te].some(ce=>Lr.satisfiesWithPrereleases(ae,ce.vulnerable_versions))||(z[te]??=[],z[te].push({id:`${te} (deprecation)`,title:le.trim()||"This package has been deprecated.",severity:"moderate",vulnerable_versions:ae}));E=z});if(I.hasErrors())return I.exitCode();let v=p5(this.severity),x=Array.from(new Set([...r.get("npmAuditIgnoreAdvisories"),...this.ignores])),C=Object.create(null);for(let[L,U]of Object.entries(E)){let z=U.filter(te=>!m5.default.isMatch(`${te.id}`,x)&&v.has(te.severity));z.length>0&&(C[L]=z.map(te=>{let ae=u.get(L);if(typeof ae>"u")throw new Error("Assertion failed: Expected the registry to only return packages that were requested");let le=[...ae.keys()].filter(Ce=>Lr.satisfiesWithPrereleases(Ce,te.vulnerable_versions)),ce=new Map;for(let Ce of le)for(let de of ae.get(Ce))ce.set(de.locatorHash,de);return{...te,versions:le,dependents:[...ce.values()]}}))}let R=Object.keys(C).length>0;return R?(fs.emitTree(h5(C),{configuration:r,json:this.json,stdout:this.context.stdout,separators:2}),1):(await Rt.start({configuration:r,includeFooter:!1,json:this.json,stdout:this.context.stdout},async L=>{L.reportInfo(1,"No audit suggestions")}),R?1:0)}};Ge();Ge();Pt();qt();var y5=Ze(Jn()),E5=ve("util"),xC=class extends ut{constructor(){super(...arguments);this.fields=ge.String("-f,--fields",{description:"A comma-separated list of manifest fields that should be displayed"});this.json=ge.Boolean("--json",!1,{description:"Format the output as an NDJSON stream"});this.packages=ge.Rest()}static{this.paths=[["npm","info"]]}static{this.usage=it.Usage({category:"Npm-related commands",description:"show information about a package",details:"\n This command fetches information about a package from the npm registry and prints it in a tree format.\n\n The package does not have to be installed locally, but needs to have been published (in particular, 
local changes will be ignored even for workspaces).\n\n Append `@` to the package argument to provide information specific to the latest version that satisfies the range or to the corresponding tagged version. If the range is invalid or if there is no version satisfying the range, the command will print a warning and fall back to the latest version.\n\n If the `-f,--fields` option is set, it's a comma-separated list of fields which will be used to only display part of the package information.\n\n By default, this command won't return the `dist`, `readme`, and `users` fields, since they are often very long. To explicitly request those fields, explicitly list them with the `--fields` flag or request the output in JSON mode.\n ",examples:[["Show all available information about react (except the `dist`, `readme`, and `users` fields)","yarn npm info react"],["Show all available information about react as valid JSON (including the `dist`, `readme`, and `users` fields)","yarn npm info react --json"],["Show all available information about react@16.12.0","yarn npm info react@16.12.0"],["Show all available information about react@next","yarn npm info react@next"],["Show the description of react","yarn npm info react --fields description"],["Show all available versions of react","yarn npm info react --fields versions"],["Show the readme of react","yarn npm info react --fields readme"],["Show a few fields of react","yarn npm info react --fields homepage,repository"]]})}async execute(){let r=await Ke.find(this.context.cwd,this.context.plugins),{project:o}=await kt.find(r,this.context.cwd),a=typeof this.fields<"u"?new Set(["name",...this.fields.split(/\s*,\s*/)]):null,n=[],u=!1,A=await Rt.start({configuration:r,includeFooter:!1,json:this.json,stdout:this.context.stdout},async p=>{for(let h of this.packages){let E;if(h==="."){let ae=o.topLevelWorkspace;if(!ae.manifest.name)throw new st(`Missing ${pe.pretty(r,"name",pe.Type.CODE)} field in ${ue.fromPortablePath(K.join(ae.cwd,dr.manifest))}`);E=G.makeDescriptor(ae.manifest.name,"unknown")}else E=G.parseDescriptor(h);let I=Zr.getIdentUrl(E),v=C5(await Zr.get(I,{configuration:r,ident:E,jsonResponse:!0,customErrorMessage:Zr.customPackageError})),x=Object.keys(v.versions).sort(y5.default.compareLoose),R=v["dist-tags"].latest||x[x.length-1],L=Lr.validRange(E.range);if(L){let ae=y5.default.maxSatisfying(x,L);ae!==null?R=ae:(p.reportWarning(0,`Unmet range ${G.prettyRange(r,E.range)}; falling back to the latest version`),u=!0)}else Object.hasOwn(v["dist-tags"],E.range)?R=v["dist-tags"][E.range]:E.range!=="unknown"&&(p.reportWarning(0,`Unknown tag ${G.prettyRange(r,E.range)}; falling back to the latest version`),u=!0);let U=v.versions[R],z={...v,...U,version:R,versions:x},te;if(a!==null){te={};for(let ae of a){let le=z[ae];if(typeof le<"u")te[ae]=le;else{p.reportWarning(1,`The ${pe.pretty(r,ae,pe.Type.CODE)} field doesn't exist inside ${G.prettyIdent(r,E)}'s information`),u=!0;continue}}}else this.json||(delete z.dist,delete z.readme,delete z.users),te=z;p.reportJson(te),this.json||n.push(te)}});E5.inspect.styles.name="cyan";for(let p of n)(p!==n[0]||u)&&this.context.stdout.write(` +`),this.context.stdout.write(`${(0,E5.inspect)(p,{depth:1/0,colors:!0,compact:!1})} +`);return A.exitCode()}};function C5(t){if(Array.isArray(t)){let e=[];for(let r of t)r=C5(r),r&&e.push(r);return e}else if(typeof t=="object"&&t!==null){let e={};for(let r of Object.keys(t)){if(r.startsWith("_"))continue;let o=C5(t[r]);o&&(e[r]=o)}return e}else return t||null}Ge();Ge();qt();var 
BBe=Ze(J1()),kC=class extends ut{constructor(){super(...arguments);this.scope=ge.String("-s,--scope",{description:"Login to the registry configured for a given scope"});this.publish=ge.Boolean("--publish",!1,{description:"Login to the publish registry"});this.alwaysAuth=ge.Boolean("--always-auth",{description:"Set the npmAlwaysAuth configuration"})}static{this.paths=[["npm","login"]]}static{this.usage=it.Usage({category:"Npm-related commands",description:"store new login info to access the npm registry",details:"\n This command will ask you for your username, password, and 2FA One-Time-Password (when it applies). It will then modify your local configuration (in your home folder, never in the project itself) to reference the new tokens thus generated.\n\n Adding the `-s,--scope` flag will cause the authentication to be done against whatever registry is configured for the associated scope (see also `npmScopes`).\n\n Adding the `--publish` flag will cause the authentication to be done against the registry used when publishing the package (see also `publishConfig.registry` and `npmPublishRegistry`).\n ",examples:[["Login to the default registry","yarn npm login"],["Login to the registry linked to the @my-scope registry","yarn npm login --scope my-scope"],["Login to the publish registry for the current package","yarn npm login --publish"]]})}async execute(){let r=await Ke.find(this.context.cwd,this.context.plugins),o=await qQ({configuration:r,cwd:this.context.cwd,publish:this.publish,scope:this.scope});return(await Rt.start({configuration:r,stdout:this.context.stdout,includeFooter:!1},async n=>{let u=await Yvt({configuration:r,registry:o,report:n,stdin:this.context.stdin,stdout:this.context.stdout}),A=await jvt(o,u,r);return await Gvt(o,A,{alwaysAuth:this.alwaysAuth,scope:this.scope}),n.reportInfo(0,"Successfully logged in")})).exitCode()}};async function qQ({scope:t,publish:e,configuration:r,cwd:o}){return t&&e?Zn.getScopeRegistry(t,{configuration:r,type:Zn.RegistryType.PUBLISH_REGISTRY}):t?Zn.getScopeRegistry(t,{configuration:r}):e?Zn.getPublishRegistry((await _y(r,o)).manifest,{configuration:r}):Zn.getDefaultRegistry({configuration:r})}async function jvt(t,e,r){let o=`/-/user/org.couchdb.user:${encodeURIComponent(e.name)}`,a={_id:`org.couchdb.user:${e.name}`,name:e.name,password:e.password,type:"user",roles:[],date:new Date().toISOString()},n={attemptedAs:e.name,configuration:r,registry:t,jsonResponse:!0,authType:Zr.AuthType.NO_AUTH};try{return(await Zr.put(o,a,n)).token}catch(E){if(!(E.originalError?.name==="HTTPError"&&E.originalError?.response.statusCode===409))throw E}let u={...n,authType:Zr.AuthType.NO_AUTH,headers:{authorization:`Basic ${Buffer.from(`${e.name}:${e.password}`).toString("base64")}`}},A=await Zr.get(o,u);for(let[E,I]of Object.entries(A))(!a[E]||E==="roles")&&(a[E]=I);let p=`${o}/-rev/${a._rev}`;return(await Zr.put(p,a,u)).token}async function Gvt(t,e,{alwaysAuth:r,scope:o}){let a=u=>A=>{let p=He.isIndexableObject(A)?A:{},h=p[u],E=He.isIndexableObject(h)?h:{};return{...p,[u]:{...E,...r!==void 0?{npmAlwaysAuth:r}:{},npmAuthToken:e}}},n=o?{npmScopes:a(o)}:{npmRegistries:a(t)};return await Ke.updateHomeConfiguration(n)}async function Yvt({configuration:t,registry:e,report:r,stdin:o,stdout:a}){r.reportInfo(0,`Logging in to ${pe.pretty(t,e,pe.Type.URL)}`);let n=!1;if(e.match(/^https:\/\/npm\.pkg\.github\.com(\/|$)/)&&(r.reportInfo(0,"You seem to be using the GitHub Package Registry. 
Tokens must be generated with the 'repo', 'write:packages', and 'read:packages' permissions."),n=!0),r.reportSeparator(),t.env.YARN_IS_TEST_ENV)return{name:t.env.YARN_INJECT_NPM_USER||"",password:t.env.YARN_INJECT_NPM_PASSWORD||""};let u=await(0,BBe.prompt)([{type:"input",name:"name",message:"Username:",required:!0,onCancel:()=>process.exit(130),stdin:o,stdout:a},{type:"password",name:"password",message:n?"Token:":"Password:",required:!0,onCancel:()=>process.exit(130),stdin:o,stdout:a}]);return r.reportSeparator(),u}Ge();Ge();qt();var QC=new Set(["npmAuthIdent","npmAuthToken"]),FC=class extends ut{constructor(){super(...arguments);this.scope=ge.String("-s,--scope",{description:"Logout of the registry configured for a given scope"});this.publish=ge.Boolean("--publish",!1,{description:"Logout of the publish registry"});this.all=ge.Boolean("-A,--all",!1,{description:"Logout of all registries"})}static{this.paths=[["npm","logout"]]}static{this.usage=it.Usage({category:"Npm-related commands",description:"logout of the npm registry",details:"\n This command will log you out by modifying your local configuration (in your home folder, never in the project itself) to delete all credentials linked to a registry.\n\n Adding the `-s,--scope` flag will cause the deletion to be done against whatever registry is configured for the associated scope (see also `npmScopes`).\n\n Adding the `--publish` flag will cause the deletion to be done against the registry used when publishing the package (see also `publishConfig.registry` and `npmPublishRegistry`).\n\n Adding the `-A,--all` flag will cause the deletion to be done against all registries and scopes.\n ",examples:[["Logout of the default registry","yarn npm logout"],["Logout of the @my-scope scope","yarn npm logout --scope my-scope"],["Logout of the publish registry for the current package","yarn npm logout --publish"],["Logout of all registries","yarn npm logout --all"]]})}async execute(){let r=await Ke.find(this.context.cwd,this.context.plugins),o=async()=>{let n=await qQ({configuration:r,cwd:this.context.cwd,publish:this.publish,scope:this.scope}),u=await Ke.find(this.context.cwd,this.context.plugins),A=G.makeIdent(this.scope??null,"pkg");return!Zn.getAuthConfiguration(n,{configuration:u,ident:A}).get("npmAuthToken")};return(await Rt.start({configuration:r,stdout:this.context.stdout},async n=>{if(this.all&&(await Kvt(),n.reportInfo(0,"Successfully logged out from everything")),this.scope){await vBe("npmScopes",this.scope),await o()?n.reportInfo(0,`Successfully logged out from ${this.scope}`):n.reportWarning(0,"Scope authentication settings removed, but some other ones settings still apply to it");return}let u=await qQ({configuration:r,cwd:this.context.cwd,publish:this.publish});await vBe("npmRegistries",u),await o()?n.reportInfo(0,`Successfully logged out from ${u}`):n.reportWarning(0,"Registry authentication settings removed, but some other ones settings still apply to it")})).exitCode()}};function Wvt(t,e){let r=t[e];if(!He.isIndexableObject(r))return!1;let o=new Set(Object.keys(r));if([...QC].every(n=>!o.has(n)))return!1;for(let n of QC)o.delete(n);if(o.size===0)return t[e]=void 0,!0;let a={...r};for(let n of QC)delete a[n];return t[e]=a,!0}async function Kvt(){let t=e=>{let r=!1,o=He.isIndexableObject(e)?{...e}:{};o.npmAuthToken&&(delete o.npmAuthToken,r=!0);for(let a of Object.keys(o))Wvt(o,a)&&(r=!0);if(Object.keys(o).length!==0)return r?o:e};return await Ke.updateHomeConfiguration({npmRegistries:t,npmScopes:t})}async function vBe(t,e){return 
await Ke.updateHomeConfiguration({[t]:r=>{let o=He.isIndexableObject(r)?r:{};if(!Object.hasOwn(o,e))return r;let a=o[e],n=He.isIndexableObject(a)?a:{},u=new Set(Object.keys(n));if([...QC].every(p=>!u.has(p)))return r;for(let p of QC)u.delete(p);if(u.size===0)return Object.keys(o).length===1?void 0:{...o,[e]:void 0};let A={};for(let p of QC)A[p]=void 0;return{...o,[e]:{...n,...A}}}})}Ge();qt();var RC=class extends ut{constructor(){super(...arguments);this.access=ge.String("--access",{description:"The access for the published package (public or restricted)"});this.tag=ge.String("--tag","latest",{description:"The tag on the registry that the package should be attached to"});this.tolerateRepublish=ge.Boolean("--tolerate-republish",!1,{description:"Warn and exit when republishing an already existing version of a package"});this.otp=ge.String("--otp",{description:"The OTP token to use with the command"})}static{this.paths=[["npm","publish"]]}static{this.usage=it.Usage({category:"Npm-related commands",description:"publish the active workspace to the npm registry",details:'\n This command will pack the active workspace into a fresh archive and upload it to the npm registry.\n\n The package will by default be attached to the `latest` tag on the registry, but this behavior can be overridden by using the `--tag` option.\n\n Note that for legacy reasons scoped packages are by default published with an access set to `restricted` (aka "private packages"). This requires you to register for a paid npm plan. In case you simply wish to publish a public scoped package to the registry (for free), just add the `--access public` flag. This behavior can be enabled by default through the `npmPublishAccess` settings.\n ',examples:[["Publish the active workspace","yarn npm publish"]]})}async execute(){let r=await Ke.find(this.context.cwd,this.context.plugins),{project:o,workspace:a}=await kt.find(r,this.context.cwd);if(!a)throw new sr(o.cwd,this.context.cwd);if(a.manifest.private)throw new st("Private workspaces cannot be published");if(a.manifest.name===null||a.manifest.version===null)throw new st("Workspaces must have valid names and versions to be published on an external registry");await o.restoreInstallState();let n=a.manifest.name,u=a.manifest.version,A=Zn.getPublishRegistry(a.manifest,{configuration:r});return(await Rt.start({configuration:r,stdout:this.context.stdout},async h=>{if(this.tolerateRepublish)try{let E=await Zr.get(Zr.getIdentUrl(n),{configuration:r,registry:A,ident:n,jsonResponse:!0});if(!Object.hasOwn(E,"versions"))throw new Jt(15,'Registry returned invalid data for - missing "versions" field');if(Object.hasOwn(E.versions,u)){h.reportWarning(0,`Registry already knows about version ${u}; skipping.`);return}}catch(E){if(E.originalError?.response?.statusCode!==404)throw E}await An.maybeExecuteWorkspaceLifecycleScript(a,"prepublish",{report:h}),await CA.prepareForPack(a,{report:h},async()=>{let E=await CA.genPackList(a);for(let R of E)h.reportInfo(null,R);let I=await CA.genPackStream(a,E),v=await He.bufferStream(I),x=await PC.getGitHead(a.cwd),C=await PC.makePublishBody(a,v,{access:this.access,tag:this.tag,registry:A,gitHead:x});await Zr.put(Zr.getIdentUrl(n),C,{configuration:r,registry:A,ident:n,otp:this.otp,jsonResponse:!0})}),h.reportInfo(0,"Package archive published")})).exitCode()}};Ge();qt();var DBe=Ze(Jn());Ge();Pt();qt();var TC=class extends ut{constructor(){super(...arguments);this.json=ge.Boolean("--json",!1,{description:"Format the output as an NDJSON 
stream"});this.package=ge.String({required:!1})}static{this.paths=[["npm","tag","list"]]}static{this.usage=it.Usage({category:"Npm-related commands",description:"list all dist-tags of a package",details:` + This command will list all tags of a package from the npm registry. + + If the package is not specified, Yarn will default to the current workspace. + `,examples:[["List all tags of package `my-pkg`","yarn npm tag list my-pkg"]]})}async execute(){let r=await Ke.find(this.context.cwd,this.context.plugins),{project:o,workspace:a}=await kt.find(r,this.context.cwd),n;if(typeof this.package<"u")n=G.parseIdent(this.package);else{if(!a)throw new sr(o.cwd,this.context.cwd);if(!a.manifest.name)throw new st(`Missing 'name' field in ${ue.fromPortablePath(K.join(a.cwd,dr.manifest))}`);n=a.manifest.name}let u=await mv(n,r),p={children:He.sortMap(Object.entries(u),([h])=>h).map(([h,E])=>({value:pe.tuple(pe.Type.RESOLUTION,{descriptor:G.makeDescriptor(n,h),locator:G.makeLocator(n,E)})}))};return fs.emitTree(p,{configuration:r,json:this.json,stdout:this.context.stdout})}};async function mv(t,e){let r=`/-/package${Zr.getIdentUrl(t)}/dist-tags`;return Zr.get(r,{configuration:e,ident:t,jsonResponse:!0,customErrorMessage:Zr.customPackageError})}var NC=class extends ut{constructor(){super(...arguments);this.package=ge.String();this.tag=ge.String()}static{this.paths=[["npm","tag","add"]]}static{this.usage=it.Usage({category:"Npm-related commands",description:"add a tag for a specific version of a package",details:` + This command will add a tag to the npm registry for a specific version of a package. If the tag already exists, it will be overwritten. + `,examples:[["Add a `beta` tag for version `2.3.4-beta.4` of package `my-pkg`","yarn npm tag add my-pkg@2.3.4-beta.4 beta"]]})}async execute(){let r=await Ke.find(this.context.cwd,this.context.plugins),{project:o,workspace:a}=await kt.find(r,this.context.cwd);if(!a)throw new sr(o.cwd,this.context.cwd);let n=G.parseDescriptor(this.package,!0),u=n.range;if(!DBe.default.valid(u))throw new st(`The range ${pe.pretty(r,n.range,pe.Type.RANGE)} must be a valid semver version`);let A=Zn.getPublishRegistry(a.manifest,{configuration:r}),p=pe.pretty(r,n,pe.Type.IDENT),h=pe.pretty(r,u,pe.Type.RANGE),E=pe.pretty(r,this.tag,pe.Type.CODE);return(await Rt.start({configuration:r,stdout:this.context.stdout},async v=>{let x=await mv(n,r);Object.hasOwn(x,this.tag)&&x[this.tag]===u&&v.reportWarning(0,`Tag ${E} is already set to version ${h}`);let C=`/-/package${Zr.getIdentUrl(n)}/dist-tags/${encodeURIComponent(this.tag)}`;await Zr.put(C,u,{configuration:r,registry:A,ident:n,jsonRequest:!0,jsonResponse:!0}),v.reportInfo(0,`Tag ${E} added to version ${h} of package ${p}`)})).exitCode()}};Ge();qt();var LC=class extends ut{constructor(){super(...arguments);this.package=ge.String();this.tag=ge.String()}static{this.paths=[["npm","tag","remove"]]}static{this.usage=it.Usage({category:"Npm-related commands",description:"remove a tag from a package",details:` + This command will remove a tag from a package from the npm registry. 
+ `,examples:[["Remove the `beta` tag from package `my-pkg`","yarn npm tag remove my-pkg beta"]]})}async execute(){if(this.tag==="latest")throw new st("The 'latest' tag cannot be removed.");let r=await Ke.find(this.context.cwd,this.context.plugins),{project:o,workspace:a}=await kt.find(r,this.context.cwd);if(!a)throw new sr(o.cwd,this.context.cwd);let n=G.parseIdent(this.package),u=Zn.getPublishRegistry(a.manifest,{configuration:r}),A=pe.pretty(r,this.tag,pe.Type.CODE),p=pe.pretty(r,n,pe.Type.IDENT),h=await mv(n,r);if(!Object.hasOwn(h,this.tag))throw new st(`${A} is not a tag of package ${p}`);return(await Rt.start({configuration:r,stdout:this.context.stdout},async I=>{let v=`/-/package${Zr.getIdentUrl(n)}/dist-tags/${encodeURIComponent(this.tag)}`;await Zr.del(v,{configuration:r,registry:u,ident:n,jsonResponse:!0}),I.reportInfo(0,`Tag ${A} removed from package ${p}`)})).exitCode()}};Ge();Ge();qt();var MC=class extends ut{constructor(){super(...arguments);this.scope=ge.String("-s,--scope",{description:"Print username for the registry configured for a given scope"});this.publish=ge.Boolean("--publish",!1,{description:"Print username for the publish registry"})}static{this.paths=[["npm","whoami"]]}static{this.usage=it.Usage({category:"Npm-related commands",description:"display the name of the authenticated user",details:"\n Print the username associated with the current authentication settings to the standard output.\n\n When using `-s,--scope`, the username printed will be the one that matches the authentication settings of the registry associated with the given scope (those settings can be overriden using the `npmRegistries` map, and the registry associated with the scope is configured via the `npmScopes` map).\n\n When using `--publish`, the registry we'll select will by default be the one used when publishing packages (`publishConfig.registry` or `npmPublishRegistry` if available, otherwise we'll fallback to the regular `npmRegistryServer`).\n ",examples:[["Print username for the default registry","yarn npm whoami"],["Print username for the registry on a given scope","yarn npm whoami --scope company"]]})}async execute(){let r=await Ke.find(this.context.cwd,this.context.plugins),o;return this.scope&&this.publish?o=Zn.getScopeRegistry(this.scope,{configuration:r,type:Zn.RegistryType.PUBLISH_REGISTRY}):this.scope?o=Zn.getScopeRegistry(this.scope,{configuration:r}):this.publish?o=Zn.getPublishRegistry((await _y(r,this.context.cwd)).manifest,{configuration:r}):o=Zn.getDefaultRegistry({configuration:r}),(await Rt.start({configuration:r,stdout:this.context.stdout},async n=>{let u;try{u=await Zr.get("/-/whoami",{configuration:r,registry:o,authType:Zr.AuthType.ALWAYS_AUTH,jsonResponse:!0,ident:this.scope?G.makeIdent(this.scope,""):void 0})}catch(A){if(A.response?.statusCode===401||A.response?.statusCode===403){n.reportError(41,"Authentication failed - your credentials may have expired");return}else throw A}n.reportInfo(0,u.username)})).exitCode()}};var Vvt={configuration:{npmPublishAccess:{description:"Default access of the published packages",type:"STRING",default:null},npmAuditExcludePackages:{description:"Array of glob patterns of packages to exclude from npm audit",type:"STRING",default:[],isArray:!0},npmAuditIgnoreAdvisories:{description:"Array of glob patterns of advisory IDs to exclude from npm audit",type:"STRING",default:[],isArray:!0}},commands:[bC,xC,kC,FC,RC,NC,TC,LC,MC]},zvt=Vvt;var 
b5={};Vt(b5,{PatchCommand:()=>jC,PatchCommitCommand:()=>qC,PatchFetcher:()=>Iv,PatchResolver:()=>Bv,default:()=>pDt,patchUtils:()=>pd});Ge();Ge();Pt();nA();var pd={};Vt(pd,{applyPatchFile:()=>GQ,diffFolders:()=>P5,ensureUnpatchedDescriptor:()=>I5,ensureUnpatchedLocator:()=>WQ,extractPackageToDisk:()=>D5,extractPatchFlags:()=>FBe,isParentRequired:()=>v5,isPatchDescriptor:()=>YQ,isPatchLocator:()=>$h,loadPatchFiles:()=>wv,makeDescriptor:()=>KQ,makeLocator:()=>B5,makePatchHash:()=>S5,parseDescriptor:()=>Ev,parseLocator:()=>Cv,parsePatchFile:()=>yv,unpatchDescriptor:()=>uDt,unpatchLocator:()=>ADt});Ge();Pt();Ge();Pt();var Jvt=/^@@ -(\d+)(,(\d+))? \+(\d+)(,(\d+))? @@.*/;function OC(t){return K.relative(It.root,K.resolve(It.root,ue.toPortablePath(t)))}function Xvt(t){let e=t.trim().match(Jvt);if(!e)throw new Error(`Bad header line: '${t}'`);return{original:{start:Math.max(Number(e[1]),1),length:Number(e[3]||1)},patched:{start:Math.max(Number(e[4]),1),length:Number(e[6]||1)}}}var Zvt=420,$vt=493;var PBe=()=>({semverExclusivity:null,diffLineFromPath:null,diffLineToPath:null,oldMode:null,newMode:null,deletedFileMode:null,newFileMode:null,renameFrom:null,renameTo:null,beforeHash:null,afterHash:null,fromPath:null,toPath:null,hunks:null}),eDt=t=>({header:Xvt(t),parts:[]}),tDt={"@":"header","-":"deletion","+":"insertion"," ":"context","\\":"pragma",undefined:"context"};function rDt(t){let e=[],r=PBe(),o="parsing header",a=null,n=null;function u(){a&&(n&&(a.parts.push(n),n=null),r.hunks.push(a),a=null)}function A(){u(),e.push(r),r=PBe()}for(let p=0;p0?"patch":"mode change",z=null;switch(U){case"rename":{if(!E||!I)throw new Error("Bad parser state: rename from & to not given");e.push({type:"rename",semverExclusivity:o,fromPath:OC(E),toPath:OC(I)}),z=I}break;case"file deletion":{let te=a||C;if(!te)throw new Error("Bad parse state: no path given for file deletion");e.push({type:"file deletion",semverExclusivity:o,hunk:L&&L[0]||null,path:OC(te),mode:jQ(p),hash:v})}break;case"file creation":{let te=n||R;if(!te)throw new Error("Bad parse state: no path given for file creation");e.push({type:"file creation",semverExclusivity:o,hunk:L&&L[0]||null,path:OC(te),mode:jQ(h),hash:x})}break;case"patch":case"mode change":z=R||n;break;default:He.assertNever(U);break}z&&u&&A&&u!==A&&e.push({type:"mode change",semverExclusivity:o,path:OC(z),oldMode:jQ(u),newMode:jQ(A)}),z&&L&&L.length&&e.push({type:"patch",semverExclusivity:o,path:OC(z),hunks:L,beforeHash:v,afterHash:x})}if(e.length===0)throw new Error("Unable to parse patch file: No changes found. 
Make sure the patch is a valid UTF8 encoded string");return e}function jQ(t){let e=parseInt(t,8)&511;if(e!==Zvt&&e!==$vt)throw new Error(`Unexpected file mode string: ${t}`);return e}function yv(t){let e=t.split(/\n/g);return e[e.length-1]===""&&e.pop(),nDt(rDt(e))}function iDt(t){let e=0,r=0;for(let{type:o,lines:a}of t.parts)switch(o){case"context":r+=a.length,e+=a.length;break;case"deletion":e+=a.length;break;case"insertion":r+=a.length;break;default:He.assertNever(o);break}if(e!==t.header.original.length||r!==t.header.patched.length){let o=a=>a<0?a:`+${a}`;throw new Error(`hunk header integrity check failed (expected @@ ${o(t.header.original.length)} ${o(t.header.patched.length)} @@, got @@ ${o(e)} ${o(r)} @@)`)}}Ge();Pt();var UC=class extends Error{constructor(r,o){super(`Cannot apply hunk #${r+1}`);this.hunk=o}};async function _C(t,e,r){let o=await t.lstatPromise(e),a=await r();typeof a<"u"&&(e=a),await t.lutimesPromise(e,o.atime,o.mtime)}async function GQ(t,{baseFs:e=new Tn,dryRun:r=!1,version:o=null}={}){for(let a of t)if(!(a.semverExclusivity!==null&&o!==null&&!Lr.satisfiesWithPrereleases(o,a.semverExclusivity)))switch(a.type){case"file deletion":if(r){if(!e.existsSync(a.path))throw new Error(`Trying to delete a file that doesn't exist: ${a.path}`)}else await _C(e,K.dirname(a.path),async()=>{await e.unlinkPromise(a.path)});break;case"rename":if(r){if(!e.existsSync(a.fromPath))throw new Error(`Trying to move a file that doesn't exist: ${a.fromPath}`)}else await _C(e,K.dirname(a.fromPath),async()=>{await _C(e,K.dirname(a.toPath),async()=>{await _C(e,a.fromPath,async()=>(await e.movePromise(a.fromPath,a.toPath),a.toPath))})});break;case"file creation":if(r){if(e.existsSync(a.path))throw new Error(`Trying to create a file that already exists: ${a.path}`)}else{let n=a.hunk?a.hunk.parts[0].lines.join(` +`)+(a.hunk.parts[0].noNewlineAtEndOfFile?"":` +`):"";await e.mkdirpPromise(K.dirname(a.path),{chmod:493,utimes:[Bi.SAFE_TIME,Bi.SAFE_TIME]}),await e.writeFilePromise(a.path,n,{mode:a.mode}),await e.utimesPromise(a.path,Bi.SAFE_TIME,Bi.SAFE_TIME)}break;case"patch":await _C(e,a.path,async()=>{await aDt(a,{baseFs:e,dryRun:r})});break;case"mode change":{let u=(await e.statPromise(a.path)).mode;if(SBe(a.newMode)!==SBe(u))continue;await _C(e,a.path,async()=>{await e.chmodPromise(a.path,a.newMode)})}break;default:He.assertNever(a);break}}function SBe(t){return(t&64)>0}function bBe(t){return t.replace(/\s+$/,"")}function oDt(t,e){return bBe(t)===bBe(e)}async function aDt({hunks:t,path:e},{baseFs:r,dryRun:o=!1}){let a=await r.statSync(e).mode,u=(await r.readFileSync(e,"utf8")).split(/\n/),A=[],p=0,h=0;for(let I of t){let v=Math.max(h,I.header.patched.start+p),x=Math.max(0,v-h),C=Math.max(0,u.length-v-I.header.original.length),R=Math.max(x,C),L=0,U=0,z=null;for(;L<=R;){if(L<=x&&(U=v-L,z=xBe(I,u,U),z!==null)){L=-L;break}if(L<=C&&(U=v+L,z=xBe(I,u,U),z!==null))break;L+=1}if(z===null)throw new UC(t.indexOf(I),I);A.push(z),p+=L,h=U+I.header.original.length}if(o)return;let E=0;for(let I of A)for(let v of I)switch(v.type){case"splice":{let x=v.index+E;u.splice(x,v.numToDelete,...v.linesToInsert),E+=v.linesToInsert.length-v.numToDelete}break;case"pop":u.pop();break;case"push":u.push(v.line);break;default:He.assertNever(v);break}await r.writeFilePromise(e,u.join(` +`),{mode:a})}function xBe(t,e,r){let o=[];for(let a of t.parts)switch(a.type){case"context":case"deletion":{for(let n of a.lines){let u=e[r];if(u==null||!oDt(u,n))return 
null;r+=1}a.type==="deletion"&&(o.push({type:"splice",index:r-a.lines.length,numToDelete:a.lines.length,linesToInsert:[]}),a.noNewlineAtEndOfFile&&o.push({type:"push",line:""}))}break;case"insertion":o.push({type:"splice",index:r,numToDelete:0,linesToInsert:a.lines}),a.noNewlineAtEndOfFile&&o.push({type:"pop"});break;default:He.assertNever(a.type);break}return o}var cDt=/^builtin<([^>]+)>$/;function HC(t,e){let{protocol:r,source:o,selector:a,params:n}=G.parseRange(t);if(r!=="patch:")throw new Error("Invalid patch range");if(o===null)throw new Error("Patch locators must explicitly define their source");let u=a?a.split(/&/).map(E=>ue.toPortablePath(E)):[],A=n&&typeof n.locator=="string"?G.parseLocator(n.locator):null,p=n&&typeof n.version=="string"?n.version:null,h=e(o);return{parentLocator:A,sourceItem:h,patchPaths:u,sourceVersion:p}}function YQ(t){return t.range.startsWith("patch:")}function $h(t){return t.reference.startsWith("patch:")}function Ev(t){let{sourceItem:e,...r}=HC(t.range,G.parseDescriptor);return{...r,sourceDescriptor:e}}function Cv(t){let{sourceItem:e,...r}=HC(t.reference,G.parseLocator);return{...r,sourceLocator:e}}function uDt(t){let{sourceItem:e}=HC(t.range,G.parseDescriptor);return e}function ADt(t){let{sourceItem:e}=HC(t.reference,G.parseLocator);return e}function I5(t){if(!YQ(t))return t;let{sourceItem:e}=HC(t.range,G.parseDescriptor);return e}function WQ(t){if(!$h(t))return t;let{sourceItem:e}=HC(t.reference,G.parseLocator);return e}function kBe({parentLocator:t,sourceItem:e,patchPaths:r,sourceVersion:o,patchHash:a},n){let u=t!==null?{locator:G.stringifyLocator(t)}:{},A=typeof o<"u"?{version:o}:{},p=typeof a<"u"?{hash:a}:{};return G.makeRange({protocol:"patch:",source:n(e),selector:r.join("&"),params:{...A,...p,...u}})}function KQ(t,{parentLocator:e,sourceDescriptor:r,patchPaths:o}){return G.makeDescriptor(t,kBe({parentLocator:e,sourceItem:r,patchPaths:o},G.stringifyDescriptor))}function B5(t,{parentLocator:e,sourcePackage:r,patchPaths:o,patchHash:a}){return G.makeLocator(t,kBe({parentLocator:e,sourceItem:r,sourceVersion:r.version,patchPaths:o,patchHash:a},G.stringifyLocator))}function QBe({onAbsolute:t,onRelative:e,onProject:r,onBuiltin:o},a){let n=a.lastIndexOf("!");n!==-1&&(a=a.slice(n+1));let u=a.match(cDt);return u!==null?o(u[1]):a.startsWith("~/")?r(a.slice(2)):K.isAbsolute(a)?t(a):e(a)}function FBe(t){let e=t.lastIndexOf("!");return{optional:(e!==-1?new Set(t.slice(0,e).split(/!/)):new Set).has("optional")}}function v5(t){return QBe({onAbsolute:()=>!1,onRelative:()=>!0,onProject:()=>!1,onBuiltin:()=>!1},t)}async function wv(t,e,r){let o=t!==null?await r.fetcher.fetch(t,r):null,a=o&&o.localPath?{packageFs:new gn(It.root),prefixPath:K.relative(It.root,o.localPath)}:o;o&&o!==a&&o.releaseFs&&o.releaseFs();let n=await He.releaseAfterUseAsync(async()=>await Promise.all(e.map(async u=>{let A=FBe(u),p=await QBe({onAbsolute:async h=>await oe.readFilePromise(h,"utf8"),onRelative:async h=>{if(a===null)throw new Error("Assertion failed: The parent locator should have been fetched");return await a.packageFs.readFilePromise(K.join(a.prefixPath,h),"utf8")},onProject:async h=>await oe.readFilePromise(K.join(r.project.cwd,h),"utf8"),onBuiltin:async h=>await r.project.configuration.firstHook(E=>E.getBuiltinPatch,r.project,h)},u);return{...A,source:p}})));for(let u of n)typeof u.source=="string"&&(u.source=u.source.replace(/\r\n?/g,` +`));return n}async function D5(t,{cache:e,project:r}){let o=r.storedPackages.get(t.locatorHash);if(typeof o>"u")throw new Error("Assertion failed: 
Expected the package to be registered");let a=WQ(t),n=r.storedChecksums,u=new ki,A=await oe.mktempPromise(),p=K.join(A,"source"),h=K.join(A,"user"),E=K.join(A,".yarn-patch.json"),I=r.configuration.makeFetcher(),v=[];try{let x,C;if(t.locatorHash===a.locatorHash){let R=await I.fetch(t,{cache:e,project:r,fetcher:I,checksums:n,report:u});v.push(()=>R.releaseFs?.()),x=R,C=R}else x=await I.fetch(t,{cache:e,project:r,fetcher:I,checksums:n,report:u}),v.push(()=>x.releaseFs?.()),C=await I.fetch(t,{cache:e,project:r,fetcher:I,checksums:n,report:u}),v.push(()=>C.releaseFs?.());await Promise.all([oe.copyPromise(p,x.prefixPath,{baseFs:x.packageFs}),oe.copyPromise(h,C.prefixPath,{baseFs:C.packageFs}),oe.writeJsonPromise(E,{locator:G.stringifyLocator(t),version:o.version})])}finally{for(let x of v)x()}return oe.detachTemp(A),h}async function P5(t,e){let r=ue.fromPortablePath(t).replace(/\\/g,"/"),o=ue.fromPortablePath(e).replace(/\\/g,"/"),{stdout:a,stderr:n}=await Ur.execvp("git",["-c","core.safecrlf=false","diff","--src-prefix=a/","--dst-prefix=b/","--ignore-cr-at-eol","--full-index","--no-index","--no-renames","--text",r,o],{cwd:ue.toPortablePath(process.cwd()),env:{...process.env,GIT_CONFIG_NOSYSTEM:"1",HOME:"",XDG_CONFIG_HOME:"",USERPROFILE:""}});if(n.length>0)throw new Error(`Unable to diff directories. Make sure you have a recent version of 'git' available in PATH. +The following error was reported by 'git': +${n}`);let u=r.startsWith("/")?A=>A.slice(1):A=>A;return a.replace(new RegExp(`(a|b)(${He.escapeRegExp(`/${u(r)}/`)})`,"g"),"$1/").replace(new RegExp(`(a|b)${He.escapeRegExp(`/${u(o)}/`)}`,"g"),"$1/").replace(new RegExp(He.escapeRegExp(`${r}/`),"g"),"").replace(new RegExp(He.escapeRegExp(`${o}/`),"g"),"")}function S5(t,e){let r=[];for(let{source:o}of t){if(o===null)continue;let a=yv(o);for(let n of a){let{semverExclusivity:u,...A}=n;u!==null&&e!==null&&!Lr.satisfiesWithPrereleases(e,u)||r.push(JSON.stringify(A))}}return wn.makeHash(`${3}`,...r).slice(0,6)}Ge();function RBe(t,{configuration:e,report:r}){for(let o of t.parts)for(let a of o.lines)switch(o.type){case"context":r.reportInfo(null,` ${pe.pretty(e,a,"grey")}`);break;case"deletion":r.reportError(28,`- ${pe.pretty(e,a,pe.Type.REMOVED)}`);break;case"insertion":r.reportError(28,`+ ${pe.pretty(e,a,pe.Type.ADDED)}`);break;default:He.assertNever(o.type)}}var Iv=class{supports(e,r){return!!$h(e)}getLocalPath(e,r){return null}async fetch(e,r){let o=r.checksums.get(e.locatorHash)||null,[a,n,u]=await r.cache.fetchPackageFromCache(e,o,{onHit:()=>r.report.reportCacheHit(e),onMiss:()=>r.report.reportCacheMiss(e,`${G.prettyLocator(r.project.configuration,e)} can't be found in the cache and will be fetched from the disk`),loader:()=>this.patchPackage(e,r),...r.cacheOptions});return{packageFs:a,releaseFs:n,prefixPath:G.getIdentVendorPath(e),localPath:this.getLocalPath(e,r),checksum:u}}async patchPackage(e,r){let{parentLocator:o,sourceLocator:a,sourceVersion:n,patchPaths:u}=Cv(e),A=await wv(o,u,r),p=await oe.mktempPromise(),h=K.join(p,"current.zip"),E=await r.fetcher.fetch(a,r),I=G.getIdentVendorPath(e),v=new Zi(h,{create:!0,level:r.project.configuration.get("compressionLevel")});await He.releaseAfterUseAsync(async()=>{await v.copyPromise(I,E.prefixPath,{baseFs:E.packageFs,stableSort:!0})},E.releaseFs),v.saveAndClose();for(let{source:x,optional:C}of A){if(x===null)continue;let R=new Zi(h,{level:r.project.configuration.get("compressionLevel")}),L=new gn(K.resolve(It.root,I),{baseFs:R});try{await GQ(yv(x),{baseFs:L,version:n})}catch(U){if(!(U instanceof 
UC))throw U;let z=r.project.configuration.get("enableInlineHunks"),te=!z&&!C?" (set enableInlineHunks for details)":"",ae=`${G.prettyLocator(r.project.configuration,e)}: ${U.message}${te}`,le=ce=>{z&&RBe(U.hunk,{configuration:r.project.configuration,report:ce})};if(R.discardAndClose(),C){r.report.reportWarningOnce(66,ae,{reportExtra:le});continue}else throw new Jt(66,ae,le)}R.saveAndClose()}return new Zi(h,{level:r.project.configuration.get("compressionLevel")})}};Ge();var Bv=class{supportsDescriptor(e,r){return!!YQ(e)}supportsLocator(e,r){return!!$h(e)}shouldPersistResolution(e,r){return!1}bindDescriptor(e,r,o){let{patchPaths:a}=Ev(e);return a.every(n=>!v5(n))?e:G.bindDescriptor(e,{locator:G.stringifyLocator(r)})}getResolutionDependencies(e,r){let{sourceDescriptor:o}=Ev(e);return{sourceDescriptor:r.project.configuration.normalizeDependency(o)}}async getCandidates(e,r,o){if(!o.fetchOptions)throw new Error("Assertion failed: This resolver cannot be used unless a fetcher is configured");let{parentLocator:a,patchPaths:n}=Ev(e),u=await wv(a,n,o.fetchOptions),A=r.sourceDescriptor;if(typeof A>"u")throw new Error("Assertion failed: The dependency should have been resolved");let p=S5(u,A.version);return[B5(e,{parentLocator:a,sourcePackage:A,patchPaths:n,patchHash:p})]}async getSatisfying(e,r,o,a){let[n]=await this.getCandidates(e,r,a);return{locators:o.filter(u=>u.locatorHash===n.locatorHash),sorted:!1}}async resolve(e,r){let{sourceLocator:o}=Cv(e);return{...await r.resolver.resolve(o,r),...e}}};Ge();Pt();qt();var qC=class extends ut{constructor(){super(...arguments);this.save=ge.Boolean("-s,--save",!1,{description:"Add the patch to your resolution entries"});this.patchFolder=ge.String()}static{this.paths=[["patch-commit"]]}static{this.usage=it.Usage({description:"generate a patch out of a directory",details:"\n By default, this will print a patchfile on stdout based on the diff between the folder passed in and the original version of the package. Such file is suitable for consumption with the `patch:` protocol.\n\n With the `-s,--save` option set, the patchfile won't be printed on stdout anymore and will instead be stored within a local file (by default kept within `.yarn/patches`, but configurable via the `patchFolder` setting). 
A `resolutions` entry will also be added to your top-level manifest, referencing the patched package via the `patch:` protocol.\n\n Note that only folders generated by `yarn patch` are accepted as valid input for `yarn patch-commit`.\n "})}async execute(){let r=await Ke.find(this.context.cwd,this.context.plugins),{project:o,workspace:a}=await kt.find(r,this.context.cwd);if(!a)throw new sr(o.cwd,this.context.cwd);await o.restoreInstallState();let n=K.resolve(this.context.cwd,ue.toPortablePath(this.patchFolder)),u=K.join(n,"../source"),A=K.join(n,"../.yarn-patch.json");if(!oe.existsSync(u))throw new st("The argument folder didn't get created by 'yarn patch'");let p=await P5(u,n),h=await oe.readJsonPromise(A),E=G.parseLocator(h.locator,!0);if(!o.storedPackages.has(E.locatorHash))throw new st("No package found in the project for the given locator");if(!this.save){this.context.stdout.write(p);return}let I=r.get("patchFolder"),v=K.join(I,`${G.slugifyLocator(E)}.patch`);await oe.mkdirPromise(I,{recursive:!0}),await oe.writeFilePromise(v,p);let x=[],C=new Map;for(let R of o.storedPackages.values()){if(G.isVirtualLocator(R))continue;let L=R.dependencies.get(E.identHash);if(!L)continue;let U=G.ensureDevirtualizedDescriptor(L),z=I5(U),te=o.storedResolutions.get(z.descriptorHash);if(!te)throw new Error("Assertion failed: Expected the resolution to have been registered");if(!o.storedPackages.get(te))throw new Error("Assertion failed: Expected the package to have been registered");let le=o.tryWorkspaceByLocator(R);if(le)x.push(le);else{let ce=o.originalPackages.get(R.locatorHash);if(!ce)throw new Error("Assertion failed: Expected the original package to have been registered");let Ce=ce.dependencies.get(L.identHash);if(!Ce)throw new Error("Assertion failed: Expected the original dependency to have been registered");C.set(Ce.descriptorHash,Ce)}}for(let R of x)for(let L of Ut.hardDependencies){let U=R.manifest[L].get(E.identHash);if(!U)continue;let z=KQ(U,{parentLocator:null,sourceDescriptor:G.convertLocatorToDescriptor(E),patchPaths:[K.join(dr.home,K.relative(o.cwd,v))]});R.manifest[L].set(U.identHash,z)}for(let R of C.values()){let L=KQ(R,{parentLocator:null,sourceDescriptor:G.convertLocatorToDescriptor(E),patchPaths:[K.join(dr.home,K.relative(o.cwd,v))]});o.topLevelWorkspace.manifest.resolutions.push({pattern:{descriptor:{fullName:G.stringifyIdent(L),description:R.range}},reference:L.range})}await o.persist()}};Ge();Pt();qt();var jC=class extends ut{constructor(){super(...arguments);this.update=ge.Boolean("-u,--update",!1,{description:"Reapply local patches that already apply to this packages"});this.json=ge.Boolean("--json",!1,{description:"Format the output as an NDJSON stream"});this.package=ge.String()}static{this.paths=[["patch"]]}static{this.usage=it.Usage({description:"prepare a package for patching",details:"\n This command will cause a package to be extracted in a temporary directory intended to be editable at will.\n\n Once you're done with your changes, run `yarn patch-commit -s path` (with `path` being the temporary directory you received) to generate a patchfile and register it into your top-level manifest via the `patch:` protocol. Run `yarn patch-commit -h` for more details.\n\n Calling the command when you already have a patch won't import it by default (in other words, the default behavior is to reset existing patches). 
However, adding the `-u,--update` flag will import any current patch.\n "})}async execute(){let r=await Ke.find(this.context.cwd,this.context.plugins),{project:o,workspace:a}=await kt.find(r,this.context.cwd),n=await Gr.find(r);if(!a)throw new sr(o.cwd,this.context.cwd);await o.restoreInstallState();let u=G.parseLocator(this.package);if(u.reference==="unknown"){let A=He.mapAndFilter([...o.storedPackages.values()],p=>p.identHash!==u.identHash?He.mapAndFilter.skip:G.isVirtualLocator(p)?He.mapAndFilter.skip:$h(p)!==this.update?He.mapAndFilter.skip:p);if(A.length===0)throw new st("No package found in the project for the given locator");if(A.length>1)throw new st(`Multiple candidate packages found; explicitly choose one of them (use \`yarn why <package>\` to get more information as to who depends on them):
+${A.map(p=>`
+- ${G.prettyLocator(r,p)}`).join("")}`);u=A[0]}if(!o.storedPackages.has(u.locatorHash))throw new st("No package found in the project for the given locator");await Rt.start({configuration:r,json:this.json,stdout:this.context.stdout},async A=>{let p=WQ(u),h=await D5(u,{cache:n,project:o});A.reportJson({locator:G.stringifyLocator(p),path:ue.fromPortablePath(h)});let E=this.update?" along with its current modifications":"";A.reportInfo(0,`Package ${G.prettyLocator(r,p)} got extracted with success${E}!`),A.reportInfo(0,`You can now edit the following folder: ${pe.pretty(r,ue.fromPortablePath(h),"magenta")}`),A.reportInfo(0,`Once you are done run ${pe.pretty(r,`yarn patch-commit -s ${process.platform==="win32"?'"':""}${ue.fromPortablePath(h)}${process.platform==="win32"?'"':""}`,"cyan")} and Yarn will store a patchfile based on your changes.`)})}};var fDt={configuration:{enableInlineHunks:{description:"If true, the installs will print unmatched patch hunks",type:"BOOLEAN",default:!1},patchFolder:{description:"Folder where the patch files must be written",type:"ABSOLUTE_PATH",default:"./.yarn/patches"}},commands:[qC,jC],fetchers:[Iv],resolvers:[Bv]},pDt=fDt;var Q5={};Vt(Q5,{PnpmLinker:()=>vv,default:()=>yDt});Ge();Pt();qt();var vv=class{getCustomDataKey(){return JSON.stringify({name:"PnpmLinker",version:3})}supportsPackage(e,r){return this.isEnabled(r)}async findPackageLocation(e,r){if(!this.isEnabled(r))throw new Error("Assertion failed: Expected the pnpm linker to be enabled");let o=this.getCustomDataKey(),a=r.project.linkersCustomData.get(o);if(!a)throw new st(`The project in ${pe.pretty(r.project.configuration,`${r.project.cwd}/package.json`,pe.Type.PATH)} doesn't seem to have been installed - running an install there might help`);let n=a.pathsByLocator.get(e.locatorHash);if(typeof n>"u")throw new st(`Couldn't find ${G.prettyLocator(r.project.configuration,e)} in the currently installed pnpm map - running an install might help`);return n.packageLocation}async findPackageLocator(e,r){if(!this.isEnabled(r))return null;let o=this.getCustomDataKey(),a=r.project.linkersCustomData.get(o);if(!a)throw new st(`The project in ${pe.pretty(r.project.configuration,`${r.project.cwd}/package.json`,pe.Type.PATH)} doesn't seem to have been installed - running an install there might help`);let n=e.match(/(^.*\/node_modules\/(@[^/]*\/)?[^/]+)(\/.*$)/);if(n){let p=a.locatorByPath.get(n[1]);if(p)return p}let u=e,A=e;do{A=u,u=K.dirname(A);let p=a.locatorByPath.get(A);if(p)return p}while(u!==A);return null}makeInstaller(e){return new x5(e)}isEnabled(e){return e.project.configuration.get("nodeLinker")==="pnpm"}},x5=class{constructor(e){this.opts=e;this.asyncActions=new
He.AsyncActions(10);this.customData={pathsByLocator:new Map,locatorByPath:new Map};this.indexFolderPromise=cD(oe,{indexPath:K.join(e.project.configuration.get("globalFolder"),"index")})}attachCustomData(e){}async installPackage(e,r,o){switch(e.linkType){case"SOFT":return this.installPackageSoft(e,r,o);case"HARD":return this.installPackageHard(e,r,o)}throw new Error("Assertion failed: Unsupported package link type")}async installPackageSoft(e,r,o){let a=K.resolve(r.packageFs.getRealPath(),r.prefixPath),n=this.opts.project.tryWorkspaceByLocator(e)?K.join(a,dr.nodeModules):null;return this.customData.pathsByLocator.set(e.locatorHash,{packageLocation:a,dependenciesLocation:n}),{packageLocation:a,buildRequest:null}}async installPackageHard(e,r,o){let a=hDt(e,{project:this.opts.project}),n=a.packageLocation;this.customData.locatorByPath.set(n,G.stringifyLocator(e)),this.customData.pathsByLocator.set(e.locatorHash,a),o.holdFetchResult(this.asyncActions.set(e.locatorHash,async()=>{await oe.mkdirPromise(n,{recursive:!0}),await oe.copyPromise(n,r.prefixPath,{baseFs:r.packageFs,overwrite:!1,linkStrategy:{type:"HardlinkFromIndex",indexPath:await this.indexFolderPromise,autoRepair:!0}})}));let A=G.isVirtualLocator(e)?G.devirtualizeLocator(e):e,p={manifest:await Ut.tryFind(r.prefixPath,{baseFs:r.packageFs})??new Ut,misc:{hasBindingGyp:mA.hasBindingGyp(r)}},h=this.opts.project.getDependencyMeta(A,e.version),E=mA.extractBuildRequest(e,p,h,{configuration:this.opts.project.configuration});return{packageLocation:n,buildRequest:E}}async attachInternalDependencies(e,r){if(this.opts.project.configuration.get("nodeLinker")!=="pnpm"||!TBe(e,{project:this.opts.project}))return;let o=this.customData.pathsByLocator.get(e.locatorHash);if(typeof o>"u")throw new Error(`Assertion failed: Expected the package to have been registered (${G.stringifyLocator(e)})`);let{dependenciesLocation:a}=o;a&&this.asyncActions.reduce(e.locatorHash,async n=>{await oe.mkdirPromise(a,{recursive:!0});let u=await gDt(a),A=new Map(u),p=[n],h=(I,v)=>{let x=v;TBe(v,{project:this.opts.project})||(this.opts.report.reportWarningOnce(0,"The pnpm linker doesn't support providing different versions to workspaces' peer dependencies"),x=G.devirtualizeLocator(v));let C=this.customData.pathsByLocator.get(x.locatorHash);if(typeof C>"u")throw new Error(`Assertion failed: Expected the package to have been registered (${G.stringifyLocator(v)})`);let R=G.stringifyIdent(I),L=K.join(a,R),U=K.relative(K.dirname(L),C.packageLocation),z=A.get(R);A.delete(R),p.push(Promise.resolve().then(async()=>{if(z){if(z.isSymbolicLink()&&await oe.readlinkPromise(L)===U)return;await oe.removePromise(L)}await oe.mkdirpPromise(K.dirname(L)),process.platform=="win32"&&this.opts.project.configuration.get("winLinkType")==="junctions"?await oe.symlinkPromise(C.packageLocation,L,"junction"):await oe.symlinkPromise(U,L)}))},E=!1;for(let[I,v]of r)I.identHash===e.identHash&&(E=!0),h(I,v);!E&&!this.opts.project.tryWorkspaceByLocator(e)&&h(G.convertLocatorToDescriptor(e),e),p.push(dDt(a,A)),await Promise.all(p)})}async attachExternalDependents(e,r){throw new Error("External dependencies haven't been implemented for the pnpm linker")}async finalizeInstall(){let e=LBe(this.opts.project);if(this.opts.project.configuration.get("nodeLinker")!=="pnpm")await oe.removePromise(e);else{let r;try{r=new Set(await oe.readdirPromise(e))}catch{r=new Set}for(let{dependenciesLocation:o}of this.customData.pathsByLocator.values()){if(!o)continue;let 
a=K.contains(e,o);if(a===null)continue;let[n]=a.split(K.sep);r.delete(n)}await Promise.all([...r].map(async o=>{await oe.removePromise(K.join(e,o))}))}return await this.asyncActions.wait(),await k5(e),this.opts.project.configuration.get("nodeLinker")!=="node-modules"&&await k5(NBe(this.opts.project)),{customData:this.customData}}};function NBe(t){return K.join(t.cwd,dr.nodeModules)}function LBe(t){return K.join(NBe(t),".store")}function hDt(t,{project:e}){let r=G.slugifyLocator(t),o=LBe(e),a=K.join(o,r,"package"),n=K.join(o,r,dr.nodeModules);return{packageLocation:a,dependenciesLocation:n}}function TBe(t,{project:e}){return!G.isVirtualLocator(t)||!e.tryWorkspaceByLocator(t)}async function gDt(t){let e=new Map,r=[];try{r=await oe.readdirPromise(t,{withFileTypes:!0})}catch(o){if(o.code!=="ENOENT")throw o}try{for(let o of r)if(!o.name.startsWith("."))if(o.name.startsWith("@")){let a=await oe.readdirPromise(K.join(t,o.name),{withFileTypes:!0});if(a.length===0)e.set(o.name,o);else for(let n of a)e.set(`${o.name}/${n.name}`,n)}else e.set(o.name,o)}catch(o){if(o.code!=="ENOENT")throw o}return e}async function dDt(t,e){let r=[],o=new Set;for(let a of e.keys()){r.push(oe.removePromise(K.join(t,a)));let n=G.tryParseIdent(a)?.scope;n&&o.add(`@${n}`)}return Promise.all(r).then(()=>Promise.all([...o].map(a=>k5(K.join(t,a)))))}async function k5(t){try{await oe.rmdirPromise(t)}catch(e){if(e.code!=="ENOENT"&&e.code!=="ENOTEMPTY")throw e}}var mDt={linkers:[vv]},yDt=mDt;var O5={};Vt(O5,{StageCommand:()=>GC,default:()=>xDt,stageUtils:()=>zQ});Ge();Pt();qt();Ge();Pt();var zQ={};Vt(zQ,{ActionType:()=>F5,checkConsensus:()=>VQ,expandDirectory:()=>N5,findConsensus:()=>L5,findVcsRoot:()=>R5,genCommitMessage:()=>M5,getCommitPrefix:()=>MBe,isYarnFile:()=>T5});Pt();var F5=(n=>(n[n.CREATE=0]="CREATE",n[n.DELETE=1]="DELETE",n[n.ADD=2]="ADD",n[n.REMOVE=3]="REMOVE",n[n.MODIFY=4]="MODIFY",n))(F5||{});async function R5(t,{marker:e}){do if(!oe.existsSync(K.join(t,e)))t=K.dirname(t);else return t;while(t!=="/");return null}function T5(t,{roots:e,names:r}){if(r.has(K.basename(t)))return!0;do if(!e.has(t))t=K.dirname(t);else return!0;while(t!=="/");return!1}function N5(t){let e=[],r=[t];for(;r.length>0;){let o=r.pop(),a=oe.readdirSync(o);for(let n of a){let u=K.resolve(o,n);oe.lstatSync(u).isDirectory()?r.push(u):e.push(u)}}return e}function VQ(t,e){let r=0,o=0;for(let a of t)a!=="wip"&&(e.test(a)?r+=1:o+=1);return r>=o}function L5(t){let e=VQ(t,/^(\w\(\w+\):\s*)?\w+s/),r=VQ(t,/^(\w\(\w+\):\s*)?[A-Z]/),o=VQ(t,/^\w\(\w+\):/);return{useThirdPerson:e,useUpperCase:r,useComponent:o}}function MBe(t){return t.useComponent?"chore(yarn): ":""}var EDt=new Map([[0,"create"],[1,"delete"],[2,"add"],[3,"remove"],[4,"update"]]);function M5(t,e){let r=MBe(t),o=[],a=e.slice().sort((n,u)=>n[0]-u[0]);for(;a.length>0;){let[n,u]=a.shift(),A=EDt.get(n);t.useUpperCase&&o.length===0&&(A=`${A[0].toUpperCase()}${A.slice(1)}`),t.useThirdPerson&&(A+="s");let p=[u];for(;a.length>0&&a[0][0]===n;){let[,E]=a.shift();p.push(E)}p.sort();let h=p.shift();p.length===1?h+=" (and one other)":p.length>1&&(h+=` (and ${p.length} others)`),o.push(`${A} ${h}`)}return`${r}${o.join(", ")}`}var CDt="Commit generated via `yarn stage`",wDt=11;async function OBe(t){let{code:e,stdout:r}=await Ur.execvp("git",["log","-1","--pretty=format:%H"],{cwd:t});return e===0?r.trim():null}async function IDt(t,e){let r=[],o=e.filter(h=>K.basename(h.path)==="package.json");for(let{action:h,path:E}of o){let I=K.relative(t,E);if(h===4){let v=await OBe(t),{stdout:x}=await 
Ur.execvp("git",["show",`${v}:${I}`],{cwd:t,strict:!0}),C=await Ut.fromText(x),R=await Ut.fromFile(E),L=new Map([...R.dependencies,...R.devDependencies]),U=new Map([...C.dependencies,...C.devDependencies]);for(let[z,te]of U){let ae=G.stringifyIdent(te),le=L.get(z);le?le.range!==te.range&&r.push([4,`${ae} to ${le.range}`]):r.push([3,ae])}for(let[z,te]of L)U.has(z)||r.push([2,G.stringifyIdent(te)])}else if(h===0){let v=await Ut.fromFile(E);v.name?r.push([0,G.stringifyIdent(v.name)]):r.push([0,"a package"])}else if(h===1){let v=await OBe(t),{stdout:x}=await Ur.execvp("git",["show",`${v}:${I}`],{cwd:t,strict:!0}),C=await Ut.fromText(x);C.name?r.push([1,G.stringifyIdent(C.name)]):r.push([1,"a package"])}else throw new Error("Assertion failed: Unsupported action type")}let{code:a,stdout:n}=await Ur.execvp("git",["log",`-${wDt}`,"--pretty=format:%s"],{cwd:t}),u=a===0?n.split(/\n/g).filter(h=>h!==""):[],A=L5(u);return M5(A,r)}var BDt={0:[" A ","?? "],4:[" M "],1:[" D "]},vDt={0:["A "],4:["M "],1:["D "]},UBe={async findRoot(t){return await R5(t,{marker:".git"})},async filterChanges(t,e,r,o){let{stdout:a}=await Ur.execvp("git",["status","-s"],{cwd:t,strict:!0}),n=a.toString().split(/\n/g),u=o?.staged?vDt:BDt;return[].concat(...n.map(p=>{if(p==="")return[];let h=p.slice(0,3),E=K.resolve(t,p.slice(3));if(!o?.staged&&h==="?? "&&p.endsWith("/"))return N5(E).map(I=>({action:0,path:I}));{let v=[0,4,1].find(x=>u[x].includes(h));return v!==void 0?[{action:v,path:E}]:[]}})).filter(p=>T5(p.path,{roots:e,names:r}))},async genCommitMessage(t,e){return await IDt(t,e)},async makeStage(t,e){let r=e.map(o=>ue.fromPortablePath(o.path));await Ur.execvp("git",["add","--",...r],{cwd:t,strict:!0})},async makeCommit(t,e,r){let o=e.map(a=>ue.fromPortablePath(a.path));await Ur.execvp("git",["add","-N","--",...o],{cwd:t,strict:!0}),await Ur.execvp("git",["commit","-m",`${r} + +${CDt} +`,"--",...o],{cwd:t,strict:!0})},async makeReset(t,e){let r=e.map(o=>ue.fromPortablePath(o.path));await Ur.execvp("git",["reset","HEAD","--",...r],{cwd:t,strict:!0})}};var DDt=[UBe],GC=class extends ut{constructor(){super(...arguments);this.commit=ge.Boolean("-c,--commit",!1,{description:"Commit the staged files"});this.reset=ge.Boolean("-r,--reset",!1,{description:"Remove all files from the staging area"});this.dryRun=ge.Boolean("-n,--dry-run",!1,{description:"Print the commit message and the list of modified files without staging / committing"});this.update=ge.Boolean("-u,--update",!1,{hidden:!0})}static{this.paths=[["stage"]]}static{this.usage=it.Usage({description:"add all yarn files to your vcs",details:"\n This command will add to your staging area the files belonging to Yarn (typically any modified `package.json` and `.yarnrc.yml` files, but also linker-generated files, cache data, etc). It will take your ignore list into account, so the cache files won't be added if the cache is ignored in a `.gitignore` file (assuming you use Git).\n\n Running `--reset` will instead remove them from the staging area (the changes will still be there, but won't be committed until you stage them back).\n\n Since the staging area is a non-existent concept in Mercurial, Yarn will always create a new commit when running this command on Mercurial repositories. 
You can get this behavior when using Git by using the `--commit` flag which will directly create a commit.\n ",examples:[["Adds all modified project files to the staging area","yarn stage"],["Creates a new commit containing all modified project files","yarn stage --commit"]]})}async execute(){let r=await Ke.find(this.context.cwd,this.context.plugins),{project:o}=await kt.find(r,this.context.cwd),{driver:a,root:n}=await PDt(o.cwd),u=[r.get("cacheFolder"),r.get("globalFolder"),r.get("virtualFolder"),r.get("yarnPath")];await r.triggerHook(I=>I.populateYarnPaths,o,I=>{u.push(I)});let A=new Set;for(let I of u)for(let v of SDt(n,I))A.add(v);let p=new Set([r.get("rcFilename"),dr.lockfile,dr.manifest]),h=await a.filterChanges(n,A,p),E=await a.genCommitMessage(n,h);if(this.dryRun)if(this.commit)this.context.stdout.write(`${E} +`);else for(let I of h)this.context.stdout.write(`${ue.fromPortablePath(I.path)} +`);else if(this.reset){let I=await a.filterChanges(n,A,p,{staged:!0});I.length===0?this.context.stdout.write("No staged changes found!"):await a.makeReset(n,I)}else h.length===0?this.context.stdout.write("No changes found!"):this.commit?await a.makeCommit(n,h,E):(await a.makeStage(n,h),this.context.stdout.write(E))}};async function PDt(t){let e=null,r=null;for(let o of DDt)if((r=await o.findRoot(t))!==null){e=o;break}if(e===null||r===null)throw new st("No stage driver has been found for your current project");return{driver:e,root:r}}function SDt(t,e){let r=[];if(e===null)return r;for(;;){(e===t||e.startsWith(`${t}/`))&&r.push(e);let o;try{o=oe.statSync(e)}catch{break}if(o.isSymbolicLink())e=K.resolve(K.dirname(e),oe.readlinkSync(e));else break}return r}var bDt={commands:[GC]},xDt=bDt;var U5={};Vt(U5,{default:()=>MDt});Ge();Ge();Pt();var qBe=Ze(Jn());Ge();var _Be=Ze(YH()),kDt="e8e1bd300d860104bb8c58453ffa1eb4",QDt="OFCNCOG2CU",HBe=async(t,e)=>{let r=G.stringifyIdent(t),a=FDt(e).initIndex("npm-search");try{return(await a.getObject(r,{attributesToRetrieve:["types"]})).types?.ts==="definitely-typed"}catch{return!1}},FDt=t=>(0,_Be.default)(QDt,kDt,{requester:{async send(r){try{let o=await sn.request(r.url,r.data||null,{configuration:t,headers:r.headers});return{content:o.body,isTimedOut:!1,status:o.statusCode}}catch(o){return{content:o.response.body,isTimedOut:!1,status:o.response.statusCode}}}}});var jBe=t=>t.scope?`${t.scope}__${t.name}`:`${t.name}`,RDt=async(t,e,r,o)=>{if(r.scope==="types")return;let{project:a}=t,{configuration:n}=a;if(!(n.get("tsEnableAutoTypes")??(oe.existsSync(K.join(t.cwd,"tsconfig.json"))||oe.existsSync(K.join(a.cwd,"tsconfig.json")))))return;let A=n.makeResolver(),p={project:a,resolver:A,report:new ki};if(!await HBe(r,n))return;let E=jBe(r),I=G.parseRange(r.range).selector;if(!Lr.validRange(I)){let L=n.normalizeDependency(r),U=await A.getCandidates(L,{},p);I=G.parseRange(U[0].reference).selector}let v=qBe.default.coerce(I);if(v===null)return;let x=`${Zc.Modifier.CARET}${v.major}`,C=G.makeDescriptor(G.makeIdent("types",E),x),R=He.mapAndFind(a.workspaces,L=>{let U=L.manifest.dependencies.get(r.identHash)?.descriptorHash,z=L.manifest.devDependencies.get(r.identHash)?.descriptorHash;if(U!==r.descriptorHash&&z!==r.descriptorHash)return He.mapAndFind.skip;let te=[];for(let ae of Ut.allDependencies){let le=L.manifest[ae].get(C.identHash);typeof le>"u"||te.push([ae,le])}return te.length===0?He.mapAndFind.skip:te});if(typeof R<"u")for(let[L,U]of R)t.manifest[L].set(U.identHash,U);else{try{let L=n.normalizeDependency(C);if((await 
A.getCandidates(L,{},p)).length===0)return}catch{return}t.manifest[Zc.Target.DEVELOPMENT].set(C.identHash,C)}},TDt=async(t,e,r)=>{if(r.scope==="types")return;let{project:o}=t,{configuration:a}=o;if(!(a.get("tsEnableAutoTypes")??(oe.existsSync(K.join(t.cwd,"tsconfig.json"))||oe.existsSync(K.join(o.cwd,"tsconfig.json")))))return;let u=jBe(r),A=G.makeIdent("types",u);for(let p of Ut.allDependencies)typeof t.manifest[p].get(A.identHash)>"u"||t.manifest[p].delete(A.identHash)},NDt=(t,e)=>{e.publishConfig&&e.publishConfig.typings&&(e.typings=e.publishConfig.typings),e.publishConfig&&e.publishConfig.types&&(e.types=e.publishConfig.types)},LDt={configuration:{tsEnableAutoTypes:{description:"Whether Yarn should auto-install @types/ dependencies on 'yarn add'",type:"BOOLEAN",isNullable:!0,default:null}},hooks:{afterWorkspaceDependencyAddition:RDt,afterWorkspaceDependencyRemoval:TDt,beforeWorkspacePacking:NDt}},MDt=LDt;var G5={};Vt(G5,{VersionApplyCommand:()=>zC,VersionCheckCommand:()=>JC,VersionCommand:()=>XC,default:()=>rPt,versionUtils:()=>VC});Ge();Ge();qt();var VC={};Vt(VC,{Decision:()=>WC,applyPrerelease:()=>zBe,applyReleases:()=>j5,applyStrategy:()=>XQ,clearVersionFiles:()=>_5,getUndecidedDependentWorkspaces:()=>Pv,getUndecidedWorkspaces:()=>JQ,openVersionFile:()=>KC,requireMoreDecisions:()=>$Dt,resolveVersionFiles:()=>Dv,suggestStrategy:()=>q5,updateVersionFiles:()=>H5,validateReleaseDecision:()=>YC});Ge();Pt();Nl();qt();var VBe=Ze(KBe()),BA=Ze(Jn()),ZDt=/^(>=|[~^]|)(0|[1-9]\d*)\.(0|[1-9]\d*)\.(0|[1-9]\d*)(-(0|[1-9]\d*|\d*[a-zA-Z-][0-9a-zA-Z-]*)(\.(0|[1-9]\d*|\d*[a-zA-Z-][0-9a-zA-Z-]*))*)?(\+[0-9a-zA-Z-]+(\.[0-9a-zA-Z-]+)*)?$/,WC=(u=>(u.UNDECIDED="undecided",u.DECLINE="decline",u.MAJOR="major",u.MINOR="minor",u.PATCH="patch",u.PRERELEASE="prerelease",u))(WC||{});function YC(t){let e=BA.default.valid(t);return e||He.validateEnum((0,VBe.default)(WC,"UNDECIDED"),t)}async function Dv(t,{prerelease:e=null}={}){let r=new Map,o=t.configuration.get("deferredVersionFolder");if(!oe.existsSync(o))return r;let a=await oe.readdirPromise(o);for(let n of a){if(!n.endsWith(".yml"))continue;let u=K.join(o,n),A=await oe.readFilePromise(u,"utf8"),p=Ki(A);for(let[h,E]of Object.entries(p.releases||{})){if(E==="decline")continue;let I=G.parseIdent(h),v=t.tryWorkspaceByIdent(I);if(v===null)throw new Error(`Assertion failed: Expected a release definition file to only reference existing workspaces (${K.basename(u)} references ${h})`);if(v.manifest.version===null)throw new Error(`Assertion failed: Expected the workspace to have a version (${G.prettyLocator(t.configuration,v.anchoredLocator)})`);let x=v.manifest.raw.stableVersion??v.manifest.version,C=r.get(v),R=XQ(x,YC(E));if(R===null)throw new Error(`Assertion failed: Expected ${x} to support being bumped via strategy ${E}`);let L=typeof C<"u"?BA.default.gt(R,C)?R:C:R;r.set(v,L)}}return e&&(r=new Map([...r].map(([n,u])=>[n,zBe(u,{current:n.manifest.version,prerelease:e})]))),r}async function _5(t){let e=t.configuration.get("deferredVersionFolder");oe.existsSync(e)&&await oe.removePromise(e)}async function H5(t,e){let r=new Set(e),o=t.configuration.get("deferredVersionFolder");if(!oe.existsSync(o))return;let a=await oe.readdirPromise(o);for(let n of a){if(!n.endsWith(".yml"))continue;let u=K.join(o,n),A=await oe.readFilePromise(u,"utf8"),p=Ki(A),h=p?.releases;if(h){for(let E of Object.keys(h)){let I=G.parseIdent(E),v=t.tryWorkspaceByIdent(I);(v===null||r.has(v))&&delete p.releases[E]}Object.keys(p.releases).length>0?await oe.changeFilePromise(u,Da(new 
Da.PreserveOrdering(p))):await oe.unlinkPromise(u)}}}async function KC(t,{allowEmpty:e=!1}={}){let r=t.configuration;if(r.projectCwd===null)throw new st("This command can only be run from within a Yarn project");let o=await ia.fetchRoot(r.projectCwd),a=o!==null?await ia.fetchBase(o,{baseRefs:r.get("changesetBaseRefs")}):null,n=o!==null?await ia.fetchChangedFiles(o,{base:a.hash,project:t}):[],u=r.get("deferredVersionFolder"),A=n.filter(x=>K.contains(u,x)!==null);if(A.length>1)throw new st(`Your current branch contains multiple versioning files; this isn't supported: +- ${A.map(x=>ue.fromPortablePath(x)).join(` +- `)}`);let p=new Set(He.mapAndFilter(n,x=>{let C=t.tryWorkspaceByFilePath(x);return C===null?He.mapAndFilter.skip:C}));if(A.length===0&&p.size===0&&!e)return null;let h=A.length===1?A[0]:K.join(u,`${wn.makeHash(Math.random().toString()).slice(0,8)}.yml`),E=oe.existsSync(h)?await oe.readFilePromise(h,"utf8"):"{}",I=Ki(E),v=new Map;for(let x of I.declined||[]){let C=G.parseIdent(x),R=t.getWorkspaceByIdent(C);v.set(R,"decline")}for(let[x,C]of Object.entries(I.releases||{})){let R=G.parseIdent(x),L=t.getWorkspaceByIdent(R);v.set(L,YC(C))}return{project:t,root:o,baseHash:a!==null?a.hash:null,baseTitle:a!==null?a.title:null,changedFiles:new Set(n),changedWorkspaces:p,releaseRoots:new Set([...p].filter(x=>x.manifest.version!==null)),releases:v,async saveAll(){let x={},C=[],R=[];for(let L of t.workspaces){if(L.manifest.version===null)continue;let U=G.stringifyIdent(L.anchoredLocator),z=v.get(L);z==="decline"?C.push(U):typeof z<"u"?x[U]=YC(z):p.has(L)&&R.push(U)}await oe.mkdirPromise(K.dirname(h),{recursive:!0}),await oe.changeFilePromise(h,Da(new Da.PreserveOrdering({releases:Object.keys(x).length>0?x:void 0,declined:C.length>0?C:void 0,undecided:R.length>0?R:void 0})))}}}function $Dt(t){return JQ(t).size>0||Pv(t).length>0}function JQ(t){let e=new Set;for(let r of t.changedWorkspaces)r.manifest.version!==null&&(t.releases.has(r)||e.add(r));return e}function Pv(t,{include:e=new Set}={}){let r=[],o=new Map(He.mapAndFilter([...t.releases],([n,u])=>u==="decline"?He.mapAndFilter.skip:[n.anchoredLocator.locatorHash,n])),a=new Map(He.mapAndFilter([...t.releases],([n,u])=>u!=="decline"?He.mapAndFilter.skip:[n.anchoredLocator.locatorHash,n]));for(let n of t.project.workspaces)if(!(!e.has(n)&&(a.has(n.anchoredLocator.locatorHash)||o.has(n.anchoredLocator.locatorHash)))&&n.manifest.version!==null)for(let u of Ut.hardDependencies)for(let A of n.manifest.getForScope(u).values()){let p=t.project.tryWorkspaceByDescriptor(A);p!==null&&o.has(p.anchoredLocator.locatorHash)&&r.push([n,p])}return r}function q5(t,e){let r=BA.default.clean(e);for(let o of Object.values(WC))if(o!=="undecided"&&o!=="decline"&&BA.default.inc(t,o)===r)return o;return null}function XQ(t,e){if(BA.default.valid(e))return e;if(t===null)throw new st(`Cannot apply the release strategy "${e}" unless the workspace already has a valid version`);if(!BA.default.valid(t))throw new st(`Cannot apply the release strategy "${e}" on a non-semver version (${t})`);let r=BA.default.inc(t,e);if(r===null)throw new st(`Cannot apply the release strategy "${e}" on the specified version (${t})`);return r}function j5(t,e,{report:r}){let o=new Map;for(let a of t.workspaces)for(let n of Ut.allDependencies)for(let u of a.manifest[n].values()){let A=t.tryWorkspaceByDescriptor(u);if(A===null||!e.has(A))continue;He.getArrayWithDefault(o,A).push([a,n,u.identHash])}for(let[a,n]of e){let u=a.manifest.version;a.manifest.version=n,BA.default.prerelease(n)===null?delete 
a.manifest.raw.stableVersion:a.manifest.raw.stableVersion||(a.manifest.raw.stableVersion=u);let A=a.manifest.name!==null?G.stringifyIdent(a.manifest.name):null;r.reportInfo(0,`${G.prettyLocator(t.configuration,a.anchoredLocator)}: Bumped to ${n}`),r.reportJson({cwd:ue.fromPortablePath(a.cwd),ident:A,oldVersion:u,newVersion:n});let p=o.get(a);if(!(typeof p>"u"))for(let[h,E,I]of p){let v=h.manifest[E].get(I);if(typeof v>"u")throw new Error("Assertion failed: The dependency should have existed");let x=v.range,C=!1;if(x.startsWith(ei.protocol)&&(x=x.slice(ei.protocol.length),C=!0,x===a.relativeCwd))continue;let R=x.match(ZDt);if(!R){r.reportWarning(0,`Couldn't auto-upgrade range ${x} (in ${G.prettyLocator(t.configuration,h.anchoredLocator)})`);continue}let L=`${R[1]}${n}`;C&&(L=`${ei.protocol}${L}`);let U=G.makeDescriptor(v,L);h.manifest[E].set(I,U)}}}var ePt=new Map([["%n",{extract:t=>t.length>=1?[t[0],t.slice(1)]:null,generate:(t=0)=>`${t+1}`}]]);function zBe(t,{current:e,prerelease:r}){let o=new BA.default.SemVer(e),a=o.prerelease.slice(),n=[];o.prerelease=[],o.format()!==t&&(a.length=0);let u=!0,A=r.split(/\./g);for(let p of A){let h=ePt.get(p);if(typeof h>"u")n.push(p),a[0]===p?a.shift():u=!1;else{let E=u?h.extract(a):null;E!==null&&typeof E[0]=="number"?(n.push(h.generate(E[0])),a=E[1]):(n.push(h.generate()),u=!1)}}return o.prerelease&&(o.prerelease=[]),`${t}-${n.join(".")}`}var zC=class extends ut{constructor(){super(...arguments);this.all=ge.Boolean("--all",!1,{description:"Apply the deferred version changes on all workspaces"});this.dryRun=ge.Boolean("--dry-run",!1,{description:"Print the versions without actually generating the package archive"});this.prerelease=ge.String("--prerelease",{description:"Add a prerelease identifier to new versions",tolerateBoolean:!0});this.recursive=ge.Boolean("-R,--recursive",{description:"Release the transitive workspaces as well"});this.json=ge.Boolean("--json",!1,{description:"Format the output as an NDJSON stream"})}static{this.paths=[["version","apply"]]}static{this.usage=it.Usage({category:"Release-related commands",description:"apply all the deferred version bumps at once",details:` + This command will apply the deferred version changes and remove their definitions from the repository. + + Note that if \`--prerelease\` is set, the given prerelease identifier (by default \`rc.%n\`) will be used on all new versions and the version definitions will be kept as-is. + + By default only the current workspace will be bumped, but you can configure this behavior by using one of: + + - \`--recursive\` to also apply the version bump on its dependencies + - \`--all\` to apply the version bump on all packages in the repository + + Note that this command will also update the \`workspace:\` references across all your local workspaces, thus ensuring that they keep referring to the same workspaces even after the version bump. 
+ `,examples:[["Apply the version change to the local workspace","yarn version apply"],["Apply the version change to all the workspaces in the local workspace","yarn version apply --all"]]})}async execute(){let r=await Ke.find(this.context.cwd,this.context.plugins),{project:o,workspace:a}=await kt.find(r,this.context.cwd),n=await Gr.find(r);if(!a)throw new sr(o.cwd,this.context.cwd);await o.restoreInstallState({restoreResolutions:!1});let u=await Rt.start({configuration:r,json:this.json,stdout:this.context.stdout},async A=>{let p=this.prerelease?typeof this.prerelease!="boolean"?this.prerelease:"rc.%n":null,h=await Dv(o,{prerelease:p}),E=new Map;if(this.all)E=h;else{let I=this.recursive?a.getRecursiveWorkspaceDependencies():[a];for(let v of I){let x=h.get(v);typeof x<"u"&&E.set(v,x)}}if(E.size===0){let I=h.size>0?" Did you want to add --all?":"";A.reportWarning(0,`The current workspace doesn't seem to require a version bump.${I}`);return}j5(o,E,{report:A}),this.dryRun||(p||(this.all?await _5(o):await H5(o,[...E.keys()])),A.reportSeparator())});return this.dryRun||u.hasErrors()?u.exitCode():await o.installWithNewReport({json:this.json,stdout:this.context.stdout},{cache:n})}};Ge();Pt();qt();var ZQ=Ze(Jn());var JC=class extends ut{constructor(){super(...arguments);this.interactive=ge.Boolean("-i,--interactive",{description:"Open an interactive interface used to set version bumps"})}static{this.paths=[["version","check"]]}static{this.usage=it.Usage({category:"Release-related commands",description:"check that all the relevant packages have been bumped",details:"\n **Warning:** This command currently requires Git.\n\n This command will check that all the packages covered by the files listed in argument have been properly bumped or declined to bump.\n\n In the case of a bump, the check will also cover transitive packages - meaning that should `Foo` be bumped, a package `Bar` depending on `Foo` will require a decision as to whether `Bar` will need to be bumped. 
This check doesn't cross packages that have declined to bump.\n\n In case no arguments are passed to the function, the list of modified files will be generated by comparing the HEAD against `master`.\n ",examples:[["Check whether the modified packages need a bump","yarn version check"]]})}async execute(){return this.interactive?await this.executeInteractive():await this.executeStandard()}async executeInteractive(){GE(this.context);let{Gem:r}=await Promise.resolve().then(()=>(Zk(),Eq)),{ScrollableItems:o}=await Promise.resolve().then(()=>(rQ(),tQ)),{FocusRequest:a}=await Promise.resolve().then(()=>(wq(),$we)),{useListInput:n}=await Promise.resolve().then(()=>(eQ(),eIe)),{renderForm:u}=await Promise.resolve().then(()=>(oQ(),sQ)),{Box:A,Text:p}=await Promise.resolve().then(()=>Ze(ic())),{default:h,useCallback:E,useState:I}=await Promise.resolve().then(()=>Ze(an())),v=await Ke.find(this.context.cwd,this.context.plugins),{project:x,workspace:C}=await kt.find(v,this.context.cwd);if(!C)throw new sr(x.cwd,this.context.cwd);await x.restoreInstallState();let R=await KC(x);if(R===null||R.releaseRoots.size===0)return 0;if(R.root===null)throw new st("This command can only be run on Git repositories");let L=()=>h.createElement(A,{flexDirection:"row",paddingBottom:1},h.createElement(A,{flexDirection:"column",width:60},h.createElement(A,null,h.createElement(p,null,"Press ",h.createElement(p,{bold:!0,color:"cyanBright"},""),"/",h.createElement(p,{bold:!0,color:"cyanBright"},"")," to select workspaces.")),h.createElement(A,null,h.createElement(p,null,"Press ",h.createElement(p,{bold:!0,color:"cyanBright"},""),"/",h.createElement(p,{bold:!0,color:"cyanBright"},"")," to select release strategies."))),h.createElement(A,{flexDirection:"column"},h.createElement(A,{marginLeft:1},h.createElement(p,null,"Press ",h.createElement(p,{bold:!0,color:"cyanBright"},"")," to save.")),h.createElement(A,{marginLeft:1},h.createElement(p,null,"Press ",h.createElement(p,{bold:!0,color:"cyanBright"},"")," to abort.")))),U=({workspace:Ce,active:de,decision:Be,setDecision:Ee})=>{let g=Ce.manifest.raw.stableVersion??Ce.manifest.version;if(g===null)throw new Error(`Assertion failed: The version should have been set (${G.prettyLocator(v,Ce.anchoredLocator)})`);if(ZQ.default.prerelease(g)!==null)throw new Error(`Assertion failed: Prerelease identifiers shouldn't be found (${g})`);let me=["undecided","decline","patch","minor","major"];n(Be,me,{active:de,minus:"left",plus:"right",set:Ee});let we=Be==="undecided"?h.createElement(p,{color:"yellow"},g):Be==="decline"?h.createElement(p,{color:"green"},g):h.createElement(p,null,h.createElement(p,{color:"magenta"},g)," \u2192 ",h.createElement(p,{color:"green"},ZQ.default.valid(Be)?Be:ZQ.default.inc(g,Be)));return h.createElement(A,{flexDirection:"column"},h.createElement(A,null,h.createElement(p,null,G.prettyLocator(v,Ce.anchoredLocator)," - ",we)),h.createElement(A,null,me.map(Ae=>h.createElement(A,{key:Ae,paddingLeft:2},h.createElement(p,null,h.createElement(r,{active:Ae===Be})," ",Ae)))))},z=Ce=>{let de=new Set(R.releaseRoots),Be=new Map([...Ce].filter(([Ee])=>de.has(Ee)));for(;;){let Ee=Pv({project:R.project,releases:Be}),g=!1;if(Ee.length>0){for(let[me]of Ee)if(!de.has(me)){de.add(me),g=!0;let we=Ce.get(me);typeof we<"u"&&Be.set(me,we)}}if(!g)break}return{relevantWorkspaces:de,relevantReleases:Be}},te=()=>{let[Ce,de]=I(()=>new Map(R.releases)),Be=E((Ee,g)=>{let me=new 
Map(Ce);g!=="undecided"?me.set(Ee,g):me.delete(Ee);let{relevantReleases:we}=z(me);de(we)},[Ce,de]);return[Ce,Be]},ae=({workspaces:Ce,releases:de})=>{let Be=[];Be.push(`${Ce.size} total`);let Ee=0,g=0;for(let me of Ce){let we=de.get(me);typeof we>"u"?g+=1:we!=="decline"&&(Ee+=1)}return Be.push(`${Ee} release${Ee===1?"":"s"}`),Be.push(`${g} remaining`),h.createElement(p,{color:"yellow"},Be.join(", "))},ce=await u(({useSubmit:Ce})=>{let[de,Be]=te();Ce(de);let{relevantWorkspaces:Ee}=z(de),g=new Set([...Ee].filter(ne=>!R.releaseRoots.has(ne))),[me,we]=I(0),Ae=E(ne=>{switch(ne){case a.BEFORE:we(me-1);break;case a.AFTER:we(me+1);break}},[me,we]);return h.createElement(A,{flexDirection:"column"},h.createElement(L,null),h.createElement(A,null,h.createElement(p,{wrap:"wrap"},"The following files have been modified in your local checkout.")),h.createElement(A,{flexDirection:"column",marginTop:1,paddingLeft:2},[...R.changedFiles].map(ne=>h.createElement(A,{key:ne},h.createElement(p,null,h.createElement(p,{color:"grey"},ue.fromPortablePath(R.root)),ue.sep,ue.relative(ue.fromPortablePath(R.root),ue.fromPortablePath(ne)))))),R.releaseRoots.size>0&&h.createElement(h.Fragment,null,h.createElement(A,{marginTop:1},h.createElement(p,{wrap:"wrap"},"Because of those files having been modified, the following workspaces may need to be released again (note that private workspaces are also shown here, because even though they won't be published, releasing them will allow us to flag their dependents for potential re-release):")),g.size>3?h.createElement(A,{marginTop:1},h.createElement(ae,{workspaces:R.releaseRoots,releases:de})):null,h.createElement(A,{marginTop:1,flexDirection:"column"},h.createElement(o,{active:me%2===0,radius:1,size:2,onFocusRequest:Ae},[...R.releaseRoots].map(ne=>h.createElement(U,{key:ne.cwd,workspace:ne,decision:de.get(ne)||"undecided",setDecision:Z=>Be(ne,Z)}))))),g.size>0?h.createElement(h.Fragment,null,h.createElement(A,{marginTop:1},h.createElement(p,{wrap:"wrap"},"The following workspaces depend on other workspaces that have been marked for release, and thus may need to be released as well:")),h.createElement(A,null,h.createElement(p,null,"(Press ",h.createElement(p,{bold:!0,color:"cyanBright"},"")," to move the focus between the workspace groups.)")),g.size>5?h.createElement(A,{marginTop:1},h.createElement(ae,{workspaces:g,releases:de})):null,h.createElement(A,{marginTop:1,flexDirection:"column"},h.createElement(o,{active:me%2===1,radius:2,size:2,onFocusRequest:Ae},[...g].map(ne=>h.createElement(U,{key:ne.cwd,workspace:ne,decision:de.get(ne)||"undecided",setDecision:Z=>Be(ne,Z)}))))):null)},{versionFile:R},{stdin:this.context.stdin,stdout:this.context.stdout,stderr:this.context.stderr});if(typeof ce>"u")return 1;R.releases.clear();for(let[Ce,de]of ce)R.releases.set(Ce,de);await R.saveAll()}async executeStandard(){let r=await Ke.find(this.context.cwd,this.context.plugins),{project:o,workspace:a}=await kt.find(r,this.context.cwd);if(!a)throw new sr(o.cwd,this.context.cwd);return await o.restoreInstallState(),(await Rt.start({configuration:r,stdout:this.context.stdout},async u=>{let A=await KC(o);if(A===null||A.releaseRoots.size===0)return;if(A.root===null)throw new st("This command can only be run on Git repositories");if(u.reportInfo(0,`Your PR was started right after ${pe.pretty(r,A.baseHash.slice(0,7),"yellow")} ${pe.pretty(r,A.baseTitle,"magenta")}`),A.changedFiles.size>0){u.reportInfo(0,"You have changed the following files since then:"),u.reportSeparator();for(let v of 
A.changedFiles)u.reportInfo(null,`${pe.pretty(r,ue.fromPortablePath(A.root),"gray")}${ue.sep}${ue.relative(ue.fromPortablePath(A.root),ue.fromPortablePath(v))}`)}let p=!1,h=!1,E=JQ(A);if(E.size>0){p||u.reportSeparator();for(let v of E)u.reportError(0,`${G.prettyLocator(r,v.anchoredLocator)} has been modified but doesn't have a release strategy attached`);p=!0}let I=Pv(A);for(let[v,x]of I)h||u.reportSeparator(),u.reportError(0,`${G.prettyLocator(r,v.anchoredLocator)} doesn't have a release strategy attached, but depends on ${G.prettyWorkspace(r,x)} which is planned for release.`),h=!0;(p||h)&&(u.reportSeparator(),u.reportInfo(0,"This command detected that at least some workspaces have received modifications without explicit instructions as to how they had to be released (if needed)."),u.reportInfo(0,"To correct these errors, run `yarn version check --interactive` then follow the instructions."))})).exitCode()}};Ge();qt();var $Q=Ze(Jn());var XC=class extends ut{constructor(){super(...arguments);this.deferred=ge.Boolean("-d,--deferred",{description:"Prepare the version to be bumped during the next release cycle"});this.immediate=ge.Boolean("-i,--immediate",{description:"Bump the version immediately"});this.strategy=ge.String()}static{this.paths=[["version"]]}static{this.usage=it.Usage({category:"Release-related commands",description:"apply a new version to the current package",details:"\n This command will bump the version number for the given package, following the specified strategy:\n\n - If `major`, the first number from the semver range will be increased (`X.0.0`).\n - If `minor`, the second number from the semver range will be increased (`0.X.0`).\n - If `patch`, the third number from the semver range will be increased (`0.0.X`).\n - If prefixed by `pre` (`premajor`, ...), a `-0` suffix will be set (`0.0.0-0`).\n - If `prerelease`, the suffix will be increased (`0.0.0-X`); the third number from the semver range will also be increased if there was no suffix in the previous version.\n - If `decline`, the nonce will be increased for `yarn version check` to pass without version bump.\n - If a valid semver range, it will be used as new version.\n - If unspecified, Yarn will ask you for guidance.\n\n For more information about the `--deferred` flag, consult our documentation (https://yarnpkg.com/features/release-workflow#deferred-versioning).\n ",examples:[["Immediately bump the version to the next major","yarn version major"],["Prepare the version to be bumped to the next major","yarn version major --deferred"]]})}async execute(){let r=await Ke.find(this.context.cwd,this.context.plugins),{project:o,workspace:a}=await kt.find(r,this.context.cwd);if(!a)throw new sr(o.cwd,this.context.cwd);let n=r.get("preferDeferredVersions");this.deferred&&(n=!0),this.immediate&&(n=!1);let u=$Q.default.valid(this.strategy),A=this.strategy==="decline",p;if(u)if(a.manifest.version!==null){let E=q5(a.manifest.version,this.strategy);E!==null?p=E:p=this.strategy}else p=this.strategy;else{let E=a.manifest.version;if(!A){if(E===null)throw new st("Can't bump the version if there wasn't a version to begin with - use 0.0.0 as initial version then run the command again.");if(typeof E!="string"||!$Q.default.valid(E))throw new st(`Can't bump the version (${E}) if it's not valid semver`)}p=YC(this.strategy)}if(!n){let I=(await Dv(o)).get(a);if(typeof I<"u"&&p!=="decline"){let v=XQ(a.manifest.version,p);if($Q.default.lt(v,I))throw new st(`Can't bump the version to one that would be lower than the current deferred one 
(${I})`)}}let h=await KC(o,{allowEmpty:!0});return h.releases.set(a,p),await h.saveAll(),n?0:await this.cli.run(["version","apply"])}};var tPt={configuration:{deferredVersionFolder:{description:"Folder where are stored the versioning files",type:"ABSOLUTE_PATH",default:"./.yarn/versions"},preferDeferredVersions:{description:"If true, running `yarn version` will assume the `--deferred` flag unless `--immediate` is set",type:"BOOLEAN",default:!1}},commands:[zC,JC,XC]},rPt=tPt;var Y5={};Vt(Y5,{WorkspacesFocusCommand:()=>ZC,WorkspacesForeachCommand:()=>ew,default:()=>sPt});Ge();Ge();qt();var ZC=class extends ut{constructor(){super(...arguments);this.json=ge.Boolean("--json",!1,{description:"Format the output as an NDJSON stream"});this.production=ge.Boolean("--production",!1,{description:"Only install regular dependencies by omitting dev dependencies"});this.all=ge.Boolean("-A,--all",!1,{description:"Install the entire project"});this.workspaces=ge.Rest()}static{this.paths=[["workspaces","focus"]]}static{this.usage=it.Usage({category:"Workspace-related commands",description:"install a single workspace and its dependencies",details:"\n This command will run an install as if the specified workspaces (and all other workspaces they depend on) were the only ones in the project. If no workspaces are explicitly listed, the active one will be assumed.\n\n Note that this command is only very moderately useful when using zero-installs, since the cache will contain all the packages anyway - meaning that the only difference between a full install and a focused install would just be a few extra lines in the `.pnp.cjs` file, at the cost of introducing an extra complexity.\n\n If the `-A,--all` flag is set, the entire project will be installed. Combine with `--production` to replicate the old `yarn install --production`.\n "})}async execute(){let r=await Ke.find(this.context.cwd,this.context.plugins),{project:o,workspace:a}=await kt.find(r,this.context.cwd),n=await Gr.find(r);await o.restoreInstallState({restoreResolutions:!1});let u;if(this.all)u=new Set(o.workspaces);else if(this.workspaces.length===0){if(!a)throw new sr(o.cwd,this.context.cwd);u=new Set([a])}else u=new Set(this.workspaces.map(A=>o.getWorkspaceByIdent(G.parseIdent(A))));for(let A of u)for(let p of this.production?["dependencies"]:Ut.hardDependencies)for(let h of A.manifest.getForScope(p).values()){let E=o.tryWorkspaceByDescriptor(h);E!==null&&u.add(E)}for(let A of o.workspaces)u.has(A)?this.production&&A.manifest.devDependencies.clear():(A.manifest.installConfig=A.manifest.installConfig||{},A.manifest.installConfig.selfReferences=!1,A.manifest.dependencies.clear(),A.manifest.devDependencies.clear(),A.manifest.peerDependencies.clear(),A.manifest.scripts.clear());return await o.installWithNewReport({json:this.json,stdout:this.context.stdout},{cache:n,persistProject:!1})}};Ge();Ge();Ge();qt();var $C=Ze($o()),XBe=Ze(eg());el();var ew=class extends ut{constructor(){super(...arguments);this.from=ge.Array("--from",{description:"An array of glob pattern idents or paths from which to base any recursion"});this.all=ge.Boolean("-A,--all",{description:"Run the command on all workspaces of a project"});this.recursive=ge.Boolean("-R,--recursive",{description:"Run the command on the current workspace and all of its recursive dependencies"});this.worktree=ge.Boolean("-W,--worktree",{description:"Run the command on all workspaces of the current worktree"});this.verbose=ge.Counter("-v,--verbose",{description:"Increase level of logging verbosity up to 2 
times"});this.parallel=ge.Boolean("-p,--parallel",!1,{description:"Run the commands in parallel"});this.interlaced=ge.Boolean("-i,--interlaced",!1,{description:"Print the output of commands in real-time instead of buffering it"});this.jobs=ge.String("-j,--jobs",{description:"The maximum number of parallel tasks that the execution will be limited to; or `unlimited`",validator:IT([Js(["unlimited"]),jw(wT(),[vT(),BT(1)])])});this.topological=ge.Boolean("-t,--topological",!1,{description:"Run the command after all workspaces it depends on (regular) have finished"});this.topologicalDev=ge.Boolean("--topological-dev",!1,{description:"Run the command after all workspaces it depends on (regular + dev) have finished"});this.include=ge.Array("--include",[],{description:"An array of glob pattern idents or paths; only matching workspaces will be traversed"});this.exclude=ge.Array("--exclude",[],{description:"An array of glob pattern idents or paths; matching workspaces won't be traversed"});this.publicOnly=ge.Boolean("--no-private",{description:"Avoid running the command on private workspaces"});this.since=ge.String("--since",{description:"Only include workspaces that have been changed since the specified ref.",tolerateBoolean:!0});this.dryRun=ge.Boolean("-n,--dry-run",{description:"Print the commands that would be run, without actually running them"});this.commandName=ge.String();this.args=ge.Proxy()}static{this.paths=[["workspaces","foreach"]]}static{this.usage=it.Usage({category:"Workspace-related commands",description:"run a command on all workspaces",details:"\n This command will run a given sub-command on current and all its descendant workspaces. Various flags can alter the exact behavior of the command:\n\n - If `-p,--parallel` is set, the commands will be ran in parallel; they'll by default be limited to a number of parallel tasks roughly equal to half your core number, but that can be overridden via `-j,--jobs`, or disabled by setting `-j unlimited`.\n\n - If `-p,--parallel` and `-i,--interlaced` are both set, Yarn will print the lines from the output as it receives them. If `-i,--interlaced` wasn't set, it would instead buffer the output from each process and print the resulting buffers only after their source processes have exited.\n\n - If `-t,--topological` is set, Yarn will only run the command after all workspaces that it depends on through the `dependencies` field have successfully finished executing. If `--topological-dev` is set, both the `dependencies` and `devDependencies` fields will be considered when figuring out the wait points.\n\n - If `-A,--all` is set, Yarn will run the command on all the workspaces of a project.\n\n - If `-R,--recursive` is set, Yarn will find workspaces to run the command on by recursively evaluating `dependencies` and `devDependencies` fields, instead of looking at the `workspaces` fields.\n\n - If `-W,--worktree` is set, Yarn will find workspaces to run the command on by looking at the current worktree.\n\n - If `--from` is set, Yarn will use the packages matching the 'from' glob as the starting point for any recursive search.\n\n - If `--since` is set, Yarn will only run the command on workspaces that have been modified since the specified ref. By default Yarn will use the refs specified by the `changesetBaseRefs` configuration option.\n\n - If `--dry-run` is set, Yarn will explain what it would do without actually doing anything.\n\n - The command may apply to only some workspaces through the use of `--include` which acts as a whitelist. 
The `--exclude` flag will do the opposite and will be a list of packages that mustn't execute the script. Both flags accept glob patterns (if valid Idents and supported by [micromatch](https://github.com/micromatch/micromatch)). Make sure to escape the patterns, to prevent your own shell from trying to expand them.\n\n The `-v,--verbose` flag can be passed up to twice: once to prefix output lines with the originating workspace's name, and again to include start/finish/timing log lines. Maximum verbosity is enabled by default in terminal environments.\n\n If the command is `run` and the script being run does not exist the child workspace will be skipped without error.\n ",examples:[["Publish all packages","yarn workspaces foreach -A npm publish --tolerate-republish"],["Run the build script on all descendant packages","yarn workspaces foreach -A run build"],["Run the build script on current and all descendant packages in parallel, building package dependencies first","yarn workspaces foreach -Apt run build"],["Run the build script on several packages and all their dependencies, building dependencies first","yarn workspaces foreach -Rpt --from '{workspace-a,workspace-b}' run build"]]})}static{this.schema=[Yw("all",Yu.Forbids,["from","recursive","since","worktree"],{missingIf:"undefined"}),DT(["all","recursive","since","worktree"],{missingIf:"undefined"})]}async execute(){let r=await Ke.find(this.context.cwd,this.context.plugins),{project:o,workspace:a}=await kt.find(r,this.context.cwd);if(!this.all&&!a)throw new sr(o.cwd,this.context.cwd);await o.restoreInstallState();let n=this.cli.process([this.commandName,...this.args]),u=n.path.length===1&&n.path[0]==="run"&&typeof n.scriptName<"u"?n.scriptName:null;if(n.path.length===0)throw new st("Invalid subcommand name for iteration - use the 'run' keyword if you wish to execute a script");let A=Ee=>{this.dryRun&&this.context.stdout.write(`${Ee} +`)},p=()=>{let Ee=this.from.map(g=>$C.default.matcher(g));return o.workspaces.filter(g=>{let me=G.stringifyIdent(g.anchoredLocator),we=g.relativeCwd;return Ee.some(Ae=>Ae(me)||Ae(we))})},h=[];if(this.since?(A("Option --since is set; selecting the changed workspaces as root for workspace selection"),h=Array.from(await ia.fetchChangedWorkspaces({ref:this.since,project:o}))):this.from?(A("Option --from is set; selecting the specified workspaces"),h=[...p()]):this.worktree?(A("Option --worktree is set; selecting the current workspace"),h=[a]):this.recursive?(A("Option --recursive is set; selecting the current workspace"),h=[a]):this.all&&(A("Option --all is set; selecting all workspaces"),h=[...o.workspaces]),this.dryRun&&!this.all){for(let Ee of h)A(` +- ${Ee.relativeCwd} + ${G.prettyLocator(r,Ee.anchoredLocator)}`);h.length>0&&A("")}let E;if(this.recursive?this.since?(A("Option --recursive --since is set; recursively selecting all dependent workspaces"),E=new Set(h.map(Ee=>[...Ee.getRecursiveWorkspaceDependents()]).flat())):(A("Option --recursive is set; recursively selecting all transitive dependencies"),E=new Set(h.map(Ee=>[...Ee.getRecursiveWorkspaceDependencies()]).flat())):this.worktree?(A("Option --worktree is set; recursively selecting all nested workspaces"),E=new Set(h.map(Ee=>[...Ee.getRecursiveWorkspaceChildren()]).flat())):E=null,E!==null&&(h=[...new Set([...h,...E])],this.dryRun))for(let Ee of E)A(` +- ${Ee.relativeCwd} + ${G.prettyLocator(r,Ee.anchoredLocator)}`);let I=[],v=!1;if(u?.includes(":")){for(let Ee of o.workspaces)if(Ee.manifest.scripts.has(u)&&(v=!v,v===!1))break}for(let Ee of 
h){if(u&&!Ee.manifest.scripts.has(u)&&!v&&!(await An.getWorkspaceAccessibleBinaries(Ee)).has(u)){A(`Excluding ${Ee.relativeCwd} because it doesn't have a "${u}" script`);continue}if(!(u===r.env.npm_lifecycle_event&&Ee.cwd===a.cwd)){if(this.include.length>0&&!$C.default.isMatch(G.stringifyIdent(Ee.anchoredLocator),this.include)&&!$C.default.isMatch(Ee.relativeCwd,this.include)){A(`Excluding ${Ee.relativeCwd} because it doesn't match the --include filter`);continue}if(this.exclude.length>0&&($C.default.isMatch(G.stringifyIdent(Ee.anchoredLocator),this.exclude)||$C.default.isMatch(Ee.relativeCwd,this.exclude))){A(`Excluding ${Ee.relativeCwd} because it matches the --include filter`);continue}if(this.publicOnly&&Ee.manifest.private===!0){A(`Excluding ${Ee.relativeCwd} because it's a private workspace and --no-private was set`);continue}I.push(Ee)}}if(this.dryRun)return 0;let x=this.verbose??(this.context.stdout.isTTY?1/0:0),C=x>0,R=x>1,L=this.parallel?this.jobs==="unlimited"?1/0:Number(this.jobs)||Math.ceil(Xi.availableParallelism()/2):1,U=L===1?!1:this.parallel,z=U?this.interlaced:!0,te=(0,XBe.default)(L),ae=new Map,le=new Set,ce=0,Ce=null,de=!1,Be=await Rt.start({configuration:r,stdout:this.context.stdout,includePrefix:!1},async Ee=>{let g=async(me,{commandIndex:we})=>{if(de)return-1;!U&&R&&we>1&&Ee.reportSeparator();let Ae=nPt(me,{configuration:r,label:C,commandIndex:we}),[ne,Z]=JBe(Ee,{prefix:Ae,interlaced:z}),[xe,Ne]=JBe(Ee,{prefix:Ae,interlaced:z});try{R&&Ee.reportInfo(null,`${Ae?`${Ae} `:""}Process started`);let ht=Date.now(),H=await this.cli.run([this.commandName,...this.args],{cwd:me.cwd,stdout:ne,stderr:xe})||0;ne.end(),xe.end(),await Z,await Ne;let rt=Date.now();if(R){let Te=r.get("enableTimers")?`, completed in ${pe.pretty(r,rt-ht,pe.Type.DURATION)}`:"";Ee.reportInfo(null,`${Ae?`${Ae} `:""}Process exited (exit code ${H})${Te}`)}return H===130&&(de=!0,Ce=H),H}catch(ht){throw ne.end(),xe.end(),await Z,await Ne,ht}};for(let me of I)ae.set(me.anchoredLocator.locatorHash,me);for(;ae.size>0&&!Ee.hasErrors();){let me=[];for(let[ne,Z]of ae){if(le.has(Z.anchoredDescriptor.descriptorHash))continue;let xe=!0;if(this.topological||this.topologicalDev){let Ne=this.topologicalDev?new Map([...Z.manifest.dependencies,...Z.manifest.devDependencies]):Z.manifest.dependencies;for(let ht of Ne.values()){let H=o.tryWorkspaceByDescriptor(ht);if(xe=H===null||!ae.has(H.anchoredLocator.locatorHash),!xe)break}}if(xe&&(le.add(Z.anchoredDescriptor.descriptorHash),me.push(te(async()=>{let Ne=await g(Z,{commandIndex:++ce});return ae.delete(ne),le.delete(Z.anchoredDescriptor.descriptorHash),Ne})),!U))break}if(me.length===0){let ne=Array.from(ae.values()).map(Z=>G.prettyLocator(r,Z.anchoredLocator)).join(", ");Ee.reportError(3,`Dependency cycle detected (${ne})`);return}let Ae=(await Promise.all(me)).find(ne=>ne!==0);Ce===null&&(Ce=typeof Ae<"u"?1:Ce),(this.topological||this.topologicalDev)&&typeof Ae<"u"&&Ee.reportError(0,"The command failed for workspaces that are depended upon by other workspaces; can't satisfy the dependency graph")}});return Ce!==null?Ce:Be.exitCode()}};function JBe(t,{prefix:e,interlaced:r}){let o=t.createStreamReporter(e),a=new He.DefaultStream;a.pipe(o,{end:!1}),a.on("finish",()=>{o.end()});let n=new Promise(A=>{o.on("finish",()=>{A(a.active)})});if(r)return[a,n];let u=new He.BufferStream;return u.pipe(a,{end:!1}),u.on("finish",()=>{a.end()}),[u,n]}function nPt(t,{configuration:e,commandIndex:r,label:o}){if(!o)return null;let 
n=`[${G.stringifyIdent(t.anchoredLocator)}]:`,u=["#2E86AB","#A23B72","#F18F01","#C73E1D","#CCE2A3"],A=u[r%u.length];return pe.pretty(e,n,A)}var iPt={commands:[ZC,ew]},sPt=iPt;var Hy=()=>({modules:new Map([["@yarnpkg/cli",W1],["@yarnpkg/core",Y1],["@yarnpkg/fslib",kw],["@yarnpkg/libzip",p1],["@yarnpkg/parsers",Ow],["@yarnpkg/shell",E1],["clipanion",Jw],["semver",oPt],["typanion",Vo],["@yarnpkg/plugin-essentials",K8],["@yarnpkg/plugin-compat",Z8],["@yarnpkg/plugin-constraints",dH],["@yarnpkg/plugin-dlx",mH],["@yarnpkg/plugin-exec",CH],["@yarnpkg/plugin-file",IH],["@yarnpkg/plugin-git",W8],["@yarnpkg/plugin-github",DH],["@yarnpkg/plugin-http",PH],["@yarnpkg/plugin-init",SH],["@yarnpkg/plugin-interactive-tools",kq],["@yarnpkg/plugin-link",Qq],["@yarnpkg/plugin-nm",hj],["@yarnpkg/plugin-npm",f5],["@yarnpkg/plugin-npm-cli",w5],["@yarnpkg/plugin-pack",a5],["@yarnpkg/plugin-patch",b5],["@yarnpkg/plugin-pnp",rj],["@yarnpkg/plugin-pnpm",Q5],["@yarnpkg/plugin-stage",O5],["@yarnpkg/plugin-typescript",U5],["@yarnpkg/plugin-version",G5],["@yarnpkg/plugin-workspace-tools",Y5]]),plugins:new Set(["@yarnpkg/plugin-essentials","@yarnpkg/plugin-compat","@yarnpkg/plugin-constraints","@yarnpkg/plugin-dlx","@yarnpkg/plugin-exec","@yarnpkg/plugin-file","@yarnpkg/plugin-git","@yarnpkg/plugin-github","@yarnpkg/plugin-http","@yarnpkg/plugin-init","@yarnpkg/plugin-interactive-tools","@yarnpkg/plugin-link","@yarnpkg/plugin-nm","@yarnpkg/plugin-npm","@yarnpkg/plugin-npm-cli","@yarnpkg/plugin-pack","@yarnpkg/plugin-patch","@yarnpkg/plugin-pnp","@yarnpkg/plugin-pnpm","@yarnpkg/plugin-stage","@yarnpkg/plugin-typescript","@yarnpkg/plugin-version","@yarnpkg/plugin-workspace-tools"])});function eve({cwd:t,pluginConfiguration:e}){let r=new Jo({binaryLabel:"Yarn Package Manager",binaryName:"yarn",binaryVersion:nn??""});return Object.assign(r,{defaultContext:{...Jo.defaultContext,cwd:t,plugins:e,quiet:!1,stdin:process.stdin,stdout:process.stdout,stderr:process.stderr}})}function aPt(t){if(He.parseOptionalBoolean(process.env.YARN_IGNORE_NODE))return!0;let r=process.versions.node,o=">=18.12.0";if(Lr.satisfiesWithPrereleases(r,o))return!0;let a=new st(`This tool requires a Node version compatible with ${o} (got ${r}). 
Upgrade Node, or set \`YARN_IGNORE_NODE=1\` in your environment.`);return Jo.defaultContext.stdout.write(t.error(a)),!1}async function tve({selfPath:t,pluginConfiguration:e}){return await Ke.find(ue.toPortablePath(process.cwd()),e,{strict:!1,usePathCheck:t})}function lPt(t,e,{yarnPath:r}){if(!oe.existsSync(r))return t.error(new Error(`The "yarn-path" option has been set, but the specified location doesn't exist (${r}).`)),1;process.on("SIGINT",()=>{});let o={stdio:"inherit",env:{...process.env,YARN_IGNORE_PATH:"1"}};try{(0,ZBe.execFileSync)(process.execPath,[ue.fromPortablePath(r),...e],o)}catch(a){return a.status??1}return 0}function cPt(t,e){let r=null,o=e;return e.length>=2&&e[0]==="--cwd"?(r=ue.toPortablePath(e[1]),o=e.slice(2)):e.length>=1&&e[0].startsWith("--cwd=")?(r=ue.toPortablePath(e[0].slice(6)),o=e.slice(1)):e[0]==="add"&&e[e.length-2]==="--cwd"&&(r=ue.toPortablePath(e[e.length-1]),o=e.slice(0,e.length-2)),t.defaultContext.cwd=r!==null?K.resolve(r):K.cwd(),o}function uPt(t,{configuration:e}){if(!e.get("enableTelemetry")||$Be.isCI||!process.stdout.isTTY)return;Ke.telemetry=new Oy(e,"puba9cdc10ec5790a2cf4969dd413a47270");let o=/^@yarnpkg\/plugin-(.*)$/;for(let a of e.plugins.keys())Uy.has(a.match(o)?.[1]??"")&&Ke.telemetry?.reportPluginName(a);t.binaryVersion&&Ke.telemetry.reportVersion(t.binaryVersion)}function rve(t,{configuration:e}){for(let r of e.plugins.values())for(let o of r.commands||[])t.register(o)}async function APt(t,e,{selfPath:r,pluginConfiguration:o}){if(!aPt(t))return 1;let a=await tve({selfPath:r,pluginConfiguration:o}),n=a.get("yarnPath"),u=a.get("ignorePath");if(n&&!u)return lPt(t,e,{yarnPath:n});delete process.env.YARN_IGNORE_PATH;let A=cPt(t,e);uPt(t,{configuration:a}),rve(t,{configuration:a});let p=t.process(A,t.defaultContext);return p.help||Ke.telemetry?.reportCommandName(p.path.join(" ")),await t.run(p,t.defaultContext)}async function ihe({cwd:t=K.cwd(),pluginConfiguration:e=Hy()}={}){let r=eve({cwd:t,pluginConfiguration:e}),o=await tve({pluginConfiguration:e,selfPath:null});return rve(r,{configuration:o}),r}async function Wx(t,{cwd:e=K.cwd(),selfPath:r,pluginConfiguration:o}){let a=eve({cwd:e,pluginConfiguration:o});function n(){Jo.defaultContext.stdout.write(`ERROR: Yarn is terminating due to an unexpected empty event loop. +Please report this issue at https://github.com/yarnpkg/berry/issues.`)}process.once("beforeExit",n);try{process.exitCode=42,process.exitCode=await APt(a,t,{selfPath:r,pluginConfiguration:o})}catch(u){Jo.defaultContext.stdout.write(a.error(u)),process.exitCode=1}finally{process.off("beforeExit",n),await oe.rmtempPromise()}}Wx(process.argv.slice(2),{cwd:K.cwd(),selfPath:ue.toPortablePath(ue.resolve(process.argv[1])),pluginConfiguration:Hy()});})(); +/** + @license + Copyright (c) 2015, Rebecca Turner + + Permission to use, copy, modify, and/or distribute this software for any + purpose with or without fee is hereby granted, provided that the above + copyright notice and this permission notice appear in all copies. + + THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH + REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND + FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT, + INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM + LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR + OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR + PERFORMANCE OF THIS SOFTWARE. 
+ */ +/** + @license + Copyright Node.js contributors. All rights reserved. + + Permission is hereby granted, free of charge, to any person obtaining a copy + of this software and associated documentation files (the "Software"), to + deal in the Software without restriction, including without limitation the + rights to use, copy, modify, merge, publish, distribute, sublicense, and/or + sell copies of the Software, and to permit persons to whom the Software is + furnished to do so, subject to the following conditions: + + The above copyright notice and this permission notice shall be included in + all copies or substantial portions of the Software. + + THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR + IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, + FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE + AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER + LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING + FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS + IN THE SOFTWARE. +*/ +/** + @license + The MIT License (MIT) + + Copyright (c) 2014 Blake Embrey (hello@blakeembrey.com) + + Permission is hereby granted, free of charge, to any person obtaining a copy + of this software and associated documentation files (the "Software"), to deal + in the Software without restriction, including without limitation the rights + to use, copy, modify, merge, publish, distribute, sublicense, and/or sell + copies of the Software, and to permit persons to whom the Software is + furnished to do so, subject to the following conditions: + + The above copyright notice and this permission notice shall be included in + all copies or substantial portions of the Software. + + THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR + IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, + FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE + AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER + LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, + OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN + THE SOFTWARE. +*/ +/** + @license + Copyright Joyent, Inc. and other Node contributors. + + Permission is hereby granted, free of charge, to any person obtaining a + copy of this software and associated documentation files (the + "Software"), to deal in the Software without restriction, including + without limitation the rights to use, copy, modify, merge, publish, + distribute, sublicense, and/or sell copies of the Software, and to permit + persons to whom the Software is furnished to do so, subject to the + following conditions: + + The above copyright notice and this permission notice shall be included + in all copies or substantial portions of the Software. + + THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS + OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF + MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN + NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, + DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR + OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE + USE OR OTHER DEALINGS IN THE SOFTWARE. +*/ +/*! Bundled license information: + +is-number/index.js: + (*! 
+ * is-number + * + * Copyright (c) 2014-present, Jon Schlinkert. + * Released under the MIT License. + *) + +to-regex-range/index.js: + (*! + * to-regex-range + * + * Copyright (c) 2015-present, Jon Schlinkert. + * Released under the MIT License. + *) + +fill-range/index.js: + (*! + * fill-range + * + * Copyright (c) 2014-present, Jon Schlinkert. + * Licensed under the MIT License. + *) + +is-extglob/index.js: + (*! + * is-extglob + * + * Copyright (c) 2014-2016, Jon Schlinkert. + * Licensed under the MIT License. + *) + +is-glob/index.js: + (*! + * is-glob + * + * Copyright (c) 2014-2017, Jon Schlinkert. + * Released under the MIT License. + *) + +queue-microtask/index.js: + (*! queue-microtask. MIT License. Feross Aboukhadijeh *) + +run-parallel/index.js: + (*! run-parallel. MIT License. Feross Aboukhadijeh *) + +git-url-parse/lib/index.js: + (*! + * buildToken + * Builds OAuth token prefix (helper function) + * + * @name buildToken + * @function + * @param {GitUrl} obj The parsed Git url object. + * @return {String} token prefix + *) + +object-assign/index.js: + (* + object-assign + (c) Sindre Sorhus + @license MIT + *) + +react/cjs/react.production.min.js: + (** @license React v16.13.1 + * react.production.min.js + * + * Copyright (c) Facebook, Inc. and its affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. + *) + +scheduler/cjs/scheduler.production.min.js: + (** @license React v0.18.0 + * scheduler.production.min.js + * + * Copyright (c) Facebook, Inc. and its affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. + *) + +react-reconciler/cjs/react-reconciler.production.min.js: + (** @license React v0.24.0 + * react-reconciler.production.min.js + * + * Copyright (c) Facebook, Inc. and its affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. + *) + +is-windows/index.js: + (*! + * is-windows + * + * Copyright © 2015-2018, Jon Schlinkert. + * Released under the MIT License. + *) +*/ diff --git a/docs/docs-beta/.yarnrc.yml b/docs/docs-beta/.yarnrc.yml new file mode 100644 index 0000000000000..b094c9570fc86 --- /dev/null +++ b/docs/docs-beta/.yarnrc.yml @@ -0,0 +1,3 @@ +nodeLinker: node-modules + +yarnPath: .yarn/releases/yarn-4.4.0.cjs diff --git a/docs/docs-beta/README.md b/docs/docs-beta/README.md new file mode 100644 index 0000000000000..dc02dd3a0d734 --- /dev/null +++ b/docs/docs-beta/README.md @@ -0,0 +1,145 @@ +# Dagster Docs - Beta + +This is the home of the new Dagster documentation. It is currently in beta and incomplete. +The documentation site is built using [Docusaurus](https://docusaurus.io/), a modern static website generator. + +--- + +## Installation + +The site uses [yarn](https://yarnpkg.com/) for package management. + +To install dependencies: + +``` +yarn install +``` + +**Note**: The yarn binary is checked in, so you do not need to install yarn yourself. + +It also uses [Vale](https://vale.sh/) to check for issues in the documentation. + +Install Vale with: + +```bash +brew install vale +``` + +or + +```bash +pip install vale +``` + +--- + +## Overview of the docs + +- `./src` contains custom components, styles, themes, and layouts. +- `./content-templates` contains the templates for the documentation pages. +- `./docs/` is the source of truth for the documentation. 
+- `/examples/docs_beta_snippets/docs_beta_snippets/` contains all code examples for the documentation.
+
+The docs are broken down into the following sections:
+
+- [Tutorials](./docs/tutorials/)
+- [Guides](./docs/guides/)
+- [Concepts](./docs/concepts/)
+
+`sidebar.ts` and `docusaurus.config.ts` are the main configuration files for the documentation.
+
+---
+
+## Local Development
+
+To start the local development server:
+
+```bash
+yarn start
+```
+
+This command starts a local development server and opens up a browser window. Most changes are reflected live without having to restart the server. Access the website at [http://localhost:3050](http://localhost:3050).
+
+### Linters
+
+To check the documentation for different issues, use the following:
+
+```bash
+## Lints all content, applies lint autofixes and prettier changes
+yarn lint
+
+## Lints documentation content using Vale Server
+## Checks for style guide adherence, grammar, spelling, etc.
+yarn vale
+yarn vale /path/to/file ## check individual file
+yarn vale --no-wrap ## remove wrapping from output
+```
+
+### Diagrams
+
+You can use [Mermaid.js](https://mermaid.js.org/syntax/flowchart.html) to create diagrams. For example:
+
+```mermaid
+flowchart LR
+  Start --> Stop
+```
+
+Refer to the [Mermaid.js documentation](https://mermaid.js.org/) for more info.
+
+### Code examples
+
+To include code snippets, use the following format:
+
+```
+<CodeExample filePath="path/to/file.py" />
+```
+
+The `filePath` is relative to the `./examples/docs_beta_snippets/docs_beta_snippets/` directory.
+
+At minimum, all `.py` files in the `docs_beta_snippets` directory are tested by attempting to load the Python files.
+You can write additional tests for them in the `docs_beta_snippets_test` folder. See the folder for more information.
+
+To type-check the code snippets during development, run the following command from the Dagster root folder.
+This will run `pyright` on all new/changed files relative to the master branch.
+
+```
+make quick_pyright
+```
+
+---
+
+## Build
+
+To build the site for production:
+
+```bash
+yarn build
+```
+
+This command generates static content into the `build` directory and can be served using any static content hosting service. This also checks for any broken links in the documentation.
+
+## Deployment
+
+This site is built and deployed using Vercel.
+
+### API Documentation
+
+API documentation is built in Vercel by overriding the _Build Command_ to the following:
+
+```sh
+yarn sync-api-docs && yarn build
+```
+
+This runs the `scripts/vercel-sync-api-docs.sh` script, which builds the MDX files using the custom `sphinx-mdx-builder` and copies the resulting MDX files to `docs/api`.
+
+## Search
+
+Algolia search is used for search results on the website, as configured in `docusaurus.config.ts`.
+
+The following environment variables must be configured in Vercel:
+
+- `ALGOLIA_APP_ID`
+- `ALGOLIA_API_KEY`
+- `ALGOLIA_INDEX_NAME`
+
+These variables are not loaded when `process.env.ENV === 'development'`.
diff --git a/docs/docs-beta/babel.config.js b/docs/docs-beta/babel.config.js
new file mode 100644
index 0000000000000..e00595dae7d69
--- /dev/null
+++ b/docs/docs-beta/babel.config.js
@@ -0,0 +1,3 @@
+module.exports = {
+  presets: [require.resolve('@docusaurus/core/lib/babel/preset')],
+};
diff --git a/docs/docs-beta/content-templates/concept.md b/docs/docs-beta/content-templates/concept.md
new file mode 100644
index 0000000000000..6ca3de57231bb
--- /dev/null
+++ b/docs/docs-beta/content-templates/concept.md
@@ -0,0 +1,88 @@
+---
+title: ''
+description: ''
+---
+
+This section is an intro that includes:
+
+- A brief description of what the topic is
+- An example of how it could be used in the real world
+- What it can do in the UI
+
+---
+
+## Benefits
+
+This section lists the benefits of using the topic, whatever it is. The items listed here should be solutions to real-world problems that the user cares about, ex:
+
+Using schedules helps you:
+
+- Predictably process and deliver data to stakeholders and business-critical applications
+- Consistently run data pipelines without the need for manual intervention
+- Optimize resource usage by scheduling pipelines to run during off-peak hours
+
+Using [TOPIC] helps you:
+
+- A benefit of the thing
+- Another benefit
+- And one more
+
+---
+
+## Prerequisites
+
+This section lists the prerequisites users must complete before they should/can proceed. For concepts, we should list the other concepts they should be familiar with first.
+
+Before continuing, you should be familiar with:
+
+- Ex: To use asset checks, users should understand Asset definitions first
+- Another one
+- One more
+
+---
+
+## How it works
+
+This section provides a high-level overview of how the concept works without getting too far into the technical details. Code can be shown here, but this section shouldn't focus on it. The goal is to help the user generally understand how the thing works and what they need to do to get it working without overwhelming them with details.
+
+For example, this is the How it works for Schedules:
+
+Schedules run jobs at fixed time intervals and have two main components:
+
+- A job, which targets a selection of assets or ops
+
+- A cron expression, which defines when the schedule runs. Basic and complex schedules are supported, allowing you to have fine-grained control over when runs are executed. With cron syntax, you can:
+
+  - Create custom schedules like "Every hour from 9:00AM - 5:00PM" with cron expressions (`0 9-17 * * *`)
+  - Quickly create basic schedules like "Every day at midnight" with predefined cron definitions (`@daily`, `@midnight`)
+
+  To make creating cron expressions easier, you can use an online tool like Crontab Guru. This tool allows you to create and describe cron expressions in a human-readable format and test the execution dates produced by the expression. Note: While this tool is useful for general cron expression testing, always remember to test your schedules in Dagster to ensure the results are as expected.
+
+For a schedule to run, it must be turned on and an active dagster-daemon process must be running. If you used `dagster dev` to start the Dagster UI/webserver, the daemon process will be automatically launched alongside the webserver.
+
+After these criteria are met, the schedule will run at the interval specified in the cron expression. Schedules will execute in UTC by default, but you can specify a custom timezone.
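+
+For example, the Schedules walkthrough above could be paired with a short snippet. The following is a minimal sketch only; the op, job, and schedule names are illustrative, not part of the template:
+
+```python
+from dagster import ScheduleDefinition, job, op
+
+
+@op
+def say_hello():
+    print("hello")
+
+
+@job
+def my_job():
+    say_hello()
+
+
+# Runs every hour from 9:00AM - 5:00PM. Schedules execute in UTC
+# unless execution_timezone is set.
+business_hours_schedule = ScheduleDefinition(
+    job=my_job,
+    cron_schedule="0 9-17 * * *",
+    execution_timezone="US/Central",
+)
+```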
+
+---
+
+## Getting started
+
+This section is a list of guides / links to pages to help the user get started using the topic.
+
+Check out these guides to get started with [CONCEPT]. From here, you can:
+
+- Construct schedules to run partitioned jobs
+- Execute jobs in specific timezones
+- Learn to test your schedules
+- Identify and resolve common issues with our troubleshooting guide
+
+### Limitations [and notes]
+
+This section should describe any known limitations that could impact the user, ex: "Schedules will execute in UTC unless a timezone is specified"
+
+---
+
+## Related
+
+A list of related links and resources
diff --git a/docs/docs-beta/content-templates/example-reference.md b/docs/docs-beta/content-templates/example-reference.md
new file mode 100644
index 0000000000000..cf63c5932d874
--- /dev/null
+++ b/docs/docs-beta/content-templates/example-reference.md
@@ -0,0 +1,36 @@
+---
+title: '[TOPIC] example reference'
+description: ''
+---
+
+This reference contains a variety of examples using Dagster. Each example contains:
+
+- A summary
+- Additional notes
+- Links to relevant documentation
+- A list of the APIs used in the example
+
+## [Title of example]
+
+This example demonstrates [description of what the example accomplishes].
+
+Example: This example demonstrates how to use resources in schedules. To specify a resource dependency, annotate the resource as a parameter to the schedule's function.
+
+```python title="my_schedule.py"
+@schedule(job=my_job, cron_schedule="* * * * *")
+def logs_then_skips(context):
+    context.log.info("Logging from a schedule!")
+    return SkipReason("Nothing to do")
+```
+
+|                      |     |
+| -------------------- | --- |
+| Notes                |     |
+| Related docs         |     |
+| APIs in this example |     |
+
+---
+
+import InspirationList from '../partials/\_InspirationList.md';
+
+<InspirationList />
diff --git a/docs/docs-beta/content-templates/guide-no-steps.md b/docs/docs-beta/content-templates/guide-no-steps.md
new file mode 100644
index 0000000000000..ac2d4ac2dc9ad
--- /dev/null
+++ b/docs/docs-beta/content-templates/guide-no-steps.md
@@ -0,0 +1,32 @@
+---
+title: 'Title that briefly describes what the guide is for'
+description: 'Description of the guide, useful for SEO and social media links'
+---
+
+Provide a brief introduction to the how-to guide. View [this article](https://diataxis.fr/how-to-guides/) for more information on how to write effective how-to guides. The intro should be no more than a few sentences.
+The title from the frontmatter will be used as the first heading in the guide; you don't need to include it in the intro.
+
+<details>
+  <summary>Prerequisites</summary>
+
+To follow the steps in this guide, you'll need:
+
+- A prerequisite, ex: "Familiarity with [Asset definitions](/concepts/assets)"
+- Another prerequisite, ex: "To install this library"
+- One more
+
+</details>
+
+## Title that describes this section
+
+For section headings:
+
+- Guides can (and should) contain multiple sections, with each one being a small chunk of information. Break large topics into smaller topics, using subsequent headings (H3, H4, etc.) as needed
+- Titles should describe an action, ex: "Generate a token"
+- Don't use gerunds (-ing) in titles, as it can cause issues with translation + SEO
+
+## Next steps
+
+- Add links to related content
+- Go deeper into [Understanding Automation](/concepts/understanding-automation)
+- Explore [Related Example](/)
diff --git a/docs/docs-beta/content-templates/guide-with-steps.md b/docs/docs-beta/content-templates/guide-with-steps.md
new file mode 100644
index 0000000000000..6ec672c138c15
--- /dev/null
+++ b/docs/docs-beta/content-templates/guide-with-steps.md
@@ -0,0 +1,39 @@
+---
+title: 'Title that briefly describes what the guide is for'
+description: 'Description of the guide that is useful for SEO and social media links'
+---
+
+Provide a brief introduction to the how-to guide. View [this article](https://diataxis.fr/how-to-guides/) for more information on how to write effective how-to guides. The intro should be no more than a few sentences.
+The title from the frontmatter will be used as the first heading in the guide; you don't need to include it in the intro.
+
+<details>
+  <summary>Prerequisites</summary>
+
+To follow the steps in this guide, you'll need:
+
+- A prerequisite, ex: "Familiarity with [Asset definitions](/concepts/assets)"
+- Another prerequisite, ex: "To install this library"
+- One more
+
+</details>
+
+## Step 1: Title that describes what this step will do
+
+For section / step headings:
+
+- Titles should describe an action, ex: "Generate a token"
+- Don't use gerunds (-ing) in titles, as it can cause issues with translation + SEO
+- Each section heading should have an identifier that includes the word 'step' and the number of the step
+
+### Step 1.1: Title that describes a substep
+
+If a step would benefit from being broken into smaller steps, follow this section's formatting.
+Each substep should get an H3 and start with `Step N.`, followed by the number of the substep
+
+## Step 2: Another step
+
+## Next steps
+
+- Add links to related content
+- Go deeper into [Understanding Automation](/concepts/understanding-automation)
+- Explore [Related Example](/)
diff --git a/docs/docs-beta/docs/about/community.md b/docs/docs-beta/docs/about/community.md
new file mode 100644
index 0000000000000..830fd0f01dc44
--- /dev/null
+++ b/docs/docs-beta/docs/about/community.md
@@ -0,0 +1,90 @@
+---
+title: 'Community'
+sidebar_position: 10
+---
+
+# Community
+
+No ecosystem is more heterogeneous than data processing. We enthusiastically welcome community contributions to code and to docs, issue and bug reports on [GitHub](https://www.github.com/dagster-io/dagster/), and questions, feature requests, and discussion in our [Slack](https://dagster.io/slack).
+
+We're committed to making the Dagster project inclusive and welcoming. Working on this project should feel friendly, collegial, and relaxed for everyone.
+
+## Open source commitment
+
+:::tip
+Interested in hacking on Dagster? Check out the [Contributing guide](/about/contributing) to get started!
+:::
+
+Dagster itself, defined as the code and intellectual property in the [Dagster public repository](https://www.github.com/dagster-io/dagster/), will forever and always be free. The Dagster core team also builds and operates the commercial product [Dagster+](https://dagster.io/plus) as a distinct and well-defined layer on top of Dagster. We believe that this structure will be the most sustainable for serving our users as the Dagster team and product grows.
+
+## Code of conduct
+
+### Our pledge
+
+As members, contributors, and core team members, we pledge to make participation in our community a harassment-free experience for everyone, regardless of age, body size, visible or invisible disability, ethnicity, sex characteristics, gender identity and expression, level of experience, education, socio-economic status, nationality, personal appearance, race, caste, color, religion, or sexual identity and orientation.
+
+We pledge to act and interact in ways that contribute to an open, welcoming, diverse, inclusive, and healthy community.
+
+### Our standards
+
+Examples of behavior that contributes to a positive environment for our community include:
+
+- Demonstrating empathy and kindness toward other people
+- Being respectful of differing opinions, viewpoints, and experiences
+- Giving and gracefully accepting constructive feedback
+- Accepting responsibility, apologizing to those affected by our mistakes, and learning from the experience
+- Focusing on what's best not just for us as individuals, but for the overall community
+
+Examples of unacceptable behavior include:
+
+- The use of sexualized language or imagery, and sexual attention or advances of any kind
+- Trolling, insulting or derogatory comments, and personal or political attacks
+- Public or private harassment
+- Publishing others' private information, such as a physical or email address, without their explicit permission
+- Other conduct which could reasonably be considered inappropriate in a professional setting
+
+### Slack content guidelines
+
+The Dagster core team will do its best to respond to every message but can't guarantee a response to everyone. Don't treat the community as your own personal customer support service.
+
+Community members are expected to follow the guidelines for discussion in the Dagster community. Make an effort to contribute quality content so that our community can spend more time hanging out and talking about issues rather than cleaning and filtering our communication channels.
+
+- Start discussions in the [right channels](https://app.slack.com/client/TCDGQDUKF/browse-channels)
+- Craft questions with sufficient detail to reproduce issues or address any concerns
+- Use threads for discussions
+- Create new discussions, rather than diverting discussions to new directions midstream
+- Don't demand attention by cross-posting discussions, sending messages outside threads to bump conversations, or explicitly mentioning `@channel`, `@here`, or community members
+- Don't solicit community members about your products and services
+
+### Scope
+
+This Code of Conduct applies within all community spaces, and also applies when an individual is officially representing the community in public spaces. Examples of representing our community include using an official email address, posting on an official social media account, or acting as an appointed representative at an online or offline event.
+
+### Enforcement
+
+#### Responsibilities
+
+The Dagster core team is responsible for clarifying and enforcing our standards of acceptable behavior and will take appropriate and fair corrective action in response to any behavior that they deem inappropriate, threatening, offensive, or harmful. For the full list of core team members, refer to [the Dagster Labs team page](https://dagster.io/about).
+
+The Dagster core team has the right and responsibility to remove, edit, or reject comments, commits, code, wiki edits, issues, and other contributions that aren't aligned to this Code of Conduct, and will communicate reasons for moderation decisions when appropriate.
+
+#### Reporting
+
+Instances of abusive, harassing, or otherwise unacceptable behavior may be reported to the Dagster core team responsible for enforcement at [conduct@dagsterlabs.com](mailto:conduct@dagsterlabs.com). All complaints will be reviewed and investigated promptly and fairly. The Dagster core team is obligated to respect the privacy and security of the reporter of any incident.
+ +#### Guidelines + +The Dagster core team will follow these Community Impact Guidelines in determining the consequences for any action they deem in violation of this Code of Conduct: + +| Level | Community impact | Consequence | +|---|----|----| +| Reminder | Use of inappropriate language or other behavior deemed unprofessional or unwelcome in the community. | A private, written reminder from the Dagster core team, providing clarity around the nature of the violation and an explanation of why the behavior was inappropriate. | +| Warning | A violation through a single incident or series of actions. | A warning with consequences for continued behavior. No interaction with the people involved, including unsolicited interaction with those enforcing the Code of Conduct, for a specified period of time. This includes avoiding interactions in community spaces and external channels like social media. Violating these terms will lead to a permanent ban. | +| Permanent ban | Demonstrating a pattern of violation of community standards, including sustained inappropriate behavior, harassment of an individual, or aggression toward or disparagement of classes of individuals. | A permanent ban from any sort of public interaction within the community. | + +### Attribution + +- This Code of Conduct is adapted from the [Contributor Covenant](https://www.contributor-covenant.org/), version 2.0, available at: [Contributor Covenant Code of Conduct](https://www.contributor-covenant.org/version/2/0/code_of_conduct.html). +- Community Impact Guidelines were inspired by [Mozilla's code of conduct enforcement ladder](https://github.com/mozilla/diversity). +- Content Guidelines were inspired by [Discourse's FAQ for public discussion](https://meta.discourse.org/faq) and [dbt's Rules of the Road](https://docs.getdbt.com/docs/contributing/slack-rules-of-the-road). +- For answers to common questions about this code of conduct, see the [Contributor Covenant FAQ](https://www.contributor-covenant.org/faq), or the [Contributor Covenant Translations](https://www.contributor-covenant.org/translations). diff --git a/docs/docs-beta/docs/about/contributing.md b/docs/docs-beta/docs/about/contributing.md new file mode 100644 index 0000000000000..d4e585d2395d3 --- /dev/null +++ b/docs/docs-beta/docs/about/contributing.md @@ -0,0 +1,5 @@ +--- +title: Contributing +sidebar_position: 20 +unlisted: true +--- diff --git a/docs/docs-beta/docs/about/releases.md b/docs/docs-beta/docs/about/releases.md new file mode 100644 index 0000000000000..4c2bf6db5c19a --- /dev/null +++ b/docs/docs-beta/docs/about/releases.md @@ -0,0 +1,55 @@ +--- +title: 'Releases' +sidebar_position: 30 +--- + +# Releases and compatibility + +We follow [semantic versioning](https://semver.org/) for compatibility between Dagster releases. + +## Dagster core + +Dagster's public, stable APIs won't break within any major release. For example, if a public, stable API exists in Dagster 1.x.y, upgrading to 1.(x+1).y or 1.x.(y+1) shouldn't result in broken code. + +:::tip +If a version breaks your code, help us out by filing an issue on [GitHub](https://github.com/dagster-io/dagster/issues). +::: + +Our public, stable Python API includes: + +- All classes, functions, and variables that are exported at the top-level of the `dagster` package, unless they're marked [experimental](#experimental-apis). +- Public, non-[experimental](#experimental-apis) methods and properties of public, stable classes. Public methods and properties are those included in the [API reference](/todo). 
Within the codebase, they're marked with a `@public` decorator.

### Experimental APIs

The `Experimental` marker allows us to offer new APIs to users and rapidly iterate based on their feedback. Experimental APIs are marked as such in the [API reference](/todo) and usually raise an `ExperimentalWarning` when used.

Experimental APIs may change or disappear within any release, but we try to avoid breaking them within minor releases if they have been around for a long time.

### Deprecated APIs

The `Deprecated` marker indicates that we recommend avoiding an API, usually because there's a preferred option that should be used instead.

Like non-deprecated public stable APIs, deprecated public stable APIs won't break within any major release after 1.0.

## Dagster integration libraries

Dagster's integration libraries haven't yet achieved the same API maturity as Dagster core. For this reason, integration libraries remain on a pre-1.0 versioning track (in general 0.y.z of [semantic versioning](https://semver.org/), and 0.16+ as of Dagster 1.0.0) for the time being. However, 0.16+ library releases remain fully compatible with Dagster 1.x. We will graduate integration libraries one by one to the 1.x versioning track as they achieve API maturity.

While technically the 0.y.z phase of semantic versioning is "anything goes", we're conservative about making changes and will provide guidance about when to expect breaking changes:

- Upgrading to a new dot version within a minor release, such as 0.8.1 to 0.8.2, should never result in broken code. An exception to this guarantee is [experimental APIs](#experimental-apis).
- As often as possible, deprecation warnings will precede removals. For example, if the current version is 0.8.5 and we want to remove an API, we'll issue a deprecation [warning](https://docs.python.org/3/library/warnings.html) when the API is used and remove it in 0.9.0.
- Upgrading to a new minor version, such as 0.7.5 to 0.8.0, may result in breakages or new deprecation [warnings](https://docs.python.org/3/library/warnings.html).

One way to surface these warnings early is sketched at the end of this page.

## Python version support

Each Dagster release strives to support the currently active versions of Python.

When a new version of Python is released, Dagster will work to add support once Dagster's own core dependencies have been updated to support it. **Note**: Some external libraries may not always be compatible with the latest version of Python.

When a version of Python reaches end of life, Dagster will drop support for it at the next convenient non-patch release.

## Changelog

The best way to stay on top of what changes are included in each release is through the [Dagster repository's changelog](https://github.com/dagster-io/dagster/blob/master/CHANGES.md). We call out breaking changes and deprecations in the **Breaking Changes** and **Deprecations** sections.
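As referenced above, because deprecation warnings precede removals, one way to catch upcoming breakage before you upgrade is to escalate deprecation warnings to errors while running your tests. This is a minimal sketch, not an official recommendation, and it assumes Dagster's deprecation warnings derive from Python's built-in `DeprecationWarning`:

```python
import warnings

# Escalate all DeprecationWarnings to errors while running tests, so any use
# of a Dagster API slated for removal fails loudly before you upgrade.
# Note: this also escalates deprecation warnings from other libraries.
warnings.simplefilter("error", DeprecationWarning)
```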
diff --git a/docs/docs-beta/docs/api/.gitignore b/docs/docs-beta/docs/api/.gitignore new file mode 100644 index 0000000000000..dd0812045e517 --- /dev/null +++ b/docs/docs-beta/docs/api/.gitignore @@ -0,0 +1 @@ +**/*.mdx diff --git a/python_modules/libraries/dagster-dbt/dagster_dbt_tests/dagster_dbt_python_test_project/analyses/.gitkeep b/docs/docs-beta/docs/api/.gitkeep similarity index 100% rename from python_modules/libraries/dagster-dbt/dagster_dbt_tests/dagster_dbt_python_test_project/analyses/.gitkeep rename to docs/docs-beta/docs/api/.gitkeep diff --git a/docs/docs-beta/docs/api/index.mdx b/docs/docs-beta/docs/api/index.mdx new file mode 100644 index 0000000000000..5bc60863ec197 --- /dev/null +++ b/docs/docs-beta/docs/api/index.mdx @@ -0,0 +1,8 @@ +--- +sidebar_class_name: hidden +title: API Docs +--- + +import DocCardList from '@theme/DocCardList'; + + diff --git a/docs/docs-beta/docs/changelog.md b/docs/docs-beta/docs/changelog.md new file mode 100644 index 0000000000000..94f067f740a34 --- /dev/null +++ b/docs/docs-beta/docs/changelog.md @@ -0,0 +1,6 @@ +--- +title: "Changelog" +unlisted: true +--- + +# Changelog diff --git a/docs/docs-beta/docs/concepts/assets.md b/docs/docs-beta/docs/concepts/assets.md new file mode 100644 index 0000000000000..9ff5ed162de1f --- /dev/null +++ b/docs/docs-beta/docs/concepts/assets.md @@ -0,0 +1,10 @@ +--- +title: Assets +unlisted: true +--- + +# Assets + +## Assets and ops + +Assets and ops are two different concepts in Dagster. diff --git a/docs/docs-beta/docs/concepts/assets/asset-checks.md b/docs/docs-beta/docs/concepts/assets/asset-checks.md new file mode 100644 index 0000000000000..328dac16243a4 --- /dev/null +++ b/docs/docs-beta/docs/concepts/assets/asset-checks.md @@ -0,0 +1,7 @@ +--- +title: "Asset checks" +sidebar_position: 70 +unlisted: true +--- + +# Asset checks diff --git a/docs/docs-beta/docs/concepts/assets/asset-dependencies.md b/docs/docs-beta/docs/concepts/assets/asset-dependencies.md new file mode 100644 index 0000000000000..2089ba4680d50 --- /dev/null +++ b/docs/docs-beta/docs/concepts/assets/asset-dependencies.md @@ -0,0 +1,7 @@ +--- +title: "Asset dependencies" +sidebar_position: 30 +unlisted: true +--- + +# Asset dependencies diff --git a/docs/docs-beta/docs/concepts/assets/asset-materialization.md b/docs/docs-beta/docs/concepts/assets/asset-materialization.md new file mode 100644 index 0000000000000..01ffb57146241 --- /dev/null +++ b/docs/docs-beta/docs/concepts/assets/asset-materialization.md @@ -0,0 +1,7 @@ +--- +title: "Asset materialization" +sidebar_position: 20 +unlisted: true +--- + +# Asset materialization diff --git a/docs/docs-beta/docs/concepts/assets/asset-metadata.md b/docs/docs-beta/docs/concepts/assets/asset-metadata.md new file mode 100644 index 0000000000000..5f707d5641247 --- /dev/null +++ b/docs/docs-beta/docs/concepts/assets/asset-metadata.md @@ -0,0 +1,7 @@ +--- +title: "Asset metadata" +sidebar_position: 40 +unlisted: true +--- + +# Asset metadata diff --git a/docs/docs-beta/docs/concepts/assets/thinking-in-assets.md b/docs/docs-beta/docs/concepts/assets/thinking-in-assets.md new file mode 100644 index 0000000000000..fbbbcde65fe96 --- /dev/null +++ b/docs/docs-beta/docs/concepts/assets/thinking-in-assets.md @@ -0,0 +1,7 @@ +--- +title: "Think in assets" +sidebar_position: 10 +unlisted: true +--- + +# Think in assets diff --git a/docs/docs-beta/docs/concepts/automation.md b/docs/docs-beta/docs/concepts/automation.md new file mode 100644 index 0000000000000..58a8cfafe497f --- /dev/null +++ 
b/docs/docs-beta/docs/concepts/automation.md @@ -0,0 +1,66 @@
---
title: About Automation
---

There are several ways to automate the execution of your data pipelines with Dagster.

The first and most basic system is the [Schedule](/guides/schedules), which responds to time.

[Sensors](/guides/sensors) are like schedules, but they respond to an external event defined by the user.

[Asset Sensors](/guides/asset-sensors) are a special case of sensor that responds to changes in asset materialization as reported by the Event Log.

Finally, Declarative Automation is a more sophisticated system that uses conditions defined on assets to determine when to execute.

## Schedules

In Dagster, a schedule is defined with the `ScheduleDefinition` class or through the `@schedule` decorator. The `@schedule` decorator is more flexible than the `ScheduleDefinition` class, allowing you to configure job behavior or emit log messages as the schedule is processed.

Schedules were one of the first types of automation in Dagster, created before the introduction of Software-Defined Assets. As such, many of the examples may seem foreign if you're used to working only within the asset framework.

For more on how assets and ops interrelate, read about [Assets and Ops](/concepts/assets#assets-and-ops).

The `dagster-daemon` process is responsible for submitting runs by checking each schedule at a regular interval to determine if it's time to execute the underlying job.

A schedule can be thought of as a wrapper around two pieces:

- A `JobDefinition`, which is a set of assets to materialize or ops to execute.
- A `cron` string, which describes the schedule's cadence.

### Define a schedule using `ScheduleDefinition`

```python
from dagster import ScheduleDefinition

# Run ecommerce_job at 5:15 AM, Monday through Friday
ecommerce_schedule = ScheduleDefinition(
    job=ecommerce_job,
    cron_schedule="15 5 * * 1-5",
)
```

By default, schedules aren't enabled. You can enable them by visiting the Automation tab and toggling the schedule, or by setting a default status of `RUNNING` when you define the schedule:

```python
from dagster import DefaultScheduleStatus, ScheduleDefinition

# default_status=RUNNING enables the schedule without a manual toggle in the UI
ecommerce_schedule = ScheduleDefinition(
    job=ecommerce_job,
    cron_schedule="15 5 * * 1-5",
    default_status=DefaultScheduleStatus.RUNNING,
)
```

### Define a schedule using `@schedule`

If you want more control over the schedule, you can use the `@schedule` decorator. In doing so, you become responsible for returning either a `RunRequest` or a `SkipReason`. You can also emit logs, which will be visible in the Dagster UI in a given schedule's tick history.

```python
from dagster import RunRequest, schedule


@schedule(cron_schedule="15 5 * * 1-5")
def ecommerce_schedule(context):
    # Logs emitted here are visible in the schedule's tick history in the Dagster UI
    context.log.info("This log message will be visible in the Dagster UI.")
    return RunRequest()
```

diff --git a/docs/docs-beta/docs/concepts/automation/declarative-automation.md b/docs/docs-beta/docs/concepts/automation/declarative-automation.md new file mode 100644 index 0000000000000..1e7221492f11c --- /dev/null +++ b/docs/docs-beta/docs/concepts/automation/declarative-automation.md @@ -0,0 +1,7 @@
---
title: "Declarative Automation"
sidebar_position: 10
unlisted: true
---

# Declarative Automation

diff --git a/docs/docs-beta/docs/concepts/automation/schedules.md b/docs/docs-beta/docs/concepts/automation/schedules.md new file mode 100644 index 0000000000000..25867d5cbee78 --- /dev/null +++ b/docs/docs-beta/docs/concepts/automation/schedules.md @@ -0,0 +1,3 @@
---
unlisted: true
---

diff --git a/docs/docs-beta/docs/concepts/automation/sensors.md b/docs/docs-beta/docs/concepts/automation/sensors.md new file mode 100644 index 0000000000000..6ede003072419 --- /dev/null +++ b/docs/docs-beta/docs/concepts/automation/sensors.md @@ -0,0 +1,7 @@
---
title: "Sensors"
sidebar_position: 20
unlisted: true
---

# Sensors

diff --git a/docs/docs-beta/docs/concepts/execution.md b/docs/docs-beta/docs/concepts/execution.md new file mode 100644 index 0000000000000..01f5affa3b030 --- /dev/null +++ b/docs/docs-beta/docs/concepts/execution.md @@ -0,0 +1,5 @@
---
unlisted: true
---

# Execution

diff --git a/docs/docs-beta/docs/concepts/execution/dagster-daemon.md b/docs/docs-beta/docs/concepts/execution/dagster-daemon.md new file mode 100644 index 0000000000000..e19cbb520dbeb --- /dev/null +++ b/docs/docs-beta/docs/concepts/execution/dagster-daemon.md @@ -0,0 +1,7 @@
---
title: "Dagster daemon"
sidebar_position: 10
unlisted: true
---

# Dagster daemon

diff --git a/docs/docs-beta/docs/concepts/execution/run-coordinators.md b/docs/docs-beta/docs/concepts/execution/run-coordinators.md new file mode 100644 index 0000000000000..aa871f1e371ed --- /dev/null +++ b/docs/docs-beta/docs/concepts/execution/run-coordinators.md @@ -0,0 +1,7 @@
---
title: "Run coordinators"
sidebar_position: 40
unlisted: true
---

# Run coordinators

diff --git a/docs/docs-beta/docs/concepts/execution/run-executors.md b/docs/docs-beta/docs/concepts/execution/run-executors.md new file mode 100644 index 0000000000000..ff006420ac983 --- /dev/null +++ b/docs/docs-beta/docs/concepts/execution/run-executors.md @@ -0,0 +1,7 @@
---
title: "Run executors"
sidebar_position: 30
unlisted: true
---

# Run executors

diff --git a/docs/docs-beta/docs/concepts/execution/run-launchers.md b/docs/docs-beta/docs/concepts/execution/run-launchers.md new file mode 100644 index 0000000000000..f08630d093ebb --- /dev/null +++ b/docs/docs-beta/docs/concepts/execution/run-launchers.md @@ -0,0 +1,7 @@
---
title: "Run launchers"
sidebar_position: 20
unlisted: true
---

# Run launchers

diff --git a/docs/docs-beta/docs/concepts/io-managers.md b/docs/docs-beta/docs/concepts/io-managers.md new file mode 100644 index 0000000000000..d9494abca542b --- /dev/null +++ b/docs/docs-beta/docs/concepts/io-managers.md @@ -0,0 +1,4 @@
---
title: "I/O managers"
unlisted: true
---

diff --git a/docs/docs-beta/docs/concepts/ops-jobs.md b/docs/docs-beta/docs/concepts/ops-jobs.md new file mode 100644 index 0000000000000..4382fa8efa15d --- /dev/null +++ b/docs/docs-beta/docs/concepts/ops-jobs.md @@ -0,0 +1,5 @@
---
unlisted: true
---

# Ops and jobs
diff --git a/docs/docs-beta/docs/concepts/ops-jobs/job-configuration.md b/docs/docs-beta/docs/concepts/ops-jobs/job-configuration.md new file mode 100644 index 0000000000000..23be86378acca --- /dev/null +++ b/docs/docs-beta/docs/concepts/ops-jobs/job-configuration.md @@ -0,0 +1,7 @@ +--- +title: "Job configuration" +sidebar_position: 10 +unlisted: true +--- + +# Job configuration diff --git a/docs/docs-beta/docs/concepts/ops-jobs/ops-vs-assets.md b/docs/docs-beta/docs/concepts/ops-jobs/ops-vs-assets.md new file mode 100644 index 0000000000000..81795aa23e439 --- /dev/null +++ b/docs/docs-beta/docs/concepts/ops-jobs/ops-vs-assets.md @@ -0,0 +1,7 @@ +--- +title: "Ops vs. assets" +sidebar_position: 10 +unlisted: true +--- + +# Ops vs. assets diff --git a/docs/docs-beta/docs/concepts/partitions.md b/docs/docs-beta/docs/concepts/partitions.md new file mode 100644 index 0000000000000..c6219e4be7878 --- /dev/null +++ b/docs/docs-beta/docs/concepts/partitions.md @@ -0,0 +1,6 @@ +--- +title: "Partitions" +unlisted: true +--- + +# Partitions diff --git a/docs/docs-beta/docs/concepts/resources.md b/docs/docs-beta/docs/concepts/resources.md new file mode 100644 index 0000000000000..08cdb3b01d061 --- /dev/null +++ b/docs/docs-beta/docs/concepts/resources.md @@ -0,0 +1,5 @@ +--- +unlisted: true +--- + +# Resources diff --git a/examples/docs_snippets/docs_snippets/tutorial/saving/__init__.py b/docs/docs-beta/docs/concepts/schedules.mdx similarity index 100% rename from examples/docs_snippets/docs_snippets/tutorial/saving/__init__.py rename to docs/docs-beta/docs/concepts/schedules.mdx diff --git a/docs/docs-beta/docs/concepts/sensors.md b/docs/docs-beta/docs/concepts/sensors.md new file mode 100644 index 0000000000000..25867d5cbee78 --- /dev/null +++ b/docs/docs-beta/docs/concepts/sensors.md @@ -0,0 +1,3 @@ +--- +unlisted: true +--- diff --git a/docs/docs-beta/docs/concepts/understanding-assets.md b/docs/docs-beta/docs/concepts/understanding-assets.md new file mode 100644 index 0000000000000..b19ef79420905 --- /dev/null +++ b/docs/docs-beta/docs/concepts/understanding-assets.md @@ -0,0 +1,8 @@ +--- +title: About assets +description: Understanding the concept of assets in Dagster +last_update: + date: 2024-08-11 + author: Pedram Navid +unlisted: true +--- diff --git a/docs/docs-beta/docs/dagster-plus.md b/docs/docs-beta/docs/dagster-plus.md new file mode 100644 index 0000000000000..61c264492f0eb --- /dev/null +++ b/docs/docs-beta/docs/dagster-plus.md @@ -0,0 +1,6 @@ +--- +title: "Dagster+" +displayed_sidebar: "dagsterPlus" +--- + +# Dagster+ diff --git a/docs/docs-beta/docs/dagster-plus/access/authentication.md b/docs/docs-beta/docs/dagster-plus/access/authentication.md new file mode 100644 index 0000000000000..a97dee5f69a1c --- /dev/null +++ b/docs/docs-beta/docs/dagster-plus/access/authentication.md @@ -0,0 +1,7 @@ +--- +title: "Authentication" +displayed_sidebar: "dagsterPlus" +unlisted: true +--- + +# Authentication diff --git a/docs/docs-beta/docs/dagster-plus/access/authentication/azure-ad-sso.md b/docs/docs-beta/docs/dagster-plus/access/authentication/azure-ad-sso.md new file mode 100644 index 0000000000000..316f793c67ca0 --- /dev/null +++ b/docs/docs-beta/docs/dagster-plus/access/authentication/azure-ad-sso.md @@ -0,0 +1,95 @@ +--- +title: 'Azure Active Directory SSO' +displayed_sidebar: 'dagsterPlus' +--- + +# Setting up Azure Active Directory SSO for Dagster+ + +In this guide, you'll configure Azure Active Directory (AD) to use single sign-on (SSO) with your Dagster+ organization. + +
      + Prerequisites + +To complete the steps in this guide, you'll need: + +- **An existing Azure AD account** +- **To install the [`dagster-cloud` CLI](/todo)** +- **The following in Dagster+:** + - A Pro plan + - [Access to a user token](/todo) + - [Organization Admin permissions](/dagster-plus/access/rbac/user-roles-permissions) in your organization + +
## Step 1: Add the Dagster+ app in Azure AD \{#dagster-app}

In this step, you'll add the Dagster+ app to your list of managed SaaS apps in Azure AD.

1. Sign in to the Azure portal.
2. On the left navigation pane, click the **Azure Active Directory** service.
3. Navigate to **Enterprise Applications** and then **All Applications**.
4. Click **New application**.
5. In the **Add from the gallery** section, type **Dagster+** in the search box.
6. Select **Dagster+** from the results panel and then add the app. Wait a few seconds while the app is added to your tenant.

## Step 2: Configure SSO in Azure AD \{#configure-sso}

In this step, you'll configure and enable SSO for Azure AD in your Azure portal.

1. On the **Dagster+** application integration page, locate the **Manage** section and select **single sign-on**.
2. On the **Select a single sign-on method** page, select **SAML**.
3. On the **Set up single sign-on with SAML** page, click the pencil icon for **Basic SAML Configuration** to edit the settings.

   ![Settings Dropdown](/img/placeholder.svg)

4. In the **Basic SAML Configuration** section, fill in the **Identifier** and **Reply URL** fields as follows:

   Copy and paste the following URL, replacing `<organization>` with your Dagster+ organization name:

   ```
   https://<organization>.dagster.cloud/auth/saml/consume
   ```

5. Click **Set additional URLs**.
6. In the **Sign-on URL** field, copy and paste the URL you entered in the **Identifier** and **Reply URL** fields.
7. Next, you'll configure the SAML assertions. In addition to the default attributes, Dagster+ requires the following:

   - `FirstName` - `user.givenname`
   - `LastName` - `user.surname`
   - `Email` - `user.userprincipalname`

   Add these attribute mappings to the SAML assertion.
8. On the **Set up single sign-on with SAML** page:
   1. Locate the **SAML Signing Certificate** section.
   2. Next to **Federation Metadata XML**, click **Download**:

      ![Download SAML Certificate](/img/placeholder.svg)

      When prompted, save the SAML metadata file to your computer.

## Step 3: Upload the SAML metadata to Dagster+ \{#upload-saml}

After you've downloaded the SAML metadata file, upload it to Dagster+ using the `dagster-cloud` CLI:

```shell
dagster-cloud organization settings saml upload-identity-provider-metadata \
  --api-token=<user_token> \
  --url https://<organization>.dagster.cloud
```

## Step 4: Create a test user \{#test-user}

In this section, you'll create a test user in the Azure portal.

1. From the left pane in the Azure portal, click **Azure Active Directory**.
2. Click **Users > All users**.
3. Click **New user** at the top of the screen.
4. In the **User** properties, fill in the following fields:
   - **Name**: Enter `B.Simon`.
   - **User name**: Enter `B.Simon@contoso.com`.
   - Select the **Show password** checkbox and write down the value displayed in the **Password** box.
5. Click **Create**.

import TestSSO from '../../../partials/\_TestSSO.md';

<TestSSO />

Click **Test this application** in the Azure portal. If successful, you'll be automatically signed into your Dagster+ organization.
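One optional sanity check before uploading in Step 3 is to confirm that the downloaded metadata file parses as well-formed XML, since a truncated or hand-edited file is a common source of setup failures. This is a minimal sketch, assuming the file was saved as `metadata.xml`:

```python
import xml.etree.ElementTree as ET

# "metadata.xml" is a placeholder for the Federation Metadata XML file
# downloaded from the Azure portal in Step 2.
root = ET.parse("metadata.xml").getroot()

# Valid SAML metadata is rooted at an EntityDescriptor element in the
# urn:oasis:names:tc:SAML:2.0:metadata namespace.
print(root.tag)
```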
diff --git a/docs/docs-beta/docs/dagster-plus/access/authentication/google-workspace-sso.md b/docs/docs-beta/docs/dagster-plus/access/authentication/google-workspace-sso.md new file mode 100644 index 0000000000000..d9a019c4c5d91 --- /dev/null +++ b/docs/docs-beta/docs/dagster-plus/access/authentication/google-workspace-sso.md @@ -0,0 +1,110 @@ +--- +title: 'Google Workspace SSO' +displayed_sidebar: 'dagsterPlus' +--- + +# Setting up Google Workspace SSO for Dagster+ + +In this guide, you'll configure Google Workspace to use single sign-on (SSO) with your Dagster+ organization. + +
      + Prerequisites + +To complete the steps in this guide, you'll need: + +- **The following in Google**: + - An existing Google account + - [Workspace Admin permissions](https://support.google.com/a/answer/6365252?hl=en&ref_topic=4388346) +- **To install the [`dagster-cloud` CLI](/todo)** +- **The following in Dagster+:** + - A Pro plan + - [Access to a user token](/todo) + - [Organization Admin permissions](/dagster-plus/access/rbac/user-roles-permissions) in your organization + +
      + +## Step 1: Add the Dagster+ app in Google Workspace \{#dagster-app} + +1. Navigate to your [Google Admin Console](https://admin.google.com). +2. Using the sidebar, navigate to **Apps > Web and mobile apps**: + + ![Google Workspace Sidebar](/img/placeholder.svg) + +3. On the **Web and mobile apps** page, click **Add App > Add custom SAML app**: + ![Add App](/img/placeholder.svg) + This opens a new page for adding app details. + +## Step 2: Configure SSO in Google Workspace \{#configure-sso} + +1. On the **App details** page: + 1. Fill in the **App name** field. + 2. Fill in the **Description** field. + + The page should look similar to the following: + + ![Application Details](/img/placeholder.svg) + + 3. Click **Continue**. + +2. On the **Google Identity Provider details** page, click **Continue**. No action is required for this page. +3. On the **Service provider details** page: + 1. In the **ACS URL** and **Entity ID** fields: + + Copy and paste the following URL, replacing `` with your Dagster+ organization name: + + ``` + https://.dagster.cloud/auth/saml/consume + ``` + + 2. Check the **Signed Response** box. The page should look similar to the image below. In this example, the organization's name is `hooli` and the Dagster+ domain is `https://hooli.dagster.cloud`: + + ![Service Provider Details](/img/placeholder.svg) + + 3. When finished, click **Continue**. +4. On the **Attributes** page: + 1. Click **Add mapping** to add and configure the following attributes: + + - **Basic Information > First Name** - `FirstName` + - **Basic Information > Last Name** - `LastName` + - **Basic Information > Email** - `Email` + + The page should look like the following image: + + ![Attribute Mapping](/img/placeholder.svg) + + 2. Click **Finish**. + +## Step 3: Upload the SAML metadata to Dagster+ \{#upload-saml} + +Next, you'll save and upload the application's SAML metadata to Dagster+. This will enable single sign-on. + +1. In your Google Workspace, open the Dagster+ application you added in [Step 2](#configure-sso). +2. Click **Download metadata**: + + ![SAML Metadata](/img/placeholder.svg) + +3. In the modal that displays, click **Download metadata** to start the download. Save the file to your computer. +4. After you've downloaded the SAML metadata file, upload it to Dagster+ using the `dagster-cloud` CLI: + + ```shell + dagster-cloud organization settings saml upload-identity-provider-metadata \ + --api-token= \ + --url https://.dagster.cloud + ``` + +## Step 4: Grant access to users \{#grant-access} + +In this step, you'll assign users in your Google Workspace to the Dagster+ application. This allows members of the workspace to log in to Dagster+ using their credentials when the single sign-on flow is initiated. + +1. In the Google Workspace Dagster+ application, click **User access**. +2. Select an organizational unit. +3. Click **ON for everyone**. +4. Click **Save**. + + ![Assign New Login](/img/placeholder.svg) + +import TestSSO from '../../../partials/\_TestSSO.md'; + + + +In the Google Workspace portal, click the **Dagster+ icon**. If successful, you'll be automatically signed into your Dagster+ organization. 
diff --git a/docs/docs-beta/docs/dagster-plus/access/authentication/okta-scim.md b/docs/docs-beta/docs/dagster-plus/access/authentication/okta-scim.md new file mode 100644 index 0000000000000..d7ecf4beb9c22 --- /dev/null +++ b/docs/docs-beta/docs/dagster-plus/access/authentication/okta-scim.md @@ -0,0 +1,7 @@ +--- +title: 'Okta SCIM provisioning' +displayed_sidebar: 'dagsterPlus' +unlisted: true +--- + +# Okta SCIM provisioning diff --git a/docs/docs-beta/docs/dagster-plus/access/authentication/okta-sso.md b/docs/docs-beta/docs/dagster-plus/access/authentication/okta-sso.md new file mode 100644 index 0000000000000..1fadb6e5c4c5d --- /dev/null +++ b/docs/docs-beta/docs/dagster-plus/access/authentication/okta-sso.md @@ -0,0 +1,94 @@ +--- +title: 'Okta SSO' +displayed_sidebar: 'dagsterPlus' +--- + +# Setting up Okta SSO for Dagster+ + +In this guide, you'll configure Okta to use single sign-on (SSO) with your Dagster+ organization. + +
      + Prerequisites + +To complete the steps in this guide, you'll need: + +- **An existing Okta account** +- **To install the [`dagster-cloud` CLI](/todo)** +- **The following in Dagster+:** + - A Pro plan + - [Access to a user token](/todo) + - [Organization Admin permissions](/dagster-plus/access/rbac/user-roles-permissions) in your organization + +
      + + +## Step 1: Add the Dagster+ app in Okta \{#dagster-app} + +1. Sign in to your Okta Admin Dashboard. +2. Using the sidebar, click **Applications > Applications**. +3. On the **Applications** page, click **Browse App Catalog**. +4. On the **Browse App Integration Catalog** page, search for `Dagster Cloud`. +5. Add and save the application. + + +## Step 2: Configure SSO in Okta \{#configure-sso} + +1. In Okta, open the Dagster Cloud application and navigate to its **Sign On Settings**. +2. Scroll down to the **Advanced Sign-on settings** section. +3. In the **Organization** field, enter your Dagster+ organization name. This is used to route the SAML response to the correct Dagster+ subdomain. + + For example, your organization name is `hooli` and your Dagster+ domain is `https://hooli.dagster.cloud`. To configure this correctly, you'd enter `hooli` into the **Organization** field: + + ![Okta Subdomain Configuration](/img/placeholder.svg) + +4. When finished, click **Done**. + + +## Step 3: Upload the SAML metadata to Dagster+ \{#upload-saml} + +Next, you'll save and upload the application's SAML metadata to Dagster+. This will enable single sign-on. + +1. In the **Sign On Settings**, navigate to the **SAML Signing Certificates** section. +2. Click the **Actions** button of the **Active** certificate. +3. Click **View IdP metadata**: + + ![Okta IdP metadata options](/img/placeholder.svg) + + This will open a new page in your browser with the IdP metadata in XML format. + +4. Right-click the page and use **Save As** or **Save Page As**: + + ![Save IdP metadata as XML](/img/placeholder.svg) + + In Chrome and Edge, the file will be downloaded as an XML file. In Firefox, choose **Save Page As > Save as type**, then select **All files**. + + :::note + Copying and pasting the metadata can cause formatting issues that will prevent successful setup. Saving the page directly from the browser will avoid this. + ::: + +5. After you've downloaded the metadata file, upload it to Dagster+ using the `dagster-cloud` CLI: + + ```shell + dagster-cloud organization settings saml upload-identity-provider-metadata \ + --api-token= \ + --url https://.dagster.cloud + ``` + + +## Step 4: Grant access to users \{#grant-access} + +Next, you'll assign users to the Dagster+ application in Okta. This will allow them to log in using their Okta credentials when the single sign-on flow is initiated. + +1. In the Dagster+ application, navigate to **Assignments**. +2. Click **Assign > Assign to People**. +3. For each user you want to have access to Dagster+, click **Assign** then **Save and Go Back**. + +import TestSSO from '../../../partials/\_TestSSO.md'; + + + +In the Okta **Applications** page, click the **Dagster+** icon: + +![Okta idP Login](/img/placeholder.svg) + +If successful, you'll be automatically signed into your Dagster+ organization. diff --git a/docs/docs-beta/docs/dagster-plus/access/authentication/onelogin-sso.md b/docs/docs-beta/docs/dagster-plus/access/authentication/onelogin-sso.md new file mode 100644 index 0000000000000..d15f9576acb62 --- /dev/null +++ b/docs/docs-beta/docs/dagster-plus/access/authentication/onelogin-sso.md @@ -0,0 +1,91 @@ +--- +title: 'OneLogin SSO' +displayed_sidebar: 'dagsterPlus' +--- + +# OneLogin SSO + +In this guide, you'll configure OneLogin to use single sign-on (SSO) with your Dagster+ organization. + + +
      + Prerequisites + +To follow the steps in this guide, you'll need: + +- **The following in OneLogin:** + - An existing OneLogin account + - Admin permissions +- **To install the [`dagster-cloud` CLI](/todo)** +- **The following in Dagster+:** + - A Pro plan + - [Access to a user token](/todo) + - [Organization Admin permissions](/dagster-plus/access/rbac/user-roles-permissions) in your organization + +
## Step 1: Add the Dagster+ app in OneLogin \{#dagster-app}

1. Sign in to your OneLogin portal.
2. Navigate to **Administration > Applications**.
3. On the **Applications** page, click **Add App**.
4. On the **Find Applications** page, search for `Dagster+`:

   ![Find Applications in OneLogin](/img/placeholder.svg)

5. Add and save the application.

## Step 2: Configure SSO in OneLogin \{#configure-sso}

1. In OneLogin, open the application and navigate to its **Configuration**.
2. In the **Dagster+ organisation name** field, enter your Dagster+ organization name. This is used to route the SAML response to the correct Dagster+ subdomain.

   For example, if your organization name is `hooli` and your Dagster+ domain is `https://hooli.dagster.cloud`, you'd enter `hooli` into the field.
3. When finished, click **Done**.

## Step 3: Upload the SAML metadata to Dagster+ \{#upload-saml}

Next, you'll save and upload the application's SAML metadata to Dagster+. This will enable single sign-on.

1. In OneLogin, open the Dagster+ application.
2. Navigate to **More Actions > SAML Metadata**.
3. When prompted, save the file to your computer.
4. After you've downloaded the SAML metadata file, upload it to Dagster+ using the `dagster-cloud` CLI:

   ```shell
   dagster-cloud organization settings saml upload-identity-provider-metadata \
     --api-token=<user_token> \
     --url https://<organization>.dagster.cloud
   ```

## Step 4: Grant access to users \{#grant-access}

Next, you'll assign users to the Dagster+ application in OneLogin. This will allow them to log in using their OneLogin credentials when the single sign-on flow is initiated.

1. In OneLogin, navigate to **Users**.
2. Select a user.
3. On the user's page, click **Applications**.
4. Assign the user to Dagster+. In the following image, the user `Test D'Test` has been assigned to Dagster+:

   ![Screenshot of Assign New Login in OneLogin](/img/placeholder.svg)

5. Click **Continue**.
6. Click **Save User**.
7. Repeat steps 2-6 for every user you want to have access to Dagster+.

import TestSSO from '../../../partials/\_TestSSO.md';

<TestSSO />

In the OneLogin portal, click the Dagster+ icon:

![Screenshot of the Dagster+ icon in OneLogin](/img/placeholder.svg)

If successful, you'll be automatically signed into your Dagster+ organization.

diff --git a/docs/docs-beta/docs/dagster-plus/access/authentication/pingone-sso.md b/docs/docs-beta/docs/dagster-plus/access/authentication/pingone-sso.md new file mode 100644 index 0000000000000..c4ef93b0d0ee4 --- /dev/null +++ b/docs/docs-beta/docs/dagster-plus/access/authentication/pingone-sso.md @@ -0,0 +1,120 @@
---
title: 'PingOne SSO'
displayed_sidebar: 'dagsterPlus'
---

# Setting up PingOne SSO for Dagster+

In this guide, you'll configure PingOne to use single sign-on (SSO) with your Dagster+ organization.
      + Prerequisites + +To complete the steps in this guide, you'll need: + +- **The following in PingOne:** + - An existing PingOne account + - Organization admin permissions +- **To install the [`dagster-cloud` CLI](/todo)** +- **The following in Dagster+:** + - A Pro plan + - [Access to a user token](/todo) + - [Organization Admin permissions](/dagster-plus/access/rbac/user-roles-permissions) in your organization + +
      + +## Step 1: Add the Dagster+ app in PingOne \{#dagster-app} + +1. Sign into your PingOne Console. +2. Using the sidebar, click **Connections > Applications**. + + ![PingOne Sidebar](/img/placeholder.svg) + +3. On the **Applications** page, add an application. +4. In **Select an application type**, click **Web app**. +5. Click **SAML > Configure**: + + ![Add App](/img/placeholder.svg) + +## Step 2: Configure SSO in PingOne \{#configure-sso} + +1. In the **Create App Profile** page: + + 1. Add an application name, description, and icon: + + ![Application Details](/img/placeholder.svg) + + 2. When finished, click **Save and Continue.** + +2. In the **Configure SAML** page: + + 1. Fill in the following: + + - **ACS URLs** and **Entity ID**: Copy and paste the following URL, replacing `` with your Dagster+ organization name: + + ``` + https://.dagster.cloud/auth/saml/consume + ``` + + - **Assertion Validity Duration**: Type `60`. + In the following example, the organization's name is `hooli` and the Dagster+ domain is `https://hooli.dagster.cloud`: + + ![Service Provider Details](/img/placeholder.svg) + + 2. When finished, click **Save and Continue.** + +3. In the **Map Attributes** page: + + 1. Configure the following attributes: + + | Application attribute | Outgoing value | + | --------------------- | -------------- | + | Email | Email Address | + | FirstName | Given Name | + | LastName | Family Name | + + The page should look similar to the following: + + ![Attribute Mapping](/img/placeholder.svg) + + 2. When finished, click **Save and Continue.** + +## Step 3: Upload the SAML metadata to Dagster+ \{#upload-saml} + +Next, you'll save and upload the application's SAML metadata to Dagster+. This will enable single sign-on. + +1. In PingOne, open the Dagster+ application. +2. Click the **Configuration** tab. +3. In the **Connection Details** section, click **Download Metadata**: + + ![SAML Metadata](/img/placeholder.svg) + +4. When prompted, save the file to your computer. +5. After you've downloaded the SAML metadata file, upload it to Dagster+ using the `dagster-cloud` CLI: + + ```shell + dagster-cloud organization settings saml upload-identity-provider-metadata \ + --api-token= \ + --url https://.dagster.cloud + ``` + +## Step 4: Grant access to users \{#grant-access} + +Next, you'll assign users to the Dagster+ application in PingOne. This will allow them to log in using their PingOne credentials when the single sign-on flow is initiated. + +1. In the Dagster+ application, click the **Access** tab. +2. Click the **pencil icon** to edit the **Group membership policy**: + + ![Assign New Login](/img/placeholder.svg) + +3. Edit the policy as needed to grant users access to the application. + +import TestSSO from '../../../partials/\_TestSSO.md'; + + + +In the PingOne application portal, click the **Dagster+** icon: + +![Identity Provider Login](/img/placeholder.svg) + +If successful, you'll be automatically signed in to your Dagster+ organization. 
diff --git a/docs/docs-beta/docs/dagster-plus/access/authentication/scim-provisioning.md b/docs/docs-beta/docs/dagster-plus/access/authentication/scim-provisioning.md new file mode 100644 index 0000000000000..6e45a0ddb65f0 --- /dev/null +++ b/docs/docs-beta/docs/dagster-plus/access/authentication/scim-provisioning.md @@ -0,0 +1,7 @@ +--- +title: 'Utilizing SCIM provisioning' +displayed_sidebar: 'dagsterPlus' +unlisted: true +--- + +# Utilizing SCIM provisioning diff --git a/docs/docs-beta/docs/dagster-plus/access/rbac.md b/docs/docs-beta/docs/dagster-plus/access/rbac.md new file mode 100644 index 0000000000000..32098f061b1a3 --- /dev/null +++ b/docs/docs-beta/docs/dagster-plus/access/rbac.md @@ -0,0 +1,7 @@ +--- +title: "Role-based Access Control" +displayed_sidebar: "dagsterPlus" +unlisted: true +--- + +# Role-based Access Control diff --git a/docs/docs-beta/docs/dagster-plus/access/rbac/audit-logs.md b/docs/docs-beta/docs/dagster-plus/access/rbac/audit-logs.md new file mode 100644 index 0000000000000..fc574334a960a --- /dev/null +++ b/docs/docs-beta/docs/dagster-plus/access/rbac/audit-logs.md @@ -0,0 +1,68 @@ +--- +title: "Audit logs" +displayed_sidebar: "dagsterPlus" +sidebar_position: 4 +--- + +# Audit logs + +The Dagster+ audit log enables Dagster+ Pro organizations to track and attribute changes to their Dagster deployment. + +For large organizations, tracking down when and by whom changes were made can be crucial for maintaining security and compliance. The audit log is also valuable + for tracking operational history, including sensor and schedule updates. + +This guide walks through how to access the audit log and details the interactions which are tracked in the audit log. + +
      +Prerequisites +- A Dagster+ Pro organization +- An [Organization Admin](/dagster-plus/access/rbac/user-roles-permissions) role in your Dagster+ organization +
## View audit logs

To access the audit logs:

1. Click your user icon at the top right corner of the page.
2. Click **Organization settings**.
3. Click the **Audit log** tab.

:::warning

Add screenshot

:::

Each entry in the audit log indicates when an action was taken, the user who performed the action, the action taken, and the deployment that the action affected. To view additional details for an action, click the **Show** button.

## Filter the audit log

The **Filter** button near the top left of the page can be used to filter the list of logs. You can filter by any combination of user, event type, affected deployment, and time frame.

## Audit log entry types

| Event type | Description | Additional details |
| --- | --- | --- |
| Log in | A user logs in to the Dagster+ organization | |
| Update sensor | A user toggles a sensor on or off | The sensor name, code location, and cursor |
| Update schedule | A user toggles a schedule on or off | The schedule name, code location, and cursor |
| Update alert policy | A user modifies an [alert policy](/dagster-plus/deployment/alerts/ui) | The new configuration for the alert policy |
| Create deployment | A user creates a new deployment | Whether the deployment is a branch deployment |
| Delete deployment | A user removes an existing deployment | Whether the deployment is a branch deployment |
| Create user token | A user creates a new user token | |
| Revoke user token | A user revokes an existing user token | |
| Change user permissions | A user alters [permissions](/dagster-plus/access/rbac/user-roles-permissions) for another user | The permission grant and targeted deployment |
| Create agent token | A user creates a new agent token | |
| Revoke agent token | A user revokes an existing agent token | |
| Update agent token permissions | A user alters [permissions](/dagster-plus/access/rbac/user-roles-permissions) for an agent token | The permission grant and targeted deployment |
| Create secret | A user creates a new [environment variable](/dagster-plus/deployment/environment-variables/dagster-ui) | The created variable name |
| Update secret | A user modifies an existing [environment variable](/dagster-plus/deployment/environment-variables/dagster-ui) | The previous and current variable names and whether the value was changed |
| Delete secret | A user removes an [environment variable](/dagster-plus/deployment/environment-variables/dagster-ui) | The deleted variable name |
| Update subscription | A user modifies the selected Dagster+ subscription for the organization | The previous and current plan types |

## Programmatic access to audit logs

Audit logs can be accessed programmatically over the Dagster+ GraphQL API. You can access a visual GraphiQL interface by navigating to `https://<organization>.dagster.cloud/<deployment>/graphql` in your browser. You can also query the API directly using the Python client.
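As a sketch of what a direct query could look like — the endpoint shape follows the GraphiQL URL above, but the audit log query itself is illustrative, so take the real query and field names from the GraphiQL schema browser:

```python
import requests

GRAPHQL_URL = "https://<organization>.dagster.cloud/<deployment>/graphql"
USER_TOKEN = "<user_token>"  # a Dagster+ user token

# Placeholder: paste the audit log query from the GraphiQL schema browser here.
AUDIT_LOG_QUERY = "..."

response = requests.post(
    GRAPHQL_URL,
    json={"query": AUDIT_LOG_QUERY},
    # Assumption: Dagster+ accepts the user token in this header; verify
    # against the Dagster+ API documentation for your deployment.
    headers={"Dagster-Cloud-Api-Token": USER_TOKEN},
)
response.raise_for_status()
print(response.json())
```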
+ + diff --git a/docs/docs-beta/docs/dagster-plus/access/rbac/teams.md b/docs/docs-beta/docs/dagster-plus/access/rbac/teams.md new file mode 100644 index 0000000000000..e45bb82c261a1 --- /dev/null +++ b/docs/docs-beta/docs/dagster-plus/access/rbac/teams.md @@ -0,0 +1,78 @@ +--- +title: "Team management" +displayed_sidebar: "dagsterPlus" +sidebar_position: 2 +--- + +# Team management in Dagster+ + +As part of [role-based access control (RBAC)](/dagster-plus/access/rbac/user-roles-permissions), Dagster+ supports the ability to assign users to teams. A team is a group of users with a set of default deployment, code location, and Branch Deployment user roles. + +
      + Prerequisites + +To complete the steps in this guide, you'll need: + +- A Dagster+ Pro plan +- Dagster+ [Organization Admin permissions](/dagster-plus/access/rbac/user-roles-permissions): + - In your organization, and + - For the deployments where you want to manage teams + +
      + + +## Adding teams + +1. In the Dagster+ UI, click the **user menu (your icon) > Organization Settings**. +2. Click the **Teams** tab. +3. Click the **Create a team** button. +4. In the window that displays, enter a name in the **Team name** field. +5. Click **Create team**. + +After the team is created, you can [add team members](#adding-team-members) and [assign user roles to deployments](#managing-team-roles). + +## Adding team members + +Navigate to the **Organization Settings > Teams** tab and locate the team you want to add team members to. Then: + +1. Click the **Edit** button in the **Actions** column. +2. In the **Members** tab, use the search bar to locate a user in your organization. +3. Once located, click the user. +4. Click **Add user to team**. +5. Repeat as needed, clicking **Done** when finished. + +## Removing team members + +Navigate to the **Organization Settings > Teams** tab and locate the team you want to remove team members from. Then: + +1. Click the **Edit** button in the **Actions** column. +2. In the **Members** tab, locate the user in the list of team members. +3. Click **Remove from team**. +4. Repeat as needed, clicking **Done** when finished. + +## Managing team roles + +Navigate to the **Organization Settings > Teams** tab and locate the team you want to manage roles for. Then: + +1. Click the **Edit** button in the **Actions** column. +2. In the **Roles** tab, click the **Edit team role** button next to the deployment where you want to modify the team's role. +3. In the window that displays, select the team role for the deployment. This [role](/dagster-plus/access/rbac/user-roles-permissions) will be used as the default for this team for all code locations in the deployment. +4. Click **Save**. +5. To set permissions for individual [code locations](/dagster-plus/access/rbac/user-roles-permissions) in a deployment: + 1. Click the toggle to the left of the deployment to open a list of code locations. + 2. Next to a code location, click **Edit team role**. + 3. Select the team role for the code location. + 4. Click **Save**. + +## Removing teams + +Navigate to the **Organization Settings > Teams** tab and locate the team you want to remove. Then: + +1. Click the **Edit** button in the **Actions** column. +2. In the modal that displays, click the **Delete team** button. +3. When prompted, confirm the deletion. + +## Next steps + +- Learn more about RBAC in [Understanding User Roles & Permissions](/dagster-plus/access/rbac/user-roles-permissions) +- Learn more about how to manage users in Dagster+ in [Understanding User Management in Dagster+](/dagster-plus/access/rbac/users) diff --git a/docs/docs-beta/docs/dagster-plus/access/rbac/user-roles-permissions.md b/docs/docs-beta/docs/dagster-plus/access/rbac/user-roles-permissions.md new file mode 100644 index 0000000000000..c8e94ca526d51 --- /dev/null +++ b/docs/docs-beta/docs/dagster-plus/access/rbac/user-roles-permissions.md @@ -0,0 +1,211 @@ +--- +title: 'User roles & permissions' +displayed_sidebar: 'dagsterPlus' +sidebar_position: 3 +--- + +# Understanding user roles & permissions in Dagster+ + +Role-based access control (RBAC) enables you to grant specific permissions to users in your organization, ensuring that Dagster users have access to what they require in Dagster+, and no more. + +In this guide, we'll cover how RBAC works in Dagster+, how to assign roles to users, and the granular permissions for each user role. + +
      + Prerequisites + +To complete the steps in this guide, you'll need: + +- A Dagster+ account + - Additionally, in certain cases listed below, a Dagster+ Pro plan + +
      + +## Dagster+ user roles + +Dagster+ uses a hierarchical model for RBAC, meaning that the most permissive roles include permissions from the roles beneath them. The following user roles are currently supported, in order from the **most** permissive to the **least** permissive: + +- Organization Admin +- Admin +- Editor +- Launcher (Pro plans only) +- Viewer + +For example, the **Admin** user role includes permissions specific to this role and all permissions in the **Editor**, **Launcher**, and **Viewer** user roles. Refer to the [User permissions reference](#user-permissions-reference) for the full list of user permissions in Dagster+. + +### User role enforcement + +All user roles are enforced both in Dagster+ and the GraphQL API. + +### Teams + +Dagster+ Pro users can create teams of users and assign default permission sets. Refer to the [Managing teams in Dagster+](/dagster-plus/access/rbac/teams) guide for more info. + +## Assigning user and team roles + +With the exception of the **Organization Admin** role, user and team roles are set on a per-deployment basis. + +Organization Admins have access to the entire organization, including all [deployments](/todo), [code locations](/dagster-plus/deployment/code-locations), and [Branch Deployments](/dagster-plus/deployment/branch-deployments). + +| Level | Plan | Description | +| ------------------ | --------- | ---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | +| Deployment | All plans | Defines the level of access for a given deployment. Roles set at this level will be the default role for the user or team for all code locations in the deployment.

      Note: Granting access to a deployment grants a minimum of Viewer access to all code locations. Preventing access for specific code locations isn't currently supported. Additionally, having access to a deployment doesn't grant access to Branch Deployments - those permissions must be granted separately. | +| Code location | Pro | Defines the level of access for a given code location in a deployment.

      Dagster+ Pro users can [override the default deployment-level role for individual code locations](/dagster-plus/deployment/code-locations). For example, if the Deployment role is Launcher, you could override this role with a more permissive role, such as Editor or Admin.

Non-Pro users have the same level of access for all code locations in a deployment. |
| Branch deployments | All plans | Defines the level of access for all Branch Deployments in the code locations the user or team has access to. |

### Applying role overrides

As previously mentioned, you can define individual user roles for users in your organization.

Dagster+ Pro users can also apply permission overrides to grant specific exceptions.

Overrides may only be used to apply a **more permissive** role. If, for example, the default role is **Admin** or **Organization Admin**, overrides will be disabled, as these are already the most permissive roles.

#### Code locations

To override a code location role for an individual user:

1. Locate the user in the list of users.
2. Click **Edit**.
3. Click the toggle to the left of the deployment to open a list of code locations.
4. Next to a code location, click **Edit user role**.
5. Select the user role for the code location:
   - TODO: add picture previously at "/images/dagster-cloud/user-token-management/code-location-override.png"
6. Click **Save**.

#### Team members

Users in your organization can belong to one or more [teams](/dagster-plus/access/rbac/teams). When determining a user's level of access, Dagster+ will use the **most permissive** role assigned to the user between all of their team memberships and any individual role grants.

For example, let's look at a user with the following roles for our `dev` deployment:

- **Team 1**: Launcher
- **Team 2**: Viewer
- **Individual**: Viewer

In this example, the user would have **Launcher** access to the `dev` deployment, because the Launcher role is more permissive than Viewer.

The same rule applies to code location and Branch Deployment roles.

#### Viewing overrides

To view deployment-level overrides for a specific user, locate the user on the **Users** page and hover over a deployment:

TODO: add picture previously at "/images/dagster-cloud/user-token-management/user-overrides-popup.png"

If there are code location-level overrides, a small **N override(s)** link will display beneath the user's deployment role. Hover over it to display the list of overrides:

TODO: add picture previously at "/images/dagster-cloud/user-token-management/code-location-override-popup.png"

#### Removing overrides

1. Locate the user in the list of users.
2. Click **Edit**.
3. To remove an override:
   - **For a deployment**, click **Edit user role** next to the deployment.
   - **For a code location**, click the toggle next to the deployment to display a list of code locations. Click **Edit user role** next to the code location.
4. Click the **Remove override** button.
5. Click **Save**.

## User permissions reference

### General

| | Viewer | Launcher | Editor | Admin | Organization
admin |
| --- | --- | --- | --- | --- | --- |
| View runs of [jobs](/concepts/ops-jobs) | ✅ | ✅ | ✅ | ✅ | ✅ |
| Launch, re-execute, terminate, and delete runs of jobs | ❌ | ✅ | ✅ | ✅ | ✅ |
| Start and stop [schedules](/concepts/schedules) | ❌ | ❌ | ✅ | ✅ | ✅ |
| Start and stop [sensors](/concepts/sensors) | ❌ | ❌ | ✅ | ✅ | ✅ |
| Wipe assets | ❌ | ❌ | ✅ | ✅ | ✅ |
| Launch and cancel [backfills](/guides/backfill) | ❌ | ✅ | ✅ | ✅ | ✅ |
| Add dynamic partitions | ❌ | ❌ | ✅ | ✅ | ✅ |

### Deployments

Deployment settings are accessed in the UI by navigating to **user menu (your icon) > Organization Settings > Deployments**.

| | Viewer | Launcher | Editor | Admin | Organization
      admin | +|----------------------------------------------------------------------------------------------|-------|-----------|--------|-------|-------------------------------| +| View [deployments](/todo) | ✅ | ✅ | ✅ | ✅ | ✅ | +| Modify [deployment](/todo) settings | ❌ | ❌ | ✅ | ✅ | ✅ | +| Create, edit, delete [environment variables](/dagster-plus/deployment/environment-variables) | ❌ | ❌ | ✅ | ✅ | ✅ | +| View [environment variable](/dagster-plus/deployment/environment-variables) values | ❌ | ❌ | ✅ | ✅ | ✅ | +| Export [environment variables](/dagster-plus/deployment/environment-variables) | ❌ | ❌ | ✅ | ✅ | ✅ | +| Create and delete [deployments](/todo) | ❌ | ❌ | ❌ | ❌ | ✅ | +| Create [Branch Deployments](/dagster-plus/deployment/branch-deployments) | ❌ | ❌ | ✅ | ✅ | ✅ | + +### Code locations + +Code locations are accessed in the UI by navigating to **Deployment > Code locations**. + +| | Viewer | Launcher | Editor | Admin | Organization
      admin | +| ------------------------------------------------------------------------------- | ------ | -------- | ------ | ----- | ------------------------ | +| View [code locations](/dagster-plus/deployment/code-locations) | ✅ | ✅ | ✅ | ✅ | ✅ | +| Create and remove [code locations](/dagster-plus/deployment/code-locations) | ❌ | ❌ | ✅ | ✅ | ✅ | +| Reload [code locations](/dagster-plus/deployment/code-locations) and workspaces | ❌ | ❌ | ✅ | ✅ | ✅ | + +### Agent tokens + +Agent tokens are accessed in the UI by navigating to **user menu (your icon) > Organization Settings > Tokens**. + +| | Viewer | Launcher | Editor | Admin | Organization
      admin | +| ----------------------------------------------------------- | ------ | -------- | ------ | ----- | ------------------------ | +| View [agent tokens](/dagster-plus/deployment/hybrid/tokens) | ❌ | ❌ | ❌ | ❌ | ✅ | +| Create agent tokens | ❌ | ❌ | ❌ | ❌ | ✅ | +| Edit agent tokens | ❌ | ❌ | ❌ | ❌ | ✅ | +| Revoke agent tokens | ❌ | ❌ | ❌ | ❌ | ✅ | + +### User tokens + +User tokens are accessed in the UI by navigating to **user menu (your icon) > Organization Settings > Tokens**. + +| | Viewer | Launcher | Editor | Admin | Organization
      admin | +| ---------------------------------------- | ------ | -------- | ------ | ----- | ------------------------ | +| View and create own [user tokens](/todo) | ✅ | ✅ | ✅ | ✅ | ✅ | +| List all user tokens | ❌ | ❌ | ❌ | ❌ | ✅ | +| Revoke all user tokens | ❌ | ❌ | ❌ | ❌ | ✅ | + +### Users + +User management is accessed in the UI by navigating to **user menu (your icon) > Organization Settings > Users**. + +| | Viewer | Launcher | Editor | Admin | Organization
      admin | +| --------------------------------------------- | ------ | -------- | ------ | ----- | ------------------------ | +| [View users](/dagster-plus/access/rbac/users) | ✅ | ✅ | ✅ | ✅ | ✅ | +| Add users | ❌ | ❌ | ❌ | ✅ | ✅ | +| Edit user roles | ❌ | ❌ | ❌ | ❌ | ✅ | +| Remove users | ❌ | ❌ | ❌ | ❌ | ✅ | + +### Teams + +Team management is accessed in the UI by navigating to **user menu (your icon) > Organization Settings > Teams**. + +**Note**: Admin users can modify teams only in deployments where they're an Admin. + +| | Viewer | Launcher | Editor | Admin | Organization
admin | +| --------------------------------------------- | ------ | -------- | ------ | ----- | ------------------------ | +| [View teams](/dagster-plus/access/rbac/teams) | ✅ | ✅ | ✅ | ✅ | ✅ | +| Modify team permissions | ❌ | ❌ | ❌ | ✅ | ✅ | +| Create teams | ❌ | ❌ | ❌ | ❌ | ✅ | +| Rename teams | ❌ | ❌ | ❌ | ❌ | ✅ | +| Add/remove team members | ❌ | ❌ | ❌ | ❌ | ✅ | +| Remove teams | ❌ | ❌ | ❌ | ❌ | ✅ | + +### Workspace administration + +| | Viewer | Launcher | Editor | Admin | Organization
      admin | +| ------------------------------------------------------ | ------ | -------- | ------ | ----- | ------------------------ | +| Manage [alerts](/dagster-plus/deployment/alerts) | ❌ | ❌ | ✅ | ✅ | ✅ | +| Edit workspace | ❌ | ❌ | ✅ | ✅ | ✅ | +| [Administer SAML](/dagster-plus/access/authentication) | ❌ | ❌ | ❌ | ❌ | ✅ | +| [Manage SCIM](/todo) | ❌ | ❌ | ❌ | ❌ | ✅ | +| View usage | ❌ | ❌ | ❌ | ❌ | ✅ | +| Manage billing | ❌ | ❌ | ❌ | ❌ | ✅ | +| View audit logs | ❌ | ❌ | ❌ | ❌ | ✅ | + +## Next steps + +- Learn more about how to manage users in Dagster+ in [Understanding User Management in Dagster+](/dagster-plus/access/rbac/users) +- Learn more about how to manage teams in Dagster+ in [Understanding Team Management in Dagster+](/dagster-plus/access/rbac/teams) +- Learn more about SCIM provisioning in [Understanding SCIM Provisioning](/todo) +- Learn more about authentication in [Understanding Authentication](/dagster-plus/access/authentication) diff --git a/docs/docs-beta/docs/dagster-plus/access/rbac/users.md b/docs/docs-beta/docs/dagster-plus/access/rbac/users.md new file mode 100644 index 0000000000000..0fa920b4c4ca9 --- /dev/null +++ b/docs/docs-beta/docs/dagster-plus/access/rbac/users.md @@ -0,0 +1,93 @@ +--- +title: "Managing users in Dagster+" +displayed_sidebar: "dagsterPlus" +sidebar_label: "User management" +sidebar_position: 10 +--- + +Dagster+ allows you to grant specific permissions to your organization's users, ensuring that Dagster users have access only to what they require. + +In this guide, you'll learn how to manage users and their permissions using the Dagster+ UI. + +
      +Prerequisites + +- A Dagster+ account +- The required [Dagster+ permissions](/todo): + - **Organization Admins** can add, manage, and remove users + - **Admins** can add users + +
+ +## Before you start + +- **If System for Cross-domain Identity Management (SCIM) provisioning is enabled,** you'll need to add new users in your identity provider (IdP). Adding users will be disabled in Dagster+. +- **If using Google for single sign-on (SSO)**, users must be added in Dagster+ before they can log in. +- **If using an Identity Provider (IdP) like Okta for SSO**, users must be assigned to the Dagster app in the IdP to be able to log in to Dagster+. Refer to the [SSO setup guides](/todo) for setup instructions for each of our supported IdP solutions. + +By default, users will be granted Viewer permissions on each deployment. The default role can be adjusted by modifying the [`sso_default_role` deployment setting](/todo). + +## Adding users to Dagster+ + +1. Sign in to your Dagster+ account. +2. Click the **user menu (your icon) > Organization Settings**. +3. Click the **Users** tab. +4. Click **Add new user**. +5. In the **User email** field, enter the user's email address. +6. Click **Add user**. + +After the user is created, they will be notified via email, and you can [add the user to teams](#teams) and [assign user roles for each deployment](#user-roles). + +![Screenshot of assigning roles to a user](/img/placeholder.svg) + +## Adding users to teams \{#teams} + +:::note +Teams are a Dagster+ Pro feature. +::: + +Teams are useful for centralizing permission sets for different types of users. Refer to [Managing teams](/todo) for more information about creating and managing teams. + +![Screenshot of Managing teams page](/img/placeholder.svg) + +:::note +When determining a user's level of access, Dagster+ will use the **most permissive** role assigned to the user across all of their team memberships and any individual role grants. Refer to [Managing user roles and permissions](/todo) for more information. +::: + +## Assigning user roles \{#user-roles} + +In the **Roles** section, you can assign a [user role](/todo) for each deployment, granting the user a set of permissions that controls their access to various features within the platform. + +1. Next to a deployment, click **Edit user role**. +2. Select the user role for the deployment. This [user role](/todo) will be used as the default for all code locations in the deployment. +3. Click **Save**. +4. **Pro only**: To set permissions for individual [code locations](/todo) in a deployment: + 1. Click the toggle to the left of the deployment to open a list of code locations. + 2. Next to a code location, click **Edit user role**. + 3. Select the user role for the code location. + 4. Click **Save**. +5. Repeat the previous steps for each deployment. +6. **Optional**: To change the user's permissions for branch deployments: + 1. Next to **All branch deployments**, click **Edit user role**. + 2. Select the user role to use for all branch deployments. + 3. Click **Save**. +7. Click **Done**. + +## Removing users + +Removing a user removes them from the organization. **Note**: If using a SAML-based SSO solution like Okta, you'll also need to remove the user from the IdP. Removing the user in Dagster+ doesn't remove them from the IdP. + +1. Sign in to your Dagster+ account. +2. Click the **user menu (your icon) > Organization Settings**. +3. Click the **Users** tab. +4. Locate the user in the user list. +5. Click **Edit**. +6. Click **Remove user**. +7. When prompted, confirm the removal.
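As noted in "Before you start", the default role granted to newly added users comes from the `sso_default_role` deployment setting. A minimal sketch of adjusting it, assuming the documented `dagster-cloud deployment settings set-from-file` subcommand and the upper-case role-name format (verify both against your CLI version):

```yaml
# deployment_settings.yaml -- sketch; makes LAUNCHER the default role for
# users who sign in via SSO. Apply with (assumed subcommand):
#   dagster-cloud deployment settings set-from-file deployment_settings.yaml
sso_default_role: LAUNCHER
```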
+ +## Next steps + +- Learn more about role-based access control (RBAC) in [Understanding User Roles & Permissions](/dagster-plus/access/rbac/user-roles-permissions) +- Learn more about how to manage teams in Dagster+ in [Understanding Team Management in Dagster+](/dagster-plus/access/rbac/teams) +- Learn more about SCIM provisioning in [Understanding SCIM Provisioning](/dagster-plus/access/authentication/scim-provisioning) +- Learn more about authentication in [Understanding Authentication](/dagster-plus/access/authentication) diff --git a/docs/docs-beta/docs/dagster-plus/data-catalog.md b/docs/docs-beta/docs/dagster-plus/data-catalog.md new file mode 100644 index 0000000000000..ac71fc43f8cde --- /dev/null +++ b/docs/docs-beta/docs/dagster-plus/data-catalog.md @@ -0,0 +1,6 @@ +--- +title: "Data catalog" +unlisted: true +--- + +# Dagster+ data catalog diff --git a/docs/docs-beta/docs/dagster-plus/deployment/alerts.md b/docs/docs-beta/docs/dagster-plus/deployment/alerts.md new file mode 100644 index 0000000000000..37c7b6c897035 --- /dev/null +++ b/docs/docs-beta/docs/dagster-plus/deployment/alerts.md @@ -0,0 +1,322 @@ +--- +title: Setting up alerts on Dagster+ +sidebar_position: 30 +sidebar_label: "Dagster+ Alerts" +--- +[comment]: <> (This file is automatically generated by `dagster-plus/deployment/alerts/generate_alerts_doc.py`) + +Dagster+ allows you to configure alerts to automatically fire in response to a range of events. These alerts can be sent to a variety of different services, depending on your organization's needs. + +These alerts can be configured in the Dagster+ UI, or using the `dagster-cloud` CLI tool. + +
+Prerequisites +- **Organization Admin**, **Admin**, or **Editor** permissions on Dagster+ +
+ +## Configuring a notification service + +To start, you'll need to configure a service to send alerts. Dagster+ currently supports sending alerts through email, Microsoft Teams, PagerDuty, and Slack. + + + + No additional configuration is required to send emails from Dagster+. + +All alert emails will be sent by `"no-reply@dagster.cloud"` or `"no-reply@<organization_name>.dagster.cloud"`. Alerts can be configured to be sent to any number of emails. + + + Create an incoming webhook by following the [Microsoft Teams documentation](https://learn.microsoft.com/en-us/microsoftteams/platform/webhooks-and-connectors/how-to/add-incoming-webhook?tabs=newteams%2Cdotnet). + +This will provide you with a **webhook URL**, which will be required when configuring alerts in the UI (after selecting "Microsoft Teams" as your Notification Service) or using the CLI (in the `notification_service` configuration). + + + + :::note +You will need sufficient permissions in PagerDuty to add or edit services. +::: + +In PagerDuty, you can either: + +- [Create a new service](https://support.pagerduty.com/main/docs/services-and-integrations#create-a-service), and add Dagster+ as an integration, or +- [Edit an existing service](https://support.pagerduty.com/main/docs/services-and-integrations#edit-service-settings) to include Dagster+ as an integration + +When configuring the integration, choose **Dagster+** as the integration type, and choose an integration name in the format `dagster-plus-{your_service_name}`. + +After adding your new integration, you will be taken to a screen containing an **Integration Key**. This value will be required when configuring alerts in the UI (after selecting "PagerDuty" as your Notification Service) or using the CLI (in the `notification_service` configuration). + + + + :::note +You will need sufficient permissions in Slack to add apps to your workspace. +::: +Navigate to **Deployment > Alerts** in the Dagster+ UI and click **Connect to Slack**. From there, you can complete the installation process. + +When setting up an alert, you can choose a Slack channel to send those alerts to. Make sure to invite the `@Dagster+` bot to any channel that you'd like to receive an alert in. + + + + +## Alerting when a run fails +You can set up alerts to notify you when a run fails. + +By default, these alerts will target all runs in the deployment, but they can be scoped to runs with a specific tag. + + + 1. In the Dagster UI, click **Deployment**. +2. Click the **Alerts** tab. +3. Click **Add alert policy**. +4. Select **Run alert** from the dropdown. + +5. Select **Job failure**. + +If desired, add **tags** in the format `{key}:{value}` to filter the runs that will be considered. + + + + Execute the following command to sync the configured alert policy to your Dagster+ deployment. + + ```bash + dagster-cloud deployment alert-policies sync -a /path/to/alert_policies.yaml + ``` + + + + + + + + + + + + + + + + + + +## Alerting when a run is taking too long to complete +You can set up alerts to notify you whenever a run takes longer than a threshold amount of time. + +By default, these alerts will target all runs in the deployment, but they can be scoped to runs with a specific tag. + + + 1. In the Dagster UI, click **Deployment**. +2. Click the **Alerts** tab. +3. Click **Add alert policy**. +4. Select **Run alert** from the dropdown. + +5. Select **Job running over** and specify the number of hours after which to alert. + +If desired, add **tags** in the format `{key}:{value}` to filter the runs that will be considered.
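The CLI tabs in this section sync policies from a local `alert_policies.yaml` file, which the UI steps don't show. A minimal sketch of such a file for the run-failure alert above (the policy name, tag, and email address are illustrative placeholders, and the field names follow the `dagster-cloud` alert-policies schema as we understand it):

```yaml
# alert_policies.yaml -- illustrative sketch; confirm field names against
# your dagster-cloud version before syncing.
alert_policies:
  - name: "run-failure-email"
    description: "Email the team when a critical run fails"
    # Optional: only runs carrying this tag trigger the alert.
    tags:
      - key: "level"
        value: "critical"
    event_types:
      - "JOB_FAILURE"
    notification_service:
      email:
        email_addresses:
          - "team@example.com"
```

Note that a sync typically replaces the deployment's full set of alert policies, so keep every policy you want to retain in the one file.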
+ + + Execute the following command to sync the configured alert policy to your Dagster+ deployment. + + ```bash + dagster-cloud deployment alert-policies sync -a /path/to/alert_policies.yaml + ``` + + + + + + + + + + + + + + + + + + +## Alerting when an asset fails to materialize +You can set up alerts to notify you when an asset materialization attempt fails. + +By default, these alerts will target all assets in the deployment, but they can be scoped to a specific asset or group of assets. + + + 1. In the Dagster UI, click **Deployment**. +2. Click the **Alerts** tab. +3. Click **Add alert policy**. +4. Select **Asset alert** from the dropdown. + +5. Select **Failure** under the **Materializations** heading. + +If desired, select a **target** from the dropdown menu to scope this alert to a specific asset or group. + + + + Execute the following command to sync the configured alert policy to your Dagster+ deployment. + + ```bash + dagster-cloud deployment alert-policies sync -a /path/to/alert_policies.yaml + ``` + + + + + + + + + + + + + + + + + + +## Alerting when an asset check fails +You can set up alerts to notify you when an asset check on an asset fails. + +By default, these alerts will target all assets in the deployment, but they can be scoped to checks on a specific asset or group of assets. + + + 1. In the Dagster UI, click **Deployment**. +2. Click the **Alerts** tab. +3. Click **Add alert policy**. +4. Select **Asset alert** from the dropdown. + +5. Select **Failed (ERROR)** under the **Asset Checks** heading. + +If desired, select a **target** from the dropdown menu to scope this alert to a specific asset or group. + + + + Execute the following command to sync the configured alert policy to your Dagster+ deployment. + + ```bash + dagster-cloud deployment alert-policies sync -a /path/to/alert_policies.yaml + ``` + + + + + + + + + + + + + + + + + + +## Alerting when a schedule or sensor tick fails +You can set up alerts to fire when any schedule or sensor tick across your entire deployment fails. + +Alerts are sent only when a schedule/sensor transitions from **success** to **failure**, so only the initial failure will trigger the alert. + + + 1. In the Dagster UI, click **Deployment**. +2. Click the **Alerts** tab. +3. Click **Add alert policy**. +4. Select **Schedule/Sensor alert** from the dropdown. + + + Execute the following command to sync the configured alert policy to your Dagster+ deployment. + + ```bash + dagster-cloud deployment alert-policies sync -a /path/to/alert_policies.yaml + ``` + + + + + + + + + + + + + + + + + + +## Alerting when a code location fails to load +You can set up alerts to fire when any code location fails to load due to an error. + + + 1. In the Dagster UI, click **Deployment**. +2. Click the **Alerts** tab. +3. Click **Add alert policy**. +4. Select **Code location error alert** from the dropdown. + + + Execute the following command to sync the configured alert policy to your Dagster+ deployment. + + ```bash + dagster-cloud deployment alert-policies sync -a /path/to/alert_policies.yaml + ``` + + + + + + + + + + + + + + + + + + +## Alerting when a Hybrid agent becomes unavailable +:::note +This is only available for [Hybrid](/todo) deployments. +::: + +You can set up alerts to fire if your Hybrid agent hasn't sent a heartbeat in the last 5 minutes. + + + 1. In the Dagster UI, click **Deployment**. +2. Click the **Alerts** tab. +3. Click **Add alert policy**. +4. Select **Agent downtime alert** from the dropdown.
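If you configure this alert through the CLI command that follows, the corresponding `alert_policies.yaml` entry is small. A sketch, assuming `AGENT_UNAVAILABLE` is the event type for missed agent heartbeats (the policy name and address are placeholders):

```yaml
# Added alongside the other policies in alert_policies.yaml; confirm the
# AGENT_UNAVAILABLE event type against your dagster-cloud version.
alert_policies:
  - name: "agent-downtime"
    description: "Notify on-call when the Hybrid agent stops heartbeating"
    event_types:
      - "AGENT_UNAVAILABLE"
    notification_service:
      email:
        email_addresses:
          - "oncall@example.com"
```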
+ + + Execute the following command to sync the configured alert policy to your Dagster+ deployment. + + ```bash + dagster-cloud deployment alert-policies sync -a /path/to/alert_policies.yaml + ``` + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/docs/docs-beta/docs/dagster-plus/deployment/alerts/cli.md b/docs/docs-beta/docs/dagster-plus/deployment/alerts/cli.md new file mode 100644 index 0000000000000..9875be88b9b7c --- /dev/null +++ b/docs/docs-beta/docs/dagster-plus/deployment/alerts/cli.md @@ -0,0 +1,6 @@ +--- +title: "Dagster+ alerts with the CLI" +unlisted: true +--- + +# Alerts with the CLI diff --git a/docs/docs-beta/docs/dagster-plus/deployment/alerts/email.md b/docs/docs-beta/docs/dagster-plus/deployment/alerts/email.md new file mode 100644 index 0000000000000..b99b231da11ac --- /dev/null +++ b/docs/docs-beta/docs/dagster-plus/deployment/alerts/email.md @@ -0,0 +1,6 @@ +--- +title: "Dagster+ email alerts" +unlisted: true +--- + +# Dagster+ email alerts diff --git a/docs/docs-beta/docs/dagster-plus/deployment/alerts/microsoft-teams.md b/docs/docs-beta/docs/dagster-plus/deployment/alerts/microsoft-teams.md new file mode 100644 index 0000000000000..87dcdbecf01a8 --- /dev/null +++ b/docs/docs-beta/docs/dagster-plus/deployment/alerts/microsoft-teams.md @@ -0,0 +1,6 @@ +--- +title: "Dagster+ Microsoft Teams alerts" +unlisted: true +--- + +# Dagster+ Microsoft Teams alerts diff --git a/docs/docs-beta/docs/dagster-plus/deployment/alerts/pagerduty.md b/docs/docs-beta/docs/dagster-plus/deployment/alerts/pagerduty.md new file mode 100644 index 0000000000000..6348eb84d7608 --- /dev/null +++ b/docs/docs-beta/docs/dagster-plus/deployment/alerts/pagerduty.md @@ -0,0 +1,6 @@ +--- +title: "Dagster+ PagerDuty alerts" +unlisted: true +--- + +# Dagster+ PagerDuty alerts diff --git a/docs/docs-beta/docs/dagster-plus/deployment/alerts/slack.md b/docs/docs-beta/docs/dagster-plus/deployment/alerts/slack.md new file mode 100644 index 0000000000000..7750f783e4e1f --- /dev/null +++ b/docs/docs-beta/docs/dagster-plus/deployment/alerts/slack.md @@ -0,0 +1,6 @@ +--- +title: "Dagster+ Slack alerts" +unlisted: true +--- + +# Dagster+ Slack alerts diff --git a/docs/docs-beta/docs/dagster-plus/deployment/alerts/ui.md b/docs/docs-beta/docs/dagster-plus/deployment/alerts/ui.md new file mode 100644 index 0000000000000..15555e841c632 --- /dev/null +++ b/docs/docs-beta/docs/dagster-plus/deployment/alerts/ui.md @@ -0,0 +1,6 @@ +--- +title: "Dagster+ alerts in the UI" +unlisted: true +--- + +# Alerts in the UI diff --git a/docs/docs-beta/docs/dagster-plus/deployment/branch-deployments.md b/docs/docs-beta/docs/dagster-plus/deployment/branch-deployments.md new file mode 100644 index 0000000000000..004dd178c1c7e --- /dev/null +++ b/docs/docs-beta/docs/dagster-plus/deployment/branch-deployments.md @@ -0,0 +1,8 @@ +--- +title: "Branch Deployments (CI)" +displayed_sidebar: "dagsterPlus" +sidebar_position: 3 +unlisted: true +--- + +# Branch Deployments (CI) diff --git a/docs/docs-beta/docs/dagster-plus/deployment/branch-deployments/change-tracking.md b/docs/docs-beta/docs/dagster-plus/deployment/branch-deployments/change-tracking.md new file mode 100644 index 0000000000000..77a183daf1ab4 --- /dev/null +++ b/docs/docs-beta/docs/dagster-plus/deployment/branch-deployments/change-tracking.md @@ -0,0 +1,9 @@ +--- +title: "Change Tracking in Branch Deployments" +displayed_sidebar: "dagsterPlus" +sidebar_position: 4 +sidebar_label: "Change Tracking" +unlisted: true +--- + +# Change Tracking in Branch 
Deployments diff --git a/docs/docs-beta/docs/dagster-plus/deployment/branch-deployments/dagster-cloud-cli.md b/docs/docs-beta/docs/dagster-plus/deployment/branch-deployments/dagster-cloud-cli.md new file mode 100644 index 0000000000000..5794c59abf1ae --- /dev/null +++ b/docs/docs-beta/docs/dagster-plus/deployment/branch-deployments/dagster-cloud-cli.md @@ -0,0 +1,9 @@ +--- +title: "Branch Deployments & the dagster-cloud CLI" +displayed_sidebar: "dagsterPlus" +sidebar_position: 3 +sidebar_label: "dagster-cloud CLI" +unlisted: true +--- + +# Use Branch Deployments with the dagster-cloud CLI diff --git a/docs/docs-beta/docs/dagster-plus/deployment/branch-deployments/setting-up-branch-deployments.md b/docs/docs-beta/docs/dagster-plus/deployment/branch-deployments/setting-up-branch-deployments.md new file mode 100644 index 0000000000000..9e47800077f89 --- /dev/null +++ b/docs/docs-beta/docs/dagster-plus/deployment/branch-deployments/setting-up-branch-deployments.md @@ -0,0 +1,449 @@ +--- +title: "Setting up Branch Deployments" +displayed_sidebar: "dagsterPlus" +sidebar_position: 1 +sidebar_label: "Setting up branch deployments" +toc_max_heading_level: 2 +--- + +In this guide, we'll walk you through setting up Branch Deployments for a code location. Once you're finished, any time a PR is created or updated in your repository, it will automatically create or update an associated branch deployment in Dagster+. + +
      + Prerequisites + +To follow the steps in this guide, you'll need: + +- **Organization Admin** permissions in Dagster+ +- The ability to run a new agent in your infrastructure (only if you are using a **Hybrid deployment**) + +
+ +## Step 1: Choose a method + +Choose a method for setting up branch deployments: + + + + +You can set up GitHub to automatically create branch deployments for new PRs, using GitHub Actions. + +Using this approach to branch deployments may be a good fit if: + +- You use **GitHub** for version control +- You want Dagster to fully automate Branch Deployments + +This approach is simplified if you use the [GitHub integration](/todo) to import your project into Dagster+. + + + + +You can set up GitLab to automatically create branch deployments for new PRs, using GitLab's CI/CD workflow. + +Using this approach to branch deployments may be a good fit if: + +- You use **GitLab** for version control +- You want Dagster to fully automate Branch Deployments + +This approach is simplified if you use the [GitLab integration](/todo) to import your project into Dagster+. + + + + +You can manually execute dagster-cloud CLI commands to deploy and manage branch deployments. + +Using this approach to branch deployments may be a good fit if: + +- You don't use GitHub or GitLab for version control +- You use an alternative CI platform +- You want full control over Branch Deployment configuration + +This is a more advanced option than the other methods. + + + + +## Step 2: Generate a Dagster+ agent token + +In this step, you'll generate a token for the Dagster+ agent. The Dagster+ agent will use this to authenticate to the agent API. + +1. Sign in to your Dagster+ instance. +2. Click the **user menu (your icon) > Organization Settings**. +3. In the **Organization Settings** page, click the **Tokens** tab. +4. Click the **Create agent token** button. +5. After the token has been created, click **Reveal token**. + +Keep the token somewhere handy - you'll need it to complete the setup. + +## Step 3: Create and configure an agent + +:::note +If using [Serverless deployment](/dagster-plus/deployment/serverless), this step can be skipped. +::: + +While you can use your existing production agent, we recommend creating a dedicated branch deployment agent. This ensures that your production instance isn't negatively impacted by the workload associated with branch deployments. + + + + + 1. **Deploy an ECS agent to serve your branch deployments**. Follow the [ECS agent](/dagster-plus/deployment/hybrid/agents/amazon-ecs-new-vpc) setup guide, making sure to set the **Enable Branch Deployments** parameter if using the CloudFormation template. If you are running an existing agent, follow the [upgrade guide](/dagster-plus/deployment/hybrid/agents/amazon-ecs-existing-vpc) to ensure your template is up-to-date. Then, turn on the **Enable Branch Deployments** parameter. + + 2. **Create a private [Amazon Elastic Container Registry (ECR) repository](https://console.aws.amazon.com/ecr/repositories).** Refer to the [AWS ECR documentation](https://docs.aws.amazon.com/AmazonECR/latest/userguide/repository-create.html) for instructions. + + After the repository has been created, navigate back to the list of [ECR repositories](https://console.aws.amazon.com/ecr/repositories). + + In the list, locate the repository and its **URI**: + + ![Show this in the UI](/img/placeholder.svg) + + Keep this around, as you'll need it in a later step. + + 3.
[**Create an IAM user.**](https://docs.aws.amazon.com/IAM/latest/UserGuide/id_users_create.html) This user must: + + - Have push access to the ECR repository, and + - Have programmatic access to AWS using an [access key](https://docs.aws.amazon.com/IAM/latest/UserGuide/id_credentials_access-keys.html) + + After the user is created, save the **Access key ID** and **Secret access key** values shown on the confirmation page: + + ![Show this in the UI](/img/placeholder.svg) + + + + + 1. Set up a new Docker agent. Refer to the [Docker agent setup guide](/dagster-plus/deployment/hybrid/agents/docker) for instructions. + 2. After the agent is set up, modify the `dagster.yaml` file as follows: + + - Set the `dagster_cloud_api.branch_deployments` field to `true` + - Remove any `deployment` field(s) + + For example: + + + + + + + 1. Set up a new Kubernetes agent. Refer to the [Kubernetes agent setup guide](/dagster-plus/deployment/hybrid/agents/kubernetes) for instructions. + + 2. After the agent is set up, modify your Helm values file to include the following: + + + + + + +## Step 4: Set up branch deployments + + + + +### Step 4.1: Add GitHub CI/CD script to your project +:::note +If you used the GitHub app to configure your repository, this step isn't required. [Skip ahead to Step 4.5](#step-45-verify-github-action-runs) +::: + +Copy the following files to your project, and **replace** all references to `quickstart-etl` with the name of your project: + +- [`dagster_cloud.yaml`](https://github.com/dagster-io/dagster-cloud-hybrid-quickstart/blob/main/dagster_cloud.yaml) +- [`.github/workflows/dagster-cloud-deploy.yml`](https://github.com/dagster-io/dagster-cloud-hybrid-quickstart/blob/main/.github/workflows/dagster-cloud-deploy.yml) (for **Hybrid** deployments) +- [`.github/workflows/branch_deployments.yml`](https://github.com/dagster-io/dagster-cloud-serverless-quickstart/blob/main/.github/workflows/branch_deployments.yml) (for **Serverless** deployments) + +In the next step, you'll modify these files to work with your Dagster+ setup. + +### Step 4.2: Add the agent registry to dagster_cloud.yaml + +:::note +If you used the GitHub app to configure your repository, this step isn't required. [Skip ahead to Step 4.5](#step-45-verify-github-action-runs) +::: + +In the `dagster_cloud.yaml` file, replace `build.registry` with the registry used by the [agent you created in Step 3](#step-3-create-and-configure-an-agent). + +For example: + + + +### Step 4.3: Configure GitHub Action secrets + +:::note +If you used the GitHub app to configure your repository, this step isn't required. [Skip ahead to Step 4.5](#step-45-verify-github-action-runs) +::: + +1. In your GitHub repository, click the **Settings** tab. +2. In the **Security** section of the sidebar, click **Secrets > Actions**. +3. Click **New repository secret**. +4. In the **Name** field, enter the name of the secret. For example, `DAGSTER_CLOUD_API_TOKEN`. +5. In the **Value** field, paste the value of the secret. +6. Click **Add secret**. + +Repeat steps 3-6 for each of the secrets required for the registry used by the agent you created in Step 3.
See below for more details: + + + + + +- `DAGSTER_CLOUD_API_TOKEN` - The Dagster+ agent token you created in [Step 2](#step-2-generate-a-dagster-agent-token) +- `DAGSTER_CLOUD_URL` - Your Dagster+ base URL (`https://my_org.dagster.cloud`) +- `DOCKERHUB_USERNAME` - Your DockerHub username +- `DOCKERHUB_TOKEN` - A DockerHub [access token](https://docs.docker.com/docker-hub/access-tokens/#create-an-access-token) + + + + + +- `DAGSTER_CLOUD_API_TOKEN` - The Dagster+ agent token you created in [Step 2](#step-2-generate-a-dagster-agent-token) +- `DAGSTER_CLOUD_URL` - Your Dagster+ base URL (`https://my_org.dagster.cloud`) +- `AWS_ACCESS_KEY_ID` - The **Access key ID** of the AWS IAM user you created in [Step 3](#step-3-create-and-configure-an-agent) +- `AWS_SECRET_ACCESS_KEY` - The **Secret access key** of the AWS IAM user you created in [Step 3](#step-3-create-and-configure-an-agent) +- `AWS_REGION` - The AWS region where your ECR registry is located + + + + + +- `DAGSTER_CLOUD_API_TOKEN` - The Dagster+ agent token you created in [Step 2](#step-2-generate-a-dagster-agent-token) +- `DAGSTER_CLOUD_URL` - Your Dagster+ base URL (`https://my_org.dagster.cloud`) +- `GCR_JSON_KEY` - Your GCR JSON credentials + + + + + +### Step 4.4: Configure GitHub Action + +:::note +If you used the GitHub app to configure your repository, this step isn't required. [Skip ahead to Step 4.5](#step-45-verify-github-action-runs) +::: + +In this step, you'll update the GitHub workflow files in your repository to set up Docker registry access. + +In the `.github/workflows/dagster-cloud-deploy.yml` file, un-comment the `step` associated with your registry. For example, for an Amazon ECR registry, you'd un-comment the following portion of the workflow file: + +```yaml +jobs: + dagster-cloud-deploy: + steps: + - name: Configure AWS credentials + uses: aws-actions/configure-aws-credentials@v1 + with: + aws-access-key-id: ${{ secrets.AWS_ACCESS_KEY_ID }} + aws-secret-access-key: ${{ secrets.AWS_SECRET_ACCESS_KEY }} + aws-region: ${{ secrets.AWS_REGION }} +``` + +Save and commit the file to your repository. + +### Step 4.5: Verify GitHub action runs + +The last step is to verify that the GitHub Action runs successfully. + +1. In the repository, click the **Actions** tab. +2. In the list of workflows, locate the latest branch deployment run. For example: + +![Show this in the UI](/img/placeholder.svg) + + + + +### Step 4.1: Add GitLab CI/CD script to your project + +:::note +If you used the GitLab app to configure your repository, this step isn't required. [Skip ahead to Step 4.5](#step-45-verify-gitlab-pipeline-runs) +::: + +Copy the following files to your project, and **replace** all references to `quickstart-etl` with the name of your project: + +- [`dagster_cloud.yaml`](https://github.com/dagster-io/dagster-cloud-hybrid-quickstart/blob/main/dagster_cloud.yaml) +- [`.gitlab-ci.yml`](https://github.com/dagster-io/dagster-cloud-action/blob/main/gitlab/hybrid-ci.yml) (for **Hybrid** deployments) +- [`.gitlab-ci.yml`](https://github.com/dagster-io/dagster-cloud-action/blob/main/gitlab/serverless-ci.yml) (for **Serverless** deployments) + +In the next step, you'll modify these files to work with your Dagster+ setup. + +### Step 4.2: Add the agent registry to dagster_cloud.yaml + +:::note +If you used the GitLab app to configure your repository, this step isn't required.
[Skip ahead to Step 4.5](#step-45-verify-gitlab-pipeline-runs) +::: + + +In the `dagster_cloud.yaml` file, replace `build.registry` with the registry used by the [agent you created in Step 3](#step-3-create-and-configure-an-agent). + +For example: + + + +### Step 4.3: Configure GitLab CI/CD variables + +:::note +If you used the GitLab app to configure your repository, this step isn't required. [Skip ahead to Step 4.5](#step-45-verify-gitlab-pipeline-runs) +::: + + +1. In your project, click the **Settings** tab. +2. In the **CI/CD** section of the sidebar, expand **Variables**. +3. Click **Add variable**. +4. In the **Key** field, enter the name of the variable. For example, `DAGSTER_CLOUD_API_TOKEN`. +5. In the **Value** field, paste the value of the variable. +6. Click **Add variable**. + +Repeat steps 3-6 for each of the secrets required for your registry type: + + + + + +- `DAGSTER_CLOUD_API_TOKEN` - The Dagster+ agent token you created in [Step 2](#step-2-generate-a-dagster-agent-token) +- `DAGSTER_CLOUD_URL` - Your Dagster+ base URL (`https://my_org.dagster.cloud`) + + + + + +- `DAGSTER_CLOUD_API_TOKEN` - The Dagster+ agent token you created in [Step 2](#step-2-generate-a-dagster-agent-token) +- `DAGSTER_CLOUD_URL` - Your Dagster+ base URL (`https://my_org.dagster.cloud`) +- `DOCKERHUB_USERNAME` - Your DockerHub username +- `DOCKERHUB_TOKEN` - A DockerHub [access token](https://docs.docker.com/docker-hub/access-tokens/#create-an-access-token) + + + + + +- `DAGSTER_CLOUD_API_TOKEN` - The Dagster+ agent token you created in [Step 2](#step-2-generate-a-dagster-agent-token) +- `DAGSTER_CLOUD_URL` - Your Dagster+ base URL (`https://my_org.dagster.cloud`) +- `AWS_ACCESS_KEY_ID` - The **Access key ID** of the AWS IAM user you created in [Step 3](#step-3-create-and-configure-an-agent) +- `AWS_SECRET_ACCESS_KEY` - The **Secret access key** of the AWS IAM user you created in [Step 3](#step-3-create-and-configure-an-agent) +- `AWS_REGION` - The AWS region where your ECR registry is located + + + + + +- `DAGSTER_CLOUD_API_TOKEN` - The Dagster+ agent token you created in [Step 2](#step-2-generate-a-dagster-agent-token) +- `DAGSTER_CLOUD_URL` - Your Dagster+ base URL (`https://my_org.dagster.cloud`) +- `GCR_JSON_KEY` - Your GCR JSON credentials + + + + + +### Step 4.4: Configure GitLab CI/CD script + +:::note +If you used the GitLab app to configure your repository, this step isn't required. [Skip ahead to Step 4.5](#step-45-verify-gitlab-pipeline-runs) +::: + +In this step, you'll update the GitLab CI/CD config to set up Docker registry access. + +In the `.gitlab-ci.yml` file, un-comment the `step` associated with your registry. For example, for the GitLab container registry, you'd un-comment the following portion of the `.gitlab-ci.yml` file: + +```yaml +build-image: + ... + before_script: + # For GitLab Container Registry + - echo $CI_JOB_TOKEN | docker login --username $CI_REGISTRY_USER --password-stdin $REGISTRY_URL +``` + +Save and commit the files to the project. + +### Step 4.5: Verify GitLab pipeline runs + +The last step is to verify that the GitLab pipeline runs successfully. + +1. On the project page, click the **CI/CD** tab. +2. In the list of pipelines, locate the latest branch deployment run. For example: + +![Show this in the UI](/img/placeholder.svg) + + + + +Whenever the state of your branch is updated, Dagster+ expects the following steps to occur: + +1. A new image containing your code and requirements is built on the branch.
Refer to [Managing code locations](/todo) to learn more. + +2. The new image is pushed to a Docker registry accessible to your agent. + +The details of how this is accomplished depend on your specific CI/CD solution. + +:::note + +The following examples assume the registry URL and image tag are stored in the `LOCATION_REGISTRY_URL` and `IMAGE_TAG` environment variables. + +::: + +### Step 4.1 Create a branch deployment associated with the branch + +Execute the following command within your CI/CD process: + + ```shell + # $DAGSTER_CLOUD_API_TOKEN is the agent token from Step 2. $REPOSITORY_NAME, + # $BRANCH_NAME, $COMMIT_SHA, and $TIMESTAMP are the Git repository name, the + # Git branch name, the latest commit SHA on the branch, and the UTC unixtime + # timestamp of the latest commit, respectively. + BRANCH_DEPLOYMENT_NAME=$( + dagster-cloud branch-deployment create-or-update \ + --organization $ORGANIZATION_NAME \ + --api-token $DAGSTER_CLOUD_API_TOKEN \ + --git-repo-name $REPOSITORY_NAME \ + --branch-name $BRANCH_NAME \ + --commit-hash $COMMIT_SHA \ + --timestamp $TIMESTAMP + ) + ``` + +One or more additional parameters can optionally be supplied to the `create-or-update` command to enhance the Branch Deployments UI in Dagster+: + +```shell +# Optional flags: +# --code-review-url URL to review the given changes, e.g. a Pull Request or Merge Request +# --code-review-id Alphanumeric ID for the given set of changes +# --pull-request-status A status, one of `OPEN`, `CLOSED`, or `MERGED`, that describes the set of changes +# --commit-message The message associated with the latest commit +# --author-name A display name for the latest commit's author +# --author-email An email for the latest commit's author +# --author-avatar-url An avatar URL for the latest commit's author +# --base-deployment-name The main deployment that will be compared against. Default is 'prod'. +BRANCH_DEPLOYMENT_NAME=$( + dagster-cloud branch-deployment create-or-update \ + --organization $ORGANIZATION_NAME \ + --api-token $DAGSTER_CLOUD_API_TOKEN \ + --git-repo-name $REPOSITORY_NAME \ + --branch-name $BRANCH_NAME \ + --commit-hash $COMMIT_SHA \ + --timestamp $TIMESTAMP \ + --code-review-url $PR_URL \ + --code-review-id $INPUT_PR \ + --pull-request-status $PR_STATUS \ + --commit-message $MESSAGE \ + --author-name $NAME \ + --author-email $EMAIL \ + --author-avatar-url $AVATAR_URL \ + --base-deployment-name $BASE_DEPLOYMENT_NAME +) +``` + +If the command is being executed from the context of the git repository, you can alternatively pull this metadata from the repository itself: + +```shell +BRANCH_DEPLOYMENT_NAME=$( + dagster-cloud branch-deployment create-or-update \ + --organization $ORGANIZATION_NAME \ + --api-token $DAGSTER_CLOUD_API_TOKEN \ + --git-repo-name $REPOSITORY_NAME \ + --branch-name $BRANCH_NAME \ + --read-git-state # Equivalent to passing --commit-hash, --timestamp, + # --commit-message, --author-name, and --author-email +) +``` + +### Step 4.2 Deploy your code to the branch deployment + +Execute the following command within your CI/CD process: + +```shell +dagster-cloud deployment add-location \ + --organization $ORGANIZATION_NAME \ + --deployment $BRANCH_DEPLOYMENT_NAME \ + --api-token $DAGSTER_CLOUD_API_TOKEN \ + --location-file $LOCATION_FILE \ + --location-name $LOCATION_NAME \ + --image "${LOCATION_REGISTRY_URL}:${IMAGE_TAG}" \ + --commit-hash "${COMMIT_SHA}" \ + --git-url "${GIT_URL}" +``` + + + + +## Next steps + +- Learn more about [Branch Deployments](/dagster-plus/deployment/branch-deployments) +- Learn how to [Track changes on a Branch Deployment](/dagster-plus/deployment/branch-deployments/change-tracking) diff --git a/docs/docs-beta/docs/dagster-plus/deployment/branch-deployments/testing.md b/docs/docs-beta/docs/dagster-plus/deployment/branch-deployments/testing.md new file mode 100644 index 0000000000000..ccd134eb8eb2b --- /dev/null +++ b/docs/docs-beta/docs/dagster-plus/deployment/branch-deployments/testing.md @@ -0,0 +1,4 @@ +--- +title: "Testing against production with Branch Deployments" +unlisted: true +--- diff --git a/docs/docs-beta/docs/dagster-plus/deployment/code-locations.md b/docs/docs-beta/docs/dagster-plus/deployment/code-locations.md new file mode 100644 index 0000000000000..093e6ee4391ab --- /dev/null +++ b/docs/docs-beta/docs/dagster-plus/deployment/code-locations.md @@ -0,0 +1,239 @@ +--- +title: "Code locations" +displayed_sidebar: "dagsterPlus" +--- + +# Code locations + +Separate code locations allow you to deploy different projects that still roll up into a single Dagster+ deployment with one global lineage graph. + +This guide will cover three options for adding a new code location: +- Adding a code location manually +- Adding a code location in a new Git repository +- Adding a new code location to an existing Git monorepo +
+Prerequisites + +1. An existing Dagster project. Refer to the [recommended project structure](/tutorial/create-new-project) and [code requirements](/dagster-plus/deployment/code-requirements) pages for more information. + +2. Editor, Admin, or Organization Admin permissions in Dagster+. + +3. Knowledge of your Dagster+ deployment type. An administrator can help you find this information, or you can locate it by clicking *Deployment > Agents tab* in the Dagster UI. Dagster+ Serverless organizations will have a *Managed by Dagster+* label next to the agent. + +
+ +Adding a code location follows two steps: +- For Dagster+ Hybrid, ensuring the Dagster code is in a place accessible by your agent, usually by building a Docker image with your code that's pushed to a registry. For Dagster+ Serverless you can skip this step. +- Notifying Dagster+ of the new or updated code location. This will be done by using the Dagster+ Python client. + +Often these two steps are handled by CI/CD connected to your Git repository. + + +## Adding a code location manually + +Start by installing the `dagster-cloud` Python client: + +``` +pip install dagster-cloud +``` + +Next you will need to authenticate this Python client: + +1. In the Dagster+ UI, click the user icon in the upper right corner. +2. Click **Organization settings**, then the **Tokens** tab. +3. Click the **Create user token** button. +4. Copy the token. +5. Set the following environment variables: + + ```bash + export DAGSTER_CLOUD_ORGANIZATION="organization-name" # if your URL is https://acme.dagster.cloud your organization name is "acme" + export DAGSTER_CLOUD_API_TOKEN="your-token" + ``` + +Now add the code location. The following example assumes you are running the command from the top-level working directory of your Dagster project with a project named "quickstart" structured as a Python module named "quickstart". + +```bash +/quickstart + setup.py + pyproject.toml + /quickstart + __init__.py + /assets + /resources +``` + +The commands below take two main arguments: +- `module_name` is determined by your code structure +- `location_name` is the unique label for this code location used in Dagster+ + + + + +If you are using Dagster+ Serverless, run the following command to add a code location: + +```bash +dagster-cloud serverless deploy-python-executable --deployment prod --location-name quickstart --module-name quickstart +``` + +Running the command multiple times with the same location name will *update* the code location. Running the command with a new location name will *add* a code location. + + + +If you are using Dagster+ Hybrid, make sure you have deployed the code appropriately by [building a Docker image and pushing it to an image registry](https://github.com/dagster-io/dagster-cloud-hybrid-quickstart). Then run this command, using the image URI which is available from your registry: + +```bash +dagster-cloud deployment add-location --deployment prod --location-name quickstart --module-name quickstart --image 764506304434.dkr.ecr.us-west-2.amazonaws.com/hooli-data-science-prod:latest +``` + + + +After running the command you can verify the code location was deployed by navigating to the *Deployments* tab on Dagster+. + +## Adding a code location in a new Git repository + +Adding a code location to a Git repository follows the same steps as adding a code location manually, but automates those steps by running them through CI/CD. + +To get started, review the appropriate example repository and then create your Git repository with the same structure. + +- [GitHub repository with Dagster+ Serverless](https://github.com/dagster-io/dagster-cloud-serverless-quickstart/) +- [GitHub repository with Dagster+ Hybrid](https://github.com/dagster-io/dagster-cloud-hybrid-quickstart/) +- [GitLab CI/CD for Dagster+ Serverless](https://github.com/dagster-io/dagster-cloud-action/blob/main/gitlab/serverless-ci.yml) +- [GitLab CI/CD for Dagster+ Hybrid](https://github.com/dagster-io/dagster-cloud-action/blob/main/gitlab/hybrid-ci.yml) + + +Overall, the Git repository should contain: + +1.
Your Dagster code, structured as a Python module. For Dagster+ Hybrid you may need a [Dockerfile](https://github.com/dagster-io/dagster-cloud-hybrid-quickstart/blob/main/Dockerfile) as well. The repository might look like this: + + ```bash + README.md + dagster_cloud.yaml + Dockerfile + /.github + /workflows + dagster-cloud-deploy.yml + setup.py + pyproject.toml + /quickstart + __init__.py + definitions.py + /assets + ... + /resources + ... + ``` + +2. A [`dagster_cloud.yaml` file](/todo) with the settings for your code location. Here is an example: + + ```yaml title="dagster_cloud.yaml" + locations: + - location_name: quickstart + code_source: + package_name: quickstart + ``` + +3. A CI/CD workflow file that contains the steps for adding your code location. These are the same steps outlined in the preceding section. Here is a minimal example workflow file for a Dagster+ Hybrid organization based on [this GitLab template](https://github.com/dagster-io/dagster-cloud-action/blob/main/gitlab/hybrid-ci.yml). + + ```yaml + variables: + DAGSTER_CLOUD_ORGANIZATION: + DAGSTER_PROJECT_DIR: . + IMAGE_REGISTRY: .dkr.ecr.us-west-2.amazonaws.com/ + IMAGE_TAG: $CI_COMMIT_SHORT_SHA-$CI_PIPELINE_ID + + stages: + - build + - deploy + + build: + stage: build + image: docker:latest + services: + - docker:dind + before_script: + # # For Gitlab Container Registry + # - echo $CI_JOB_TOKEN | docker login --username $CI_REGISTRY_USER --password-stdin $REGISTRY_URL + # # For DockerHub + # - echo $DOCKERHUB_TOKEN | docker login --username $DOCKERHUB_USERNAME --password-stdin $REGISTRY_URL + # # For AWS Elastic Container Registry (ECR) + # - apk add --no-cache curl jq python3 py3-pip + # - pip install awscli + # - echo $AWS_ECR_PASSWORD | docker login --username AWS --password-stdin $IMAGE_REGISTRY + # # For Google Container Registry (GCR) + # - echo $GCR_JSON_KEY | docker login --username _json_key --password-stdin $REGISTRY_URL + script: + - docker build . -t $IMAGE_REGISTRY:$IMAGE_TAG + - docker push $IMAGE_REGISTRY:$IMAGE_TAG + + deploy: + stage: deploy + dependencies: + - build + image: ghcr.io/dagster-io/dagster-cloud-action:0.1.43 + script: + - dagster-cloud deployment add-location --deployment prod --image + $IMAGE_REGISTRY:$IMAGE_TAG --location-name quickstart --package-name quickstart + ``` + +Once your Git repository has this structure, you will want to run your CI/CD process. The CI/CD process will add the code location to Dagster+, which can be verified by viewing the *Deployments* tab. + +## Adding a new code location to a Git monorepo + +Many organizations use a Git monorepo to contain multiple Dagster projects. Here is an example of Dagster Labs' own [internal data engineering Git repository](https://github.com/dagster-io/dagster-open-platform). + +To add a new code location to a monorepo, create a new directory that contains your Dagster project. The final repository structure might look like this: + +``` +README.md +dagster_cloud.yaml +/.github + ... +/shared + setup.py + pyproject.toml + /shared + __init__.py + utilities.py +/core + setup.py + pyproject.toml + Dockerfile + /core + definitions.py + __init__.py + /assets +/new-code-location + setup.py + pyproject.toml + Dockerfile + /new-code-location + definitions.py + __init__.py + /assets +``` + +Then update the `dagster_cloud.yaml` file in the root of the Git repository, adding a location section for your project including the location name, code source, and build directory. For Dagster+ Hybrid, include the registry.
If you don't know the registry, consult your administrator or the team that set up CI/CD for the Git repository. + +```yaml +locations: + - location_name: core + code_source: + package_name: core + build: + directory: ./core + registry: your-registry/image # eg 764506304434.dkr.ecr.us-west-2.amazonaws.com/core + - location_name: new + code_source: + package_name: new + build: + directory: ./new + registry: your-registry/image # eg 764506304434.dkr.ecr.us-west-2.amazonaws.com/new +``` + +The monorepo should have CI/CD configured to deploy your changes and add or update your new code location. After adding your code and updating the `dagster_cloud.yaml` file, trigger the CI/CD process to add your code location to Dagster+. Navigate to the *Deployments* tab in Dagster+ to confirm your code location was added. + +## Next steps + +- After adding a code location, you may want to set up access controls +- You may want to add additional configuration to your code location. This configuration will vary by agent type, but see examples for [setting default resource limits for Kubernetes](/dagster-plus/deployment/hybrid/agents/kubernetes) or [changing the IAM role for ECS](/todo). diff --git a/docs/docs-beta/docs/dagster-plus/deployment/code-locations/code-location-history.md b/docs/docs-beta/docs/dagster-plus/deployment/code-locations/code-location-history.md new file mode 100644 index 0000000000000..932f32d19d1b7 --- /dev/null +++ b/docs/docs-beta/docs/dagster-plus/deployment/code-locations/code-location-history.md @@ -0,0 +1,57 @@ +--- +title: "Code location history and rollbacks" +displayed_sidebar: "dagsterPlus" +sidebar_position: 4 +sidebar_label: "Code location history and rollbacks" +--- + +# Code location history and rollbacks + +Dagster+ automatically tracks metadata every time a code location is loaded. This can be used to understand when changes have been made, and what those changes were. In addition, this metadata can be used to quickly redeploy an older version. + +
      + Prerequisites + +Before continuing, you should be familiar with: + +- [Code Locations](/dagster-plus/deployment/code-locations) + +
+ +## Viewing code location history + +1. In the Dagster+ UI, navigate to the **Deployment** tab. +2. In the row associated with the code location you're interested in, click **View history** in the **Updated** column. + +![Screenshot highlighting the "Updated" column for a code location](/img/placeholder.svg) + +This will bring up a modal showing a history of every time that code location has been loaded, and metadata associated with that load. If you have connected Dagster+ to a GitHub or GitLab repository, each row will have a link to the commit that was deployed at that point in time. + +If a code location has been deployed multiple times with identical metadata, these rows will be collapsed together. You can expand them by deselecting **Collapse similar entries** in the top left corner of the modal. + +This metadata will also include information regarding assets that have been **added**, **removed**, or **changed**. In the **Assets** column, you can see the keys of assets in any of these categories. + +![Screenshot highlighting the column that displays these keys](/img/placeholder.svg) + +Currently, changes to **code version**, **tags**, **metadata**, **dependencies**, and **partitions definition** are tracked. Clicking on any of these assets brings you to its **Asset details** page. Here, you can find the **Change history** tab, and see detailed information regarding each time the asset definition has changed. + +![Screenshot highlighting the Change History tab for an asset](/img/placeholder.svg) + +## Rolling back to a previous code location version + +:::note +To initiate a rollback, you'll need **Organization Admin**, **Admin**, or **Editor** permissions +::: + +If you notice an issue with newly deployed code, or your code fails to deploy successfully, you can quickly roll back to a previously deployed image that's known to work properly. + +1. In the Dagster+ UI, navigate to the **Deployment** tab. +2. In the row associated with the code location you're interested in, click **View history** in the **Updated** column. +3. In the **Actions** column, click the dropdown menu to the right of **View metadata** and select **Rollback to this version**. + +![Screenshot highlighting the "Updated" column for a code location](/img/placeholder.svg) + +## Next steps + +- Learn more about [Code Locations](/dagster-plus/deployment/code-locations) +- Learn how to [Alert when a code location fails to load](/dagster-plus/deployment/alerts#alerting-when-a-code-location-fails-to-load) diff --git a/docs/docs-beta/docs/dagster-plus/deployment/code-requirements.md b/docs/docs-beta/docs/dagster-plus/deployment/code-requirements.md new file mode 100644 index 0000000000000..4de13201cdd03 --- /dev/null +++ b/docs/docs-beta/docs/dagster-plus/deployment/code-requirements.md @@ -0,0 +1,45 @@ +--- +title: 'Dagster+ code requirements' +displayed_sidebar: 'dagsterPlus' +sidebar_label: "Code requirements" +--- + +Your Dagster project must meet a few requirements to run in Dagster+. + +
      + Prerequisites + +To follow the steps in this guide, you'll need: + +- A basic understanding of Python project structure and Docker +
      + +## General requirements + +:::tip +**Learn by example?** Check out [an example repo](https://github.com/dagster-io/hooli-data-eng-pipelines) which is set up to run in Dagster+. +::: + +To work with Dagster+, your Dagster code: + +- **Must be loaded from a single entry point: either a Python file or package.** This entry point can load repositories from other files or packages. + +- **Must run in an environment where the `dagster` and `dagster-cloud` 0.13.2 or later Python packages are installed.** + +**Note**: + +- Different code locations can use different versions of Dagster +- Dagster+ doesn't require a [`workspace.yaml` file](/todo). You can still create a `workspace.yaml` file to load your code in an open source Dagster webserver instance, but doing so won't affect how your code is loaded in Dagster+. + +## Hybrid deployment requirements + +If you're using [Hybrid Deployment](/dagster-plus/deployment/hybrid), there are a few additional requirements. + +- **If using an Amazon Elastic Container Service (ECS), Kubernetes, or Docker agent**, your code must be packaged into a Docker image and pushed to a registry your agent can access. Dagster+ doesn't need access to your image - the agent only needs to be able to pull it. + + Additionally, the Dockerfile for your image doesn't need to specify an entry point or command. These will be supplied by the agent when it runs your code using the supplied image. + +- **If using a local agent**, your code must be in a Python environment that can be accessed on the same machine as your agent. + +Additionally, your code doesn't need to use the same version of Dagster as your agent. + diff --git a/docs/docs-beta/docs/dagster-plus/deployment/deployment-types.md b/docs/docs-beta/docs/dagster-plus/deployment/deployment-types.md new file mode 100644 index 0000000000000..f8d3d29a1f9a6 --- /dev/null +++ b/docs/docs-beta/docs/dagster-plus/deployment/deployment-types.md @@ -0,0 +1,5 @@ +--- +unlisted: true +--- + +## Placeholder diff --git a/docs/docs-beta/docs/dagster-plus/deployment/environment-variables.md b/docs/docs-beta/docs/dagster-plus/deployment/environment-variables.md new file mode 100644 index 0000000000000..8c5b831ecc2d3 --- /dev/null +++ b/docs/docs-beta/docs/dagster-plus/deployment/environment-variables.md @@ -0,0 +1,7 @@ +--- +title: "Environment variables" +displayed_sidebar: "dagsterPlus" +unlisted: true +--- + +# Environment variables diff --git a/docs/docs-beta/docs/dagster-plus/deployment/environment-variables/agent-config.md b/docs/docs-beta/docs/dagster-plus/deployment/environment-variables/agent-config.md new file mode 100644 index 0000000000000..5fabc3547076b --- /dev/null +++ b/docs/docs-beta/docs/dagster-plus/deployment/environment-variables/agent-config.md @@ -0,0 +1,9 @@ +--- +title: "Set environment variables using agent config" +displayed_sidebar: "dagsterPlus" +sidebar_position: 2 +sidebar_label: "Set with agent config" +unlisted: true +--- + +# Set environment variables using agent config diff --git a/docs/docs-beta/docs/dagster-plus/deployment/environment-variables/built-in.md b/docs/docs-beta/docs/dagster-plus/deployment/environment-variables/built-in.md new file mode 100644 index 0000000000000..d81a2cbf93468 --- /dev/null +++ b/docs/docs-beta/docs/dagster-plus/deployment/environment-variables/built-in.md @@ -0,0 +1,37 @@ +--- +title: "Built-in environment variables" +displayed_sidebar: "dagsterPlus" +sidebar_position: 3 +sidebar_label: "Built-in variables" +--- + + +[Dagster+](/todo) provides a set of built-in, 
automatically populated environment variables, such as the name of a deployment or details about a branch deployment commit, that can be used to modify behavior based on environment. + +### All deployment variables + +The following variables are available in every deployment of your Dagster+ instance. + +| Key | Value | +|---|---| +| `DAGSTER_CLOUD_DEPLOYMENT_NAME` | The name of the Dagster+ deployment.

      **Example:** `prod`. | +| `DAGSTER_CLOUD_IS_BRANCH_DEPLOYMENT` | `1` if the deployment is a branch deployment. | + + +### Branch deployment variables + +The following environment variables are available only in a [branch deployment](/todo). + +For every commit made to a branch, the following environment variables are available: + +| Key | Value | +|---|---| +| `DAGSTER_CLOUD_GIT_SHA` | The SHA of the commit. | +| `DAGSTER_CLOUD_GIT_TIMESTAMP` | The Unix timestamp in seconds when the commit occurred.

      **Example:** `1724871941` | +| `DAGSTER_CLOUD_GIT_AUTHOR_EMAIL` | The email of the git user who authored the commit. | +| `DAGSTER_CLOUD_GIT_AUTHOR_NAME` | The name of the git user who authored the commit. | +| `DAGSTER_CLOUD_GIT_MESSAGE` | The message associated with the commit. | +| `DAGSTER_CLOUD_GIT_BRANCH` | The name of the branch associated with the commit. | +| `DAGSTER_CLOUD_GIT_REPO` | The name of the repository associated with the commit. | +| `DAGSTER_CLOUD_PULL_REQUEST_ID` | The ID of the pull request associated with the commit. | +| `DAGSTER_CLOUD_PULL_REQUEST_STATUS` | The status of the pull request at the time of the commit.

**Possible values:** `OPEN`, `CLOSED`, and `MERGED`. | diff --git a/docs/docs-beta/docs/dagster-plus/deployment/environment-variables/dagster-ui.md b/docs/docs-beta/docs/dagster-plus/deployment/environment-variables/dagster-ui.md new file mode 100644 index 0000000000000..514a13fda0705 --- /dev/null +++ b/docs/docs-beta/docs/dagster-plus/deployment/environment-variables/dagster-ui.md @@ -0,0 +1,93 @@ +--- +title: "Setting environment variables with the Dagster+ UI" +displayed_sidebar: "dagsterPlus" +sidebar_position: 1 +sidebar_label: "Set with Dagster+ UI" +--- + +Environment variables are key-value pairs that are set outside of your source code. Using environment variables lets you dynamically change the behavior of your application without modifying source code, and securely set up secrets. + +Dagster supports several approaches for [accessing environment variables in your code](/todo). You can also set environment variables in several ways, but this guide will focus on the Dagster+ UI. + 
diff --git a/docs/docs-beta/docs/dagster-plus/deployment/environment-variables/dagster-ui.md b/docs/docs-beta/docs/dagster-plus/deployment/environment-variables/dagster-ui.md
new file mode 100644
index 0000000000000..514a13fda0705
--- /dev/null
+++ b/docs/docs-beta/docs/dagster-plus/deployment/environment-variables/dagster-ui.md
@@ -0,0 +1,93 @@
+---
+title: "Setting environment variables with the Dagster+ UI"
+displayed_sidebar: "dagsterPlus"
+sidebar_position: 1
+sidebar_label: "Set with Dagster+ UI"
+---
+
+Environment variables are key-value pairs that are set outside of your source code. Using environment variables lets you dynamically change the behavior of your application without modifying source code, and securely manage secrets.
+
+Dagster supports several approaches for [accessing environment variables in your code](/todo). You can also set environment variables in several ways, but this guide will focus on the Dagster+ UI.
+
+ Prerequisites
+
+To follow the steps in this guide, you'll need:
+
+- **Organization Admin**, **Admin**, or **Editor** permissions for your Dagster+ account
+- To be using Dagster version 1.0.17 or later
+
+
+## Adding environment variables \{#add}
+
+Before you begin, use the deployment switcher to select the right deployment.
+
+1. Click the **+ Add environment variable** button.
+2. In the modal that displays, fill in the following:
+   - **Name** - Enter a name for the environment variable. This is how the variable will be referenced in your code.
+   - **Value** - Enter a value for the environment variable.
+   - **Deployment Scope** - Select the deployment(s) where the variable should be accessible:
+     - **Full deployment** - The variable will be available to selected code locations in the full deployment.
+     - **Branch deployments** - The variable will be available to selected code locations in Branch Deployments.
+     - **Local** - If selected, the variable will be included when [exporting environment variables to a local `.env` file](#export).
+   - **Code Location Scope** - Select the code location(s) where the variable should be accessible. At least one code location is required.
+
+{/* TODO replace placeholder image */}
+
+![Screenshot of adding environment variables](/img/placeholder.svg)
+
+3. Click **Save**.
+
+## Editing environment variables \{#edit}
+
+On the **Environment variables** page, edit an environment variable by clicking the **Edit** button in the **Actions** column.
+
+## Deleting environment variables \{#delete}
+
+On the **Environment variables** page, delete an environment variable by clicking the **Trash icon** in the **Actions** column.
+
+## Viewing environment variable values \{#view}
+
+On the **Environment variables** page, view an environment variable by clicking the **eye icon** in the **Value** column. To hide the value, click the **eye icon** again.
+
+:::note
+Viewing an environment variable only reveals the value to you. It doesn't show the value in plaintext to all users. If you navigate away from the environment variables page or reload the page, the value will be hidden again.
+:::
+
+## Exporting environment variables locally \{#export}
+
+1. On the **Environment variables** page, click the **arrow menu** to the right of the **+ Add environment variable** button.
+2. Click **Download local environment variables**.
+3. A file named `env.txt` will be downloaded.
+
+To use the downloaded environment variables for local Dagster development:
+
+1. Rename the downloaded `env.txt` file to `.env`.
+2. Move the file to the directory where you run `dagster dev` or `dagster-webserver`.
+3. Run `dagster dev`.
+
+If the environment variables were loaded successfully, you'll see a log message that begins with `Loaded environment variables from .env file`.
+
+## Setting environment-dependent variable values \{#environment-dependent-values}
+
+You can create multiple instances of the same environment variable key with different values, allowing you to provide different values to different deployment environments. For example, you may want to use different Snowflake credentials for your production deployment than in branch deployments.
+
+When you [add an environment variable](#add), you can select the deployment scope and code location scope for the environment variable. You can create multiple environment variables with different values and different scopes to customize the values in different deployment environments.
+ +For example, if you wanted to provide different Snowflake passwords for your production and branch deployments, you would make two environment variables with the same key: + +- For the **production** environment variable: + - Set the value as the production password, and + - Check only the **Full deployment** box +- For the **branch deployment** environment variable: + - Set the value as the branch deployment password, and + - Check only the **Branch deployments** box + +![Screenshot of environment variables](/img/placeholder.svg) + +## Next steps + +- Learn how to [access environment variables in Dagster code](/todo) +- Learn about the [built-in environment variables](https://docs.dagster.io/dagster-plus/managing-deployments/environment-variables-and-secrets#built-in-environment-variables) provided by Dagster+ diff --git a/docs/docs-beta/docs/dagster-plus/deployment/hybrid.md b/docs/docs-beta/docs/dagster-plus/deployment/hybrid.md new file mode 100644 index 0000000000000..631d802667a0c --- /dev/null +++ b/docs/docs-beta/docs/dagster-plus/deployment/hybrid.md @@ -0,0 +1,65 @@ +--- +title: "Hybrid deployment" +displayed_sidebar: "dagsterPlus" +sidebar_position: 2 +--- + +# Hybrid deployment + +In a Dagster+ Hybrid deployment, the orchestration control plane is run by Dagster+ while your Dagster code is executed within your environment. + +[comment]: <> (TODO: Architecture diagram) + +## Get started + +To get started with a Hybrid deployment you'll need to: + +1. Create a [Dagster+ organization](https://dagster.cloud/signup) +2. Install a Dagster+ Hybrid Agent +3. [Add a code location](/dagster-plus/deployment/code-locations), typically using a Git repository and CI/CD + +## Dagster+ Hybrid agents + +The Dagster+ agent is a long-lived process that polls Dagster+'s API servers for new work. + +See the following guides for setting up an agent: + - [Kubernetes](/dagster-plus/deployment/hybrid/agents/kubernetes) + - [AWS ECS](/dagster-plus/deployment/hybrid/agents/amazon-ecs-new-vpc) + - [Docker](/dagster-plus/deployment/hybrid/agents/docker) + - [Locally](/dagster-plus/deployment/hybrid/agents/local) + + +## What you'll see in your environment + +### Code location servers + +Dagster+ runs your Dagster projects through code locations. To get started, follow this guide for [adding a code location](/dagster-plus/deployment/code-locations). + +When you inform Dagster+ about a new code location, we enqueue instructions for your agent to launch a new code server. The agent uses your container image to launch a code server that interacts with your Dagster definitions. The agent will run one long-standing code server for each code location. Once the code server is running, the agent will send Dagster+ metadata about your Dagster definitions that Dagster+ uses to make orchestration decisions. + + +### Runs + +Your definitions might include [automations](/guides/automation) that launch runs or materialize assets. Or your developers might launch runs directly with the web UI. + +When a run needs to be launched, Dagster+ enqueues instructions for your agent to launch a new run. The next time your agent polls Dagster+ for new work, it will see instructions about how to launch your run. It will delegate those instructions to your code server and your code server will launch a run - a new run will typically require its own container. + +Your agent will send Dagster+ metadata letting us know the run has been launched. 
Your run's container will also send Dagster+ metadata informing us of how the run is progressing. The Dagster+ backend services will monitor this stream of metadata to make additional orchestration decisions, monitor for failure, or send alerts.
+
+## Security
+
+Dagster+ Hybrid relies on a shared security model.
+
+The Dagster+ control plane is SOC 2 Type II certified and follows best practices such as:
+- encrypting data at rest (AES 256) and in transit (TLS 1.2+)
+- maintaining high availability, with disaster recovery and backup strategies
+- storing only metadata such as pipeline names, execution status, and run duration
+
+The execution environment is managed by the customer:
+- your code never leaves your environment
+- all connections to databases, file systems, and other resources are made from your environment
+- the execution environment only requires egress access to Dagster+
+
+Common security considerations in Dagster+ Hybrid include:
+- [disabling log forwarding](/todo)
+- [managing tokens](/todo)
diff --git a/docs/docs-beta/docs/dagster-plus/deployment/hybrid/agents/amazon-ecs-existing-vpc.md b/docs/docs-beta/docs/dagster-plus/deployment/hybrid/agents/amazon-ecs-existing-vpc.md
new file mode 100644
index 0000000000000..eab7a6cb68660
--- /dev/null
+++ b/docs/docs-beta/docs/dagster-plus/deployment/hybrid/agents/amazon-ecs-existing-vpc.md
@@ -0,0 +1,7 @@
+---
+title: "Amazon ECS agents (Existing VPC)"
+displayed_sidebar: "dagsterPlus"
+sidebar_position: 11
+sidebar_label: "Amazon ECS (existing)"
+unlisted: true
+---
diff --git a/docs/docs-beta/docs/dagster-plus/deployment/hybrid/agents/amazon-ecs-new-vpc.md b/docs/docs-beta/docs/dagster-plus/deployment/hybrid/agents/amazon-ecs-new-vpc.md
new file mode 100644
index 0000000000000..c2e4c4ab150e6
--- /dev/null
+++ b/docs/docs-beta/docs/dagster-plus/deployment/hybrid/agents/amazon-ecs-new-vpc.md
@@ -0,0 +1,7 @@
+---
+title: "Amazon ECS agents (New VPC)"
+displayed_sidebar: "dagsterPlus"
+sidebar_position: 10
+sidebar_label: "Amazon ECS (new)"
+unlisted: true
+---
diff --git a/docs/docs-beta/docs/dagster-plus/deployment/hybrid/agents/docker.md b/docs/docs-beta/docs/dagster-plus/deployment/hybrid/agents/docker.md
new file mode 100644
index 0000000000000..8ec342d1eda9f
--- /dev/null
+++ b/docs/docs-beta/docs/dagster-plus/deployment/hybrid/agents/docker.md
@@ -0,0 +1,9 @@
+---
+title: "Docker agents"
+displayed_sidebar: "dagsterPlus"
+sidebar_position: 20
+sidebar_label: "Docker"
+unlisted: true
+---
+
+# Docker agents
diff --git a/docs/docs-beta/docs/dagster-plus/deployment/hybrid/agents/kubernetes.md b/docs/docs-beta/docs/dagster-plus/deployment/hybrid/agents/kubernetes.md
new file mode 100644
index 0000000000000..d486407e5d6ba
--- /dev/null
+++ b/docs/docs-beta/docs/dagster-plus/deployment/hybrid/agents/kubernetes.md
@@ -0,0 +1,126 @@
+---
+title: "Running Dagster+ agents on Kubernetes"
+displayed_sidebar: "dagsterPlus"
+sidebar_position: 30
+sidebar_label: "Kubernetes"
+---
+
+# Running Dagster+ agents on Kubernetes
+
+This page provides instructions for running the [Dagster+ agent](/todo) on a [Kubernetes](https://kubernetes.io) cluster.
+
+## Installation
+
+### Prerequisites
+
+You'll need a Kubernetes cluster. This can be a self-hosted Kubernetes cluster or a managed offering like [Amazon EKS](https://aws.amazon.com/eks/), [Azure AKS](https://azure.microsoft.com/en-us/products/kubernetes-service), or [Google GKE](https://cloud.google.com/kubernetes-engine).
+
+You'll also need access to a container registry to which you can push images and from which pods in the Kubernetes cluster can pull images. This can be a self-hosted registry or a managed offering like [Amazon ECR](https://aws.amazon.com/ecr/), [Azure ACR](https://azure.microsoft.com/en-us/products/container-registry), or [Google GCR](https://cloud.google.com/artifact-registry).
+
+We recommend installing the Dagster+ agent using [Helm](https://helm.sh).
+
+## Step 1: Create a Kubernetes namespace
+
+```shell
+kubectl create namespace dagster-cloud
+```
+
+## Step 2: Create an agent token secret
+
+[Generate an agent token](/dagster-plus/deployment/hybrid/tokens) and set it as a Kubernetes secret:
+
+```shell
+kubectl --namespace dagster-cloud create secret generic dagster-cloud-agent-token --from-literal=DAGSTER_CLOUD_AGENT_TOKEN=<your-agent-token>
+```
+
+## Step 3: Add the Dagster+ agent Helm chart repository
+
+```shell
+helm repo add dagster-cloud https://dagster-io.github.io/helm-user-cloud
+helm repo update
+```
+
+## Step 4: Install the Dagster+ agent Helm chart
+
+```shell
+helm --namespace dagster-cloud upgrade --install agent dagster-cloud/dagster-cloud-agent
+```
+
+## Upgrading
+
+You can use Helm to do rolling upgrades of your Dagster+ agent:
+
+```yaml
+# values.yaml
+dagsterCloudAgent:
+  image:
+    tag: latest
+```
+
+```shell
+helm --namespace dagster-cloud upgrade agent \
+    dagster-cloud/dagster-cloud-agent \
+    --values ./values.yaml
+```
+
+## Common configurations
+
+You can customize your Dagster+ agent using [Helm values](https://artifacthub.io/packages/helm/dagster-cloud/dagster-cloud-agent?modal=values). Some common configurations include:
+
+### Configuring your agents to serve branch deployments
+
+[Branch deployments](/dagster-plus/deployment/branch-deployments) are lightweight staging environments created for each code change. To configure your Dagster+ agent to manage them:
+
+```yaml
+# values.yaml
+dagsterCloud:
+  branchDeployment: true
+```
+
+```shell
+helm --namespace dagster-cloud upgrade agent \
+    dagster-cloud/dagster-cloud-agent \
+    --values ./values.yaml
+```
+
+### High availability configurations
+
+You can configure your Dagster+ agent to run with multiple replicas. Work will be load balanced across all replicas.
+
+```yaml
+# values.yaml
+dagsterCloudAgent:
+  replicas: 2
+```
+
+```shell
+helm --namespace dagster-cloud upgrade agent \
+    dagster-cloud/dagster-cloud-agent \
+    --values ./values.yaml
+```
+
+Work load balanced across agents isn't sticky; there's no guarantee the agent that launched a run will be the same one to receive instructions to terminate it. This is fine if both replicas run on the same Kubernetes cluster because either agent can terminate the run.
But if your agents are physically isolated (for example, they run on two different Kubernetes clusters), you should configure the following:
+
+```yaml
+# values.yaml
+isolatedAgents: true
+```
+
+```shell
+helm --namespace dagster-cloud upgrade agent \
+    dagster-cloud/dagster-cloud-agent \
+    --values ./values.yaml
+```
+
+## Troubleshooting tips
+
+You can see basic health information about your agent in the Dagster+ UI:
+
+![Screenshot of agent health information](/img/placeholder.svg)
+
+### View logs
+
+```shell
+kubectl --namespace dagster-cloud logs -l deployment=agent
+```
diff --git a/docs/docs-beta/docs/dagster-plus/deployment/hybrid/agents/local.md b/docs/docs-beta/docs/dagster-plus/deployment/hybrid/agents/local.md
new file mode 100644
index 0000000000000..5ae3fc34d5020
--- /dev/null
+++ b/docs/docs-beta/docs/dagster-plus/deployment/hybrid/agents/local.md
@@ -0,0 +1,75 @@
+---
+title: "Running a Dagster+ agent locally"
+displayed_sidebar: "dagsterPlus"
+sidebar_position: 40
+sidebar_label: "Local"
+---
+
+Local agents are a good way to experiment with Dagster+ before deploying a more scalable Hybrid agent like [Kubernetes](/dagster-plus/deployment/hybrid/agents/kubernetes) or [Amazon ECS](/todo).
+
+:::note
+Local agents aren't well suited for most production use cases. If you're running the local agent in production, make sure that:
+
+- You've set up a supervisor to automatically restart the agent process if it crashes
+- You're alerted if the VM or container the agent runs on dies, or have a way to automatically restart it
+:::
+
      + Prerequisites + +To follow the steps in this guide, you'll need: + +- **Organization Admin** permissions in your Dagster+ account +- **To install the `dagster-cloud` CLI** in the same environment where the agent will run. We recommend using a Python virtual environment for your Dagster application code and its dependencies. + + ```bash + pip install dagster-cloud + ``` +
+
+## Step 1: Generate an agent token
+
+Your local agent will need a token to authenticate with your Dagster+ account. To generate an agent token:
+
+1. Click the **user menu (your icon) > Organization Settings**.
+2. In the **Organization Settings** page, click the **Tokens** tab.
+3. Click the **+ Create agent token** button.
+4. After the token has been created, click **Reveal token**.
+5. Save this token as an environment variable on the machine where the local agent will run. You can choose any name for this environment variable, but `$DAGSTER_AGENT_TOKEN` will be used in the rest of this guide.
+6. Give the agent token a description to distinguish it from other tokens in the future.
+
+## Step 2: Configure the local agent
+
+1. Create a directory to act as your Dagster home. This guide uses `~/dagster_home`, but the directory can be located wherever you want.
+2. In the new directory, create a `dagster.yaml` file with the following:
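+
+   A minimal sketch of what this file can contain is shown below; `prod` and `DAGSTER_AGENT_TOKEN` are this guide's example names, and the exact fields are covered in the `dagster.yaml` reference linked below:
+
+   ```yaml
+   # ~/dagster_home/dagster.yaml
+   instance_class:
+     module: dagster_cloud.instance
+     class: DagsterCloudAgentInstance
+
+   dagster_cloud_api:
+     agent_token:
+       env: DAGSTER_AGENT_TOKEN
+     deployment: prod
+   ```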
+3. In the file, fill in the following:
+   - `agent_token.env` - The name of the environment variable storing the agent token you created in Step 1.
+   - `deployment` - The name of the deployment associated with this instance of the agent. In the preceding example, `prod` was used as the deployment.
+4. Save the file.
+
+For more information about `dagster.yaml` configuration options, check out the [`dagster.yaml` reference](/todo).
+
+### Alternative methods for setting the agent token
+
+If you prefer not to specify the agent token by using an environment variable in `dagster.yaml`, pass it to the `dagster-cloud agent run` command:
+
+```bash
+dagster-cloud agent run ~/dagster_home/ --agent-token <your-agent-token>
+```
+
+## Step 3: Run the agent
+
+To start the agent, run the following command and pass the path to the `dagster.yaml` file you created in Step 2:
+
+```bash
+dagster-cloud agent run ~/dagster_home/
+```
+
+To view the agent in Dagster+, click the Dagster icon in the top left to navigate to the **Deployment > Agents** page. You should see the agent running in the **Agent statuses** section:
+
+![Screenshot of the agent running in Dagster+](/img/placeholder.svg)
+
+## Next steps
+
+- Add a [code location](/todo) to your Dagster+ deployment
diff --git a/docs/docs-beta/docs/dagster-plus/deployment/hybrid/agents/multiple.md b/docs/docs-beta/docs/dagster-plus/deployment/hybrid/agents/multiple.md
new file mode 100644
index 0000000000000..f41c22e684a55
--- /dev/null
+++ b/docs/docs-beta/docs/dagster-plus/deployment/hybrid/agents/multiple.md
@@ -0,0 +1,8 @@
+---
+title: "Using multiple agents"
+displayed_sidebar: "dagsterPlus"
+sidebar_position: 50
+unlisted: true
+---
+
+# Use multiple agents with Dagster+
diff --git a/docs/docs-beta/docs/dagster-plus/deployment/hybrid/agents/settings.md b/docs/docs-beta/docs/dagster-plus/deployment/hybrid/agents/settings.md
new file mode 100644
index 0000000000000..c086f9b11a154
--- /dev/null
+++ b/docs/docs-beta/docs/dagster-plus/deployment/hybrid/agents/settings.md
@@ -0,0 +1,9 @@
+---
+title: "Hybrid agent settings"
+displayed_sidebar: "dagsterPlus"
+sidebar_position: 60
+sidebar_label: "Settings"
+unlisted: true
+---
+
+# Hybrid agent settings
diff --git a/docs/docs-beta/docs/dagster-plus/deployment/hybrid/architecture.md b/docs/docs-beta/docs/dagster-plus/deployment/hybrid/architecture.md
new file mode 100644
index 0000000000000..5d012be9199e0
--- /dev/null
+++ b/docs/docs-beta/docs/dagster-plus/deployment/hybrid/architecture.md
@@ -0,0 +1,90 @@
+---
+title: 'Dagster+ Hybrid architecture'
+displayed_sidebar: 'dagsterPlus'
+sidebar_position: 10
+---
+
+# Dagster+ Hybrid architecture
+
+The Hybrid architecture is the most flexible and secure way to deploy Dagster+. It allows you to run your user code in your environment while leveraging Dagster+'s infrastructure for orchestration and metadata management.
+
+ Prerequisites
+
+Before you begin, you should have:
+
+- A [Dagster+ account](/dagster-plus/getting-started)
+- [Basic familiarity with Dagster](/getting-started/quickstart)
+
      + +--- + +## Hybrid architecture overview + +A **hybrid deployment** utilizes a combination of your infrastructure and Dagster-hosted backend services. + +The Dagster backend services - including the web frontend, GraphQL API, metadata database, and daemons (responsible for executing schedules and sensors) - are hosted in Dagster+. You are responsible for running an [agent](/todo) in your environment. + +![Dagster+ Hybrid deployment architecture](/img/placeholder.svg) + +Work is enqueued for your agent when: + +- Users interact with the web front end, +- The GraphQL API is queried, or +- Schedules and sensors tick + +The agent polls the agent API to see if any work needs to be done and launches user code as appropriate to fulfill requests. User code then streams metadata back to the agent API (GraphQL over HTTPS) to make it available in Dagster+. + +All user code runs within your environment, in isolation from Dagster system code. + +--- + +## The agent + +Because the agent communicates with the Dagster+ control plane over the agent API, it's possible to support agents that operate in arbitrary compute environments. + +This means that over time, Dagster+'s support for different user deployment environments will expand and custom agents can take advantage of bespoke compute environments such as HPC. + +Refer to the [Agents documentation](/todo) for more info, including the agents that are currently supported. + +--- + +## Security + +This section describes how Dagster+ interacts with user code. To summarize: + +- No ingress is required from Dagster+ to user environments +- Dagster+ doesn't have access to user code. Metadata about the code is fetched over constrained APIs. +- The Dagster+ agent is [open source and auditable](https://github.com/dagster-io/dagster-cloud) + +These highlights are described in more detail below: + +- [Interactions and queries](#interactions-and-queries) +- [Runs](#runs) +- [Ingress](#ingress) + +### Interactions and queries + +When Dagster+ needs to interact with user code - for instance, to display the structure of a job in the Dagster+ user interface, to run the body of a sensor definition, or to launch a run for a job - it enqueues a message for the Dagster+ Agent. The Dagster+ Agent picks up this message and then launches or queries user code running on the appropriate compute substrate. + +Depending on the agent implementation, user code may run in isolated OS processes, in Docker containers, in ECS Tasks, in Kubernetes Jobs and Services, or in a custom isolation strategy. + +Queries to user code run over a well-defined gRPC interface. Dagster+ uses this interface to: + +- Retrieve the names, config schemas, descriptions, tags, and structures of jobs, ops, repositories, partitions, schedules, and sensors defined in your code +- Evaluate schedule and sensor ticks and determine whether a run should be launched + +When the agent queries user code, it writes the response back to Dagster+ over a well-defined GraphQL interface. + +### Runs + +Runs are launched by calling the `dagster api` CLI command in a separate process/container as appropriate to the agent type. Run termination is handled by interrupting the user code process/container as appropriate for the compute substrate. + +When runs are launched, the user code process/container streams structured metadata (containing everything that's viewable in the integrated logs viewer in the Dagster+ UI) back to Dagster+ over a well-defined GraphQL interface. 
Structured metadata is stored in Amazon RDS, encrypted at rest. + +By default, the run worker also uploads the compute logs (raw `stdout` and `stderr` from runs) to Dagster+. If you don't want to upload logs, you can disable this feature in the [agent settings](/dagster-plus/deployment/hybrid/agents/settings). + +### Ingress + +No ingress is required from Dagster+ to user environments. All network requests are outbound from user environments to Dagster+. diff --git a/docs/docs-beta/docs/dagster-plus/deployment/hybrid/tokens.md b/docs/docs-beta/docs/dagster-plus/deployment/hybrid/tokens.md new file mode 100644 index 0000000000000..ea705c4240bd8 --- /dev/null +++ b/docs/docs-beta/docs/dagster-plus/deployment/hybrid/tokens.md @@ -0,0 +1,8 @@ +--- +title: 'Hybrid agent tokens' +displayed_sidebar: 'dagsterPlus' +sidebar_position: 30 +unlisted: true +--- + +# Hybrid agent tokens diff --git a/docs/docs-beta/docs/dagster-plus/deployment/serverless.md b/docs/docs-beta/docs/dagster-plus/deployment/serverless.md new file mode 100644 index 0000000000000..4bd69f235cb08 --- /dev/null +++ b/docs/docs-beta/docs/dagster-plus/deployment/serverless.md @@ -0,0 +1,8 @@ +--- +title: "Serverless deployment" +displayed_sidebar: "dagsterPlus" +sidebar_position: 1 +unlisted: true +--- + +# Serverless deployment diff --git a/docs/docs-beta/docs/dagster-plus/deployment/serverless/dagster-ips.md b/docs/docs-beta/docs/dagster-plus/deployment/serverless/dagster-ips.md new file mode 100644 index 0000000000000..9ee0185bc263a --- /dev/null +++ b/docs/docs-beta/docs/dagster-plus/deployment/serverless/dagster-ips.md @@ -0,0 +1,8 @@ +--- +title: "Dagster+ IP addresses" +displayed_sidebar: "dagsterPlus" +sidebar_position: 20 +unlisted: true +--- + +# Dagster+ IP addresses diff --git a/docs/docs-beta/docs/dagster-plus/deployment/serverless/run-isolation.md b/docs/docs-beta/docs/dagster-plus/deployment/serverless/run-isolation.md new file mode 100644 index 0000000000000..2530585dfafef --- /dev/null +++ b/docs/docs-beta/docs/dagster-plus/deployment/serverless/run-isolation.md @@ -0,0 +1,68 @@ +--- +title: 'Serverless run isolation' +displayed_sidebar: 'dagsterPlus' +sidebar_label: 'Run isolation' +sidebar_position: 40 +--- + +# Serverless run isolation + +Dagster+ Serverless offers two settings for run isolation: isolated and non-isolated. Isolated runs are the default and are intended for production, while non-isolated runs are intended for faster iteration during development. + +
      + Prerequisites + +To follow the steps in this guide, you'll need: + +- An active Dagster+ Serverless deployment +- An understanding of [Dagster+ deployment settings](/dagster-plus/deployment/settings) +
+
+---
+
+## Differences between isolated and non-isolated runs
+
+- [**Isolated runs**](#isolated-runs-default) execute in their own container. They're the default and are intended for production and compute-heavy use cases.
+- [**Non-isolated runs**](#non-isolated-runs) trade off isolation for speed. They must be launched manually and are intended for fast iteration during development.
+
+## Isolated runs (default)
+
+Isolated runs each take place in their own container with their own compute resources: 4 vCPUs and 16GB of RAM.
+
+These runs may take up to 3 minutes to start while these resources are provisioned.
+
+When launching runs manually, select `Isolate run environment` in the Launchpad to launch an isolated run.
+
+:::note
+If non-isolated runs are disabled (see the section below), the toggle won't appear and all runs will be isolated.
+:::
+
+## Non-isolated runs
+
+Non-isolated runs can be enabled or disabled in deployment settings with:
+
+```yaml
+non_isolated_runs:
+  enabled: True
+```
+
+Non-isolated runs provide a faster start time by using a standing, shared container for each code location.
+
+They have fewer compute resources: 0.25 vCPUs and 1GB of RAM. These resources are shared with other processes running within a code location, like sensors. As a result, it's recommended to use isolated runs for compute-intensive jobs and asset materializations.
+
+When launching runs from the Launchpad, uncheck `Isolate run environment`. When materializing an asset, shift-click `Materialize all` to open the Launchpad and uncheck the `Isolate run environment` checkbox.
+
+:::warning
+
+To avoid crashes and memory exhaustion, only one non-isolated run will execute at once by default. While a non-isolated run is in progress, the Launchpad will only allow isolated runs to be launched.
+
+This limit can be configured in [deployment settings](/dagster-plus/deployment/settings):
+
+```yaml
+non_isolated_runs:
+  enabled: True
+  max_concurrent_non_isolated_runs: 1
+```
+
+:::
diff --git a/docs/docs-beta/docs/dagster-plus/deployment/serverless/runtime-environment.md b/docs/docs-beta/docs/dagster-plus/deployment/serverless/runtime-environment.md
new file mode 100644
index 0000000000000..70c5edd73c201
--- /dev/null
+++ b/docs/docs-beta/docs/dagster-plus/deployment/serverless/runtime-environment.md
@@ -0,0 +1,167 @@
+---
+title: "Serverless runtime environment"
+displayed_sidebar: "dagsterPlus"
+sidebar_label: "Runtime environment"
+sidebar_position: 30
+---
+
+With a Dagster+ Serverless deployment, you can customize the runtime environment where your code executes. You may want to:
+- [Use a different Python version](#python-version).
+- [Use a different base image](#base-image).
+- [Include data files](#data-files).
+
+Dagster uses [PEX](https://docs.pex-tool.org/) to package your code and deploy it on Docker images. You also have the option to [disable PEX-based deploys](#disable-pex) and deploy using a Docker image instead of PEX.
+
+## Use a different Python version \{#python-version}
+The default Python version for Dagster+ Serverless is Python 3.8. Python versions 3.9 through 3.12 are also supported. You can specify the Python version you want to use in your GitHub or GitLab workflow, or by using the `dagster-cloud` CLI.
+
+In your `.github/workflows/deploy.yml` file, update the `PYTHON_VERSION` environment variable with your desired Python version:
+
+1. Open your `.gitlab-ci.yml` file.
2. If your `.gitlab-ci.yml` contains an `include` with a link to a Dagster-provided CI/CD template, follow the link and replace the contents of your `.gitlab-ci.yml` with the YAML document at the link address. Otherwise, continue to the next step.
+3. Update the `PYTHON_VERSION` environment variable with your desired Python version.
+
+You can specify the Python version when you deploy your code with the `dagster-cloud serverless deploy-python-executable` command:
+```bash
+dagster-cloud serverless deploy-python-executable --python-version=3.11 --location-name=my_location
+```
+
+## Use a different base image \{#base-image}
+When possible, you should add dependencies by including the corresponding Python libraries in your Dagster project's `setup.py` file.
+
+When adding dependencies with the `setup.py` file isn't possible, you can build a custom base image:
+
+:::note
+Setting a custom base image isn't supported for GitLab CI/CD workflows out of the box, but you can write a custom GitLab CI/CD yaml file that implements the manual steps noted.
+:::
+
+1. Include `dagster-cloud[serverless]` as a dependency in your Docker image by adding the following line to your `Dockerfile`:
+   ```
+   RUN pip install "dagster-cloud[serverless]"
+   ```
+2. Build your Docker image using your usual Docker toolchain.
+3. Upload your Docker image to Dagster+ using the `upload-base-image` command. This command will print out the tag used in Dagster+ to identify your image:
+   ```bash
+   $ dagster-cloud serverless upload-base-image local-image:tag
+
+   ...
+   To use the uploaded image run: dagster-cloud serverless deploy-python-executable ... --base-image-tag=sha256_518ad2f92b078c63c60e89f0310f13f19d3a1c7ea9e1976d67d59fcb7040d0d6
+   ```
+4. Specify this base image tag in your GitHub workflow, or using the `dagster-cloud` CLI:
+
+   In your `.github/workflows/deploy.yml` file, add the `SERVERLESS_BASE_IMAGE_TAG` environment variable and set it to the tag printed out in the previous step:
+
+   You can specify the base image when you deploy your code with the `dagster-cloud serverless deploy-python-executable` command:
+   ```bash
+   dagster-cloud serverless deploy-python-executable \
+     --base-image-tag=sha256_518ad2f92b078c63c60e89f0310f13f19d3a1c7ea9e1976d67d59fcb7040d0d6 \
+     --location-name=my_location
+   ```
+
+## Include data files \{#data-files}
+To add data files to your deployment, use the [Data Files Support](https://setuptools.pypa.io/en/latest/userguide/datafiles.html) built into Python's `setup.py`. This requires adding a `package_data` or `include_package_data` keyword in the call to `setup()` in `setup.py`. For example, given this directory structure:
+
+```
+- setup.py
+- quickstart_etl/
+  - __init__.py
+  - definitions.py
+  - data/
+    - file1.txt
+    - file2.csv
+```
+
+If you want to include the data folder, modify your `setup.py` to add the `package_data` line:
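+
+For example, a sketch of the `setup.py` for the `quickstart_etl` layout above; your package name and dependencies may differ:
+
+```python
+from setuptools import find_packages, setup
+
+setup(
+    name="quickstart_etl",
+    packages=find_packages(exclude=["quickstart_etl_tests"]),
+    # Include every file in quickstart_etl/data/ in the built distribution
+    package_data={"quickstart_etl": ["data/*"]},
+    install_requires=["dagster", "dagster-cloud"],
+)
+```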
+
+## Disable PEX deploys \{#disable-pex}
+
+Prior to using PEX files, Dagster+ deployed code using Docker images. This feature is still available.
+
+You can disable PEX in your GitHub or GitLab workflow, or by using the `dagster-cloud` CLI.
+
+In your `.github/workflows/deploy.yml` file, update the `ENABLE_FAST_DEPLOYS` environment variable to `false`:
+
+1. Open your `.gitlab-ci.yml` file.
+2. If your `.gitlab-ci.yml` contains an `include` with a link to a Dagster-provided CI/CD template, follow the link and replace the contents of your `.gitlab-ci.yml` with the YAML document at the link address. Otherwise, continue to the next step.
+3. Update the `DISABLE_FAST_DEPLOYS` variable to `true`.
+
+You can deploy using a Docker image instead of PEX by using the `dagster-cloud serverless deploy` command instead of the `dagster-cloud serverless deploy-python-executable` command:
+
+```bash
+dagster-cloud serverless deploy --location-name=my_location
+```
+
+You can customize the Docker image using lifecycle hooks or by customizing the base image:
+
+This method is the easiest to set up, and doesn't require setting up any additional infrastructure.
+
+In the root of your repo, you can provide two optional shell scripts: `dagster_cloud_pre_install.sh` and `dagster_cloud_post_install.sh`. These will run before and after Python dependencies are installed. They're useful for installing any non-Python dependencies or otherwise configuring your environment.
+
+This method is the most flexible, but requires setting up a pipeline outside of Dagster to build a custom base image.
+
+:::note
+Setting a custom base image isn't supported for GitLab CI/CD workflows out of the box, but you can write a custom GitLab CI/CD yaml file that implements the manual steps noted.
+:::
+
+1. Build your base image.
+2. Specify this base image tag in your GitHub workflow, or using the `dagster-cloud` CLI:
+
+   In your `.github/workflows/deploy.yml` file, add the `SERVERLESS_BASE_IMAGE_TAG` environment variable and set it to the tag printed out in the previous step:
+
+   You can specify the base image when you deploy your code with the `dagster-cloud serverless deploy` command:
+   ```bash
+   dagster-cloud serverless deploy --base-image=my_base_image:latest --location-name=my_location
+   ```
diff --git a/docs/docs-beta/docs/dagster-plus/deployment/serverless/security.md b/docs/docs-beta/docs/dagster-plus/deployment/serverless/security.md
new file mode 100644
index 0000000000000..b1e8ffec101f0
--- /dev/null
+++ b/docs/docs-beta/docs/dagster-plus/deployment/serverless/security.md
@@ -0,0 +1,8 @@
+---
+title: "Security & data protection"
+displayed_sidebar: "dagsterPlus"
+sidebar_position: 10
+unlisted: true
+---
+
+# Serverless security & data protection
diff --git a/docs/docs-beta/docs/dagster-plus/deployment/serverless/transition-hybrid.md b/docs/docs-beta/docs/dagster-plus/deployment/serverless/transition-hybrid.md
new file mode 100644
index 0000000000000..6790e20c2cbf5
--- /dev/null
+++ b/docs/docs-beta/docs/dagster-plus/deployment/serverless/transition-hybrid.md
@@ -0,0 +1,48 @@
+---
+title: "Transitioning to Hybrid"
+displayed_sidebar: "dagsterPlus"
+sidebar_position: 50
+---
+
+After utilizing a Dagster+ [Serverless](/dagster-plus/deployment/serverless) deployment, you may decide to leverage your own infrastructure to execute your code. Transitioning to a Hybrid deployment requires only a few steps and can be done without any loss of execution history or metadata, allowing you to maintain continuity and control over your operations.
+
+:::warning
+Transitioning from Serverless to Hybrid requires some downtime, as your Dagster+ deployment won't have an agent to execute user code.
+:::
+
      + Prerequisites + +To follow the steps in this guide, you'll need: + +- **Organization Admin** permissions in your Dagster+ account + +
+
+## Step 1: Deactivate your Serverless agent
+
+1. In the Dagster+ UI, navigate to the **Deployment > Agents** page.
+2. Click the drop-down arrow on the right of the page and select **Switch to Hybrid**.
+
+![PRODUCT NOTE - this arrow drop down is pretty small and easy to confuse with the one in the row for the agent](/img/placeholder.svg)
+
+It may take a few minutes for the agent to deactivate and be removed from the list of agents.
+
+## Step 2: Create a Hybrid agent
+
+Next, you'll need to create a Hybrid agent to execute your code. Follow the setup instructions for the agent of your choice:
+
+- **[Amazon Web Services (AWS)](/todo)**, which launches user code as Amazon Elastic Container Service (ECS) tasks
+- **[Docker](/dagster-plus/deployment/hybrid/agents/docker)**, which launches user code in Docker containers on your machine
+- **[Kubernetes](/dagster-plus/deployment/hybrid/agents/kubernetes)**, which launches user code on a Kubernetes cluster
+- **[Local](/dagster-plus/deployment/hybrid/agents/local)**, which launches user code in operating system subprocesses on your machine
+
+## Step 3: Confirm successful setup
+
+Once you've set up a Hybrid agent, navigate to the **Deployment > Agents** page in the UI. The new agent should display in the list with a `RUNNING` status:
+
+![Screenshot](/img/placeholder.svg)
+
+## Next steps
+
+- Learn about the configuration options for [dagster.yaml](/todo)
diff --git a/docs/docs-beta/docs/dagster-plus/deployment/settings.md b/docs/docs-beta/docs/dagster-plus/deployment/settings.md
new file mode 100644
index 0000000000000..355740e8e2e97
--- /dev/null
+++ b/docs/docs-beta/docs/dagster-plus/deployment/settings.md
@@ -0,0 +1,8 @@
+---
+title: "Deployment settings"
+displayed_sidebar: "dagsterPlus"
+sidebar_label: "Settings"
+unlisted: true
+---
+
+# Deployment settings
diff --git a/docs/docs-beta/docs/dagster-plus/deployment/tokens.md b/docs/docs-beta/docs/dagster-plus/deployment/tokens.md
new file mode 100644
index 0000000000000..62a79168b2352
--- /dev/null
+++ b/docs/docs-beta/docs/dagster-plus/deployment/tokens.md
@@ -0,0 +1,7 @@
+---
+title: "Tokens"
+displayed_sidebar: "dagsterPlus"
+unlisted: true
+---
+
+# Managing user and agent tokens in Dagster+
diff --git a/docs/docs-beta/docs/dagster-plus/getting-started.md b/docs/docs-beta/docs/dagster-plus/getting-started.md
new file mode 100644
index 0000000000000..afe3e4f4666bc
--- /dev/null
+++ b/docs/docs-beta/docs/dagster-plus/getting-started.md
@@ -0,0 +1,61 @@
+---
+title: "Getting started with Dagster+"
+displayed_sidebar: "dagsterPlus"
+---
+
+# Get started with Dagster+
+
+First, [create a Dagster+ organization](https://dagster.plus/signup). You can sign up with:
+- a Google email address
+- a GitHub account
+- a one-time email link, which is great if you are using a corporate email. You can set up SSO after completing these steps.
+
+Next, pick your deployment type. Not sure?
+
+- [Dagster+ Serverless](/dagster-plus/deployment/serverless) is the easiest way to get started and is great for teams with limited DevOps support. In Dagster+ Serverless, your Dagster code is executed in Dagster+. You will need to be okay [giving Dagster+ the credentials](/dagster-plus/deployment/environment-variables) to connect to the tools you want to orchestrate.
+
+- [Dagster+ Hybrid](/dagster-plus/deployment/hybrid) is great for teams who want to orchestrate tools without giving Dagster+ direct access to your systems. Dagster+ Hybrid requires more DevOps support.
+
+The remaining steps depend on your deployment type.
+
+We recommend following the steps in Dagster+ to add a new project.
+
+![Screenshot of Dagster+ serverless NUX](/img/placeholder.svg)
+
+The Dagster+ onboarding will guide you through:
+- creating a Git repository containing your Dagster code
+- setting up the necessary CI/CD actions to deploy that repository to Dagster+
+
+:::tip
+If you don't have any Dagster code yet, you will have the option to select an example quickstart project or import an existing dbt project.
+:::
+
+See the guide on [adding code locations](/dagster-plus/deployment/code-locations) for details.
+
+## Install a Dagster+ Hybrid agent
+
+Follow [these guides](/dagster-plus/deployment/hybrid) for installing a Dagster+ Hybrid agent. Not sure which agent to pick? We recommend using the Dagster+ Kubernetes agent in most cases.
+
+## Set up CI/CD
+
+In most cases, your CI/CD process will be responsible for:
+- building your Dagster code into a Docker image
+- pushing your Docker image to a container registry you manage
+- notifying Dagster+ of the new or updated code
+
+Refer to the guide for [adding a code location](/dagster-plus/deployment/code-locations) for more detail.
+
+## Next steps
+
+Your Dagster+ account is automatically enrolled in a trial. You can [pick your plan type and enter your billing information](/dagster-plus/settings), or [contact the Dagster team](https://dagster.io/contact) if you need support or want to evaluate the Dagster+ Pro plan.
diff --git a/docs/docs-beta/docs/dagster-plus/insights.md b/docs/docs-beta/docs/dagster-plus/insights.md
new file mode 100644
index 0000000000000..6819907cada01
--- /dev/null
+++ b/docs/docs-beta/docs/dagster-plus/insights.md
@@ -0,0 +1,71 @@
+---
+title: 'Insights'
+description: 'Visibility into historical usage, cost, and metadata.'
+---
+
+# Dagster+ Insights
+
+Using Dagster+ Insights, you can gain visibility into historical usage and cost metrics such as Dagster+ run duration, credit usage, and failures. You can also define your own custom metrics, such as the number of rows processed by an asset.
+
+Visualizations are built into the Dagster+ UI, allowing you to explore metrics from Dagster and external systems, such as Google BigQuery, in one place.
+
+### With Insights, you can
+
+- [Explore usage trends in your Dagster pipelines](#explore-dagsters-built-in-metrics)
+- [Integrate additional metrics](#integrate-metrics), like data warehouse cost or your own custom metadata
+- [Export metrics](#export-metrics) from Dagster+
+- [Create alerts](/dagster-plus/deployment/alerts) based on Insights metrics TODO: write this alerts section
      + Prerequisites + +To use Insights, you'll need a Dagster+ account. + +
+
+## Explore Dagster's built-in metrics
+
+To access Insights, click **Insights** in the top navigation bar in the UI:
+
+![Viewing the Insights tab in the Dagster+ UI](/img/placeholder.svg)
+
+The left navigation panel on this page contains a list of available metrics. For each metric, the daily, weekly, or monthly aggregated values are displayed in the graph.
+
+Use the tabs above the charts to view metrics for **Assets**, **Asset groups**, **Jobs**, and **Deployments**.
+
+These metrics are updated on a daily basis. Refer to the [Built-in metrics](#built-in-metrics) section for more information about what Dagster provides out of the box.
+
+## Working with Insights metrics \{#insights-metrics}
+
+### Data retention
+
+How long historical Insights data is retained depends on your Dagster+ plan:
+
+- **Dagster+ Pro** - 120 days
+- **All other plans** - 30 days
+
+### Built-in metrics
+
+| Metric | Description |
+| --- | --- |
+| Dagster credits | The Dagster credit cost associated with computing this object. Dagster credits are charged for every step that's run, and for every asset that's materialized. For more information, [refer to the pricing FAQ](https://dagster.io/pricing#faq). |
+| Compute duration | The time spent computing steps. For jobs that run steps in parallel, the compute duration may be longer than the wall clock time it takes for the run to complete. |
+| Materializations | The number of asset materializations associated with computing this object. |
+| Observations | The number of [asset observations](/todo) associated with computing this object. |
+| Step failures | The number of times steps failed when computing this object. **Note**: Steps that retry and succeed aren't included in this metric. |
+| Step retries | The number of times steps were retried when computing this object. |
+| Asset check warnings | The number of [asset checks](/todo) that produced warnings. |
+| Asset check errors | The number of [asset checks](/todo) that produced errors. |
+| Retry compute | The time spent computing steps, including time spent retrying failed steps. For jobs that run steps in parallel, the compute duration may be longer than the wall clock time it takes for the run to complete. |
+
+### Integrate other metrics \{#integrate-metrics}
+
+Users on the Pro plan can integrate other metrics into Insights, such as asset materialization metadata or Snowflake credits. Insights supports the following additional metrics:
+
+- **Asset materialization metadata.** Refer to the [Using asset metadata with Dagster+ Insights guide](/dagster-plus/insights/asset-metadata) for more info.
+- [**Google BigQuery usage**](/dagster-plus/insights/google-bigquery) generated by either queries made to BigQuery resources or using dbt to materialize tables
+- [**Snowflake usage**](/dagster-plus/insights/snowflake) generated by either queries made to Snowflake resources or using dbt to materialize tables
+
+### Export metrics
+
+Metrics in Dagster+ Insights can be exported using a GraphQL API endpoint. Refer to the [Exporting Insights metrics from Dagster+ guide](/dagster-plus/insights/export-metrics) for details.
diff --git a/docs/docs-beta/docs/dagster-plus/insights/asset-metadata.md b/docs/docs-beta/docs/dagster-plus/insights/asset-metadata.md new file mode 100644 index 0000000000000..50381d318fd68 --- /dev/null +++ b/docs/docs-beta/docs/dagster-plus/insights/asset-metadata.md @@ -0,0 +1,58 @@ +--- +title: "Integrate asset metadata into Dagster+ Insights" +sidebar_label: "Integrate asset metadata" +sidebar_position: 1 +--- + +Out of the box, Dagster+ Insights gives you visibility into a variety of common metrics across your data platform. +By creating custom metrics from asset metadata, you can use Insights to perform historical aggregation on any +data your assets can emit. + +
+ Prerequisites
+
+To follow the steps in this guide, you'll need a Dagster+ account on the Pro plan.
+
+
+## Step 1: Emit numeric metadata on your assets at runtime
+
+You'll need one or more assets that emit the same metadata key at run time. Insights metrics are most valuable when you have multiple assets that emit the same kind of metadata, such as the number of rows processed or the size of a file uploaded to object storage.
+
+Follow [the metadata guide](/guides/metadata#runtime-metadata) to add numeric metadata to your asset materializations.
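+
+For example, here's a minimal sketch of an asset emitting a numeric metadata key at materialization time; the `rows_processed` key name is illustrative:
+
+```python
+from dagster import MaterializeResult, asset
+
+
+@asset
+def my_table() -> MaterializeResult:
+    num_rows = 42  # e.g. the number of rows this asset just wrote
+    return MaterializeResult(metadata={"rows_processed": num_rows})
+```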
+
+## Step 2: Enable viewing your metadata in Dagster+ Insights
+
+Once your assets are emitting numeric metadata values, you'll be able to enable viewing them in the Insights UI.
+
+To add your metadata key to the list of metrics shown in Insights, click **Edit** in the sidebar next to the **User provided metrics** header:
+
+![Viewing the Insights tab in the Dagster+ UI](/img/placeholder.svg)
+
+## Tracking usage with dagster-dbt
+
+If you use `dagster-dbt` to manage a dbt project that targets Google BigQuery, you can emit usage metrics to the Dagster+ API with the `DbtCliResource`.
+
+First, add a `.with_insights()` call to your `dbt.cli()` command(s).
+
+Then, add the following to your `dbt_project.yml`:
+
+This adds a comment to each query, which is used by Dagster+ to attribute cost metrics to the correct assets.
diff --git a/docs/docs-beta/docs/dagster-plus/insights/snowflake.md b/docs/docs-beta/docs/dagster-plus/insights/snowflake.md
new file mode 100644
index 0000000000000..5d5674652974e
--- /dev/null
+++ b/docs/docs-beta/docs/dagster-plus/insights/snowflake.md
@@ -0,0 +1,84 @@
+---
+title: "Track Snowflake usage with Dagster+ Insights"
+sidebar_label: "Snowflake"
+sidebar_position: 2
+---
+
+Dagster allows you to track external metrics, such as Snowflake usage, in the Insights UI. Out-of-the-box integrations are provided to capture query runtime and billed usage, and associate them with the relevant assets or jobs.
+
+## Requirements
+
+To use these features, you will need:
+
+- A Dagster+ account on the **Pro** plan
+- Access to the [Dagster+ Insights feature](/dagster-plus/insights)
+- Snowflake credentials which have access to the **`snowflake.account_usage.query_history`** table.
+  - For more information, see the [Snowflake Documentation](https://docs.snowflake.com/en/sql-reference/account-usage#enabling-the-snowflake-database-usage-for-other-roles)
+- The following packages installed:
+
+```bash
+pip install dagster dagster-cloud dagster-snowflake
+```
+
+## Limitations
+
+- Up to two million individual data points may be added to Insights, per month
+- External metrics data will be retained for 120 days
+- Insights data may take up to 24 hours to appear in the UI
+
+## Tracking usage with the SnowflakeResource
+
+The `dagster-cloud` package provides an `InsightsSnowflakeResource`, which is a drop-in replacement for the `SnowflakeResource` provided by `dagster-snowflake`.
+
+This resource will emit Snowflake usage metrics to the Dagster+ Insights API whenever it makes a query.
+
+To enable this behavior, replace usage of `SnowflakeResource` with `InsightsSnowflakeResource`, and add Snowflake-specific insights definitions to your code using `create_snowflake_insights_asset_and_schedule`.
+
+These additional definitions are required because Snowflake usage information is only available after a delay. These definitions automatically handle running a computation on a schedule to ingest Snowflake usage information from the previous hour.
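+
+Here's a sketch of that wiring, assuming standard Snowflake credential environment variables; treat the exact signatures as something to confirm against the `dagster_cloud` API docs:
+
+```python
+from dagster import Definitions, EnvVar
+from dagster_cloud.dagster_insights import (
+    InsightsSnowflakeResource,
+    create_snowflake_insights_asset_and_schedule,
+)
+
+# Creates an asset and schedule that ingest the previous hour's
+# Snowflake usage and report it to Dagster+ Insights
+snowflake_insights = create_snowflake_insights_asset_and_schedule(
+    start_date="2024-01-01-00:00",
+    snowflake_resource_key="snowflake",
+)
+
+defs = Definitions(
+    assets=[*snowflake_insights.assets],
+    schedules=[snowflake_insights.schedule],
+    resources={
+        "snowflake": InsightsSnowflakeResource(
+            account=EnvVar("SNOWFLAKE_ACCOUNT"),
+            user=EnvVar("SNOWFLAKE_USER"),
+            password=EnvVar("SNOWFLAKE_PASSWORD"),
+        )
+    },
+)
+```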
+
+:::note
+Only use `create_snowflake_insights_asset_and_schedule` in a single code location per deployment, as this will handle ingesting usage data from your entire deployment.
+:::
+
+## Tracking usage with dagster-dbt
+
+If you use `dagster-dbt` to manage a dbt project that targets Snowflake, you can emit usage metrics to the Dagster+ API with the `DbtCliResource`.
+
+First, add a `.with_insights()` call to your `dbt.cli()` command(s), and add Snowflake-specific insights definitions to your code using `create_snowflake_insights_asset_and_schedule`.
+
+These additional definitions are required because Snowflake usage information is only available after a delay. These definitions automatically handle running a computation on a schedule to ingest Snowflake usage information from the previous hour.
+
+:::note
+Only use `create_snowflake_insights_asset_and_schedule` in a single code location per deployment, as this will handle ingesting usage data from your entire deployment.
+:::
+
+Then, add the following to your `dbt_project.yml`:
+
+This adds a comment to each query, which is used by Dagster+ to attribute cost metrics to the correct assets.
diff --git a/docs/docs-beta/docs/dagster-plus/multi-tenancy.md b/docs/docs-beta/docs/dagster-plus/multi-tenancy.md
new file mode 100644
index 0000000000000..cc3192d1eebb4
--- /dev/null
+++ b/docs/docs-beta/docs/dagster-plus/multi-tenancy.md
@@ -0,0 +1,6 @@
+---
+title: "Multi-tenancy"
+unlisted: true
+---
+
+# Dagster+ multi-tenancy
diff --git a/docs/docs-beta/docs/dagster-plus/saved-views.md b/docs/docs-beta/docs/dagster-plus/saved-views.md
new file mode 100644
index 0000000000000..3f3c0a7bc1ef4
--- /dev/null
+++ b/docs/docs-beta/docs/dagster-plus/saved-views.md
@@ -0,0 +1,68 @@
+---
+title: "Saving asset filters as catalog views"
+displayed_sidebar: "dagsterPlus"
+---
+
+# Saving asset filters as catalog views
+
+Catalog views enable you to filter down your view of the Dagster Asset catalog in Dagster+, allowing you to toggle between sets of assets that you care about most.
+
+You can save catalog views for your own use or share them with your team. For example, you could create views that:
+
+- Filter assets based on ownership to only show those owned by your team
+- Filter assets based on the asset kind to give insight into the status of your ELT ingestion
+- Display assets with a "gold" medallion tag, showing only refined, high-quality data that analysts can use with confidence
+
+In this guide, you'll learn how to create, access, and share catalog views with others.
      +Prerequisites + +- **Organization Admin**, **Admin**, or **Editor** permissions on Dagster+ +- Familiarity with [Assets](/concepts/assets) and [Asset metadata](/guides/metadata) + +
+
+## Create catalog views
+
+To view the Dagster+ Asset catalog, use the **Catalog** button on the top navigation.
+
+In any Dagster+ catalog page, you can access the current catalog view, or create a new catalog view with the catalog view button on the top left of the screen. By default, this button is labeled **All assets**, and has a globe icon.
+
+![Screenshot of the catalog view dropdown](/img/placeholder.svg)
+
+To create a new catalog view, you have two options:
+1. Create a new catalog view from scratch, from the catalog view menu.
+2. Create a new catalog view from your current set of filters.
+
+### Create a new catalog view from scratch
+
+1. Click the catalog view button to open the catalog view menu. From here, click the **New** button.
+2. Give the view a name and optionally, a description and icon.
+3. Click **Add filters** to select filters to apply to the view. Filters can select a subset of assets based on their metadata, tags, kinds, owners, asset groups, or other properties.
+4. To make the view shareable, toggle the **Public view** switch.
+5. Click **Create view** to create the view.
+
+![Screenshot of catalog view customization UI](/img/placeholder.svg)
+
+### Create a new catalog view from your current set of filters
+
+When viewing the global asset lineage or asset list, you can create a new catalog view from your current set of filters.
+
+1. On these pages, select one or more asset filters.
+2. Click **Create new catalog view**, located near the top right of the page. This will open the catalog view creation dialog with your current filters pre-populated.
+3. Give the view a name and optionally, a description and icon.
+4. To make the view shareable, toggle the **Public view** switch.
+5. Click **Create view** to create the view.
+
+## Edit, duplicate, or delete catalog views
+
+1. Click the catalog view button to open the catalog view menu.
+2. Search for the view you want to edit, duplicate, or delete.
+3. Click the **three dot menu** to the right of the view to display available options.
+4. If modifying the view, note that any active filters will automatically be included in the set of changes. You can also change the view's name, description, icon, and sharing settings.
+5. When finished, click **Save changes**.
diff --git a/docs/docs-beta/docs/dagster-plus/settings.md b/docs/docs-beta/docs/dagster-plus/settings.md
new file mode 100644
index 0000000000000..a7c4dad36b30e
--- /dev/null
+++ b/docs/docs-beta/docs/dagster-plus/settings.md
@@ -0,0 +1,6 @@
+---
+title: "Dagster+ settings"
+unlisted: true
+---
+
+# Dagster+ settings
diff --git a/docs/docs-beta/docs/dagster-plus/whats-dagster-plus.md b/docs/docs-beta/docs/dagster-plus/whats-dagster-plus.md
new file mode 100644
index 0000000000000..0fe6016026500
--- /dev/null
+++ b/docs/docs-beta/docs/dagster-plus/whats-dagster-plus.md
@@ -0,0 +1,37 @@
+---
+title: "What's Dagster+?"
+displayed_sidebar: 'dagsterPlus'
+sidebar_position: 1
+---
+
+# What's Dagster+?
+
+Dagster+ is a managed orchestration platform built on top of Dagster's open source engine.
+
+Dagster+ is built to be the most performant, reliable, and cost-effective way for data engineering teams to run Dagster in production.
Dagster+ is also great for students, researchers, or individuals who want to explore Dagster with minimal overhead.
+
+Dagster+ comes in two flavors: a fully [Serverless](/dagster-plus/deployment/serverless) offering and a [Hybrid](/dagster-plus/deployment/hybrid) offering. In both cases, Dagster+ does the hard work of managing your data orchestration control plane. Compared to a [Dagster open source deployment](/guides/), Dagster+ manages:
+
+- Dagster's web UI at https://dagster.plus
+- Metadata stores for data cataloging and cost insights
+- Backend services for orchestration, alerting, and more
+
+Dagster+ Serverless is fully managed and your Dagster code executes in our environment. In Dagster+ Hybrid, you run an execution environment that connects to the Dagster+ control plane.
+
+In addition to managed infrastructure, Dagster+ also adds core capabilities on top of Dagster open source to enable teams building data platforms:
+
+- [Insights](/dagster-plus/insights), a powerful tool for identifying trends in your data platform over time, optimizing cost, and answering questions like "Why does it feel like our pipelines are taking longer this month?".
+- [Alerts](/dagster-plus/deployment/alerts) to a variety of services like Slack, PagerDuty, and email to notify your team of failed runs, data quality issues, and violated SLAs.
+- Authentication, [Role Based Access Control](/dagster-plus/access/rbac), and [Audit Logs](/dagster-plus/access/rbac/audit-logs), which help teams implement data mesh strategies while remaining compliant.
+- [Data Catalog](/dagster-plus/data-catalog), a powerful search-first experience that builds off of Dagster's best-in-class lineage graph to include searching for assets, metadata, column lineage, and more.
+- [Branch Deployments](/dagster-plus/deployment/branch-deployments)
+
+Ready to [get started](/dagster-plus/getting-started)?
+
+## Other resources
+
+- Learn more about Dagster+ [pricing and plan types](https://dagster.io/pricing) or [contact the Dagster team](https://dagster.io/contact)
+- Dagster+ includes support; [click here](https://dagster.io/support) to learn more.
+- Dagster+ is HIPAA compliant, SOC 2 Type II certified, and meets GDPR requirements. Learn more about Dagster+ [security](https://dagster.io/security).
+- Migrate [from a Dagster open source deployment to Dagster+](/guides/self-hosted-to-dagster-plus)
+- Dagster+ [status page](https://dagstercloud.statuspage.io/)
diff --git a/docs/docs-beta/docs/getting-started/installation.md b/docs/docs-beta/docs/getting-started/installation.md
new file mode 100644
index 0000000000000..b8c7d5449b6f3
--- /dev/null
+++ b/docs/docs-beta/docs/getting-started/installation.md
@@ -0,0 +1,75 @@
+---
+title: Installing Dagster
+description: Learn how to install Dagster
+sidebar_position: 20
+sidebar_label: Installation
+---
+
+# Installing Dagster
+
+To follow the steps in this guide, you'll need:
+
+- To install Python 3.8 or higher. **Python 3.11 is recommended**.
+- To install pip, a Python package installer
+
+## Setting up a virtual environment
+
+After installing Python, it's recommended that you set up a virtual environment. This will isolate your Dagster project from the rest of your system and make it easier to manage dependencies.
+
+There are many ways to do this, but this guide will use `venv` as it doesn't require additional dependencies.
+
+
+
+```bash
+python -m venv venv
+source venv/bin/activate
+```
+
+
+```bash
+python -m venv venv
+venv\Scripts\activate
+```
+
+
+
+:::tip
+**Looking for something more powerful than `venv`?** Try `pyenv` or `pyenv-virtualenv`, which can help you manage multiple versions of Python on a single machine. Learn more in the [pyenv GitHub repository](https://github.com/pyenv/pyenv).
+:::
+
+## Installing Dagster
+
+To install Dagster in your virtual environment, open your terminal and run the following command:
+
+```bash
+pip install dagster dagster-webserver
+```
+
+This command will install the core Dagster library and the webserver, which is used to serve the Dagster UI.
+
+## Verifying installation
+
+To verify that Dagster is installed correctly, run the following command:
+
+```bash
+dagster --version
+```
+
+The version number of Dagster should be printed in the terminal:
+
+```bash
+> dagster --version
+dagster, version 1.8.4
+```
+
+## Troubleshooting
+
+If you encounter any issues during the installation process:
+
+- Refer to the [Dagster GitHub repository](https://github.com/dagster-io/dagster) for troubleshooting, or
+- Reach out to the [Dagster community](/about/community)
+
+## Next steps
+
+- Get up and running with your first Dagster project in the [Quickstart](/getting-started/quickstart)
+- Learn to [create data assets in Dagster](/guides/data-assets)
diff --git a/docs/docs-beta/docs/getting-started/quickstart.md b/docs/docs-beta/docs/getting-started/quickstart.md
new file mode 100644
index 0000000000000..3f662e12f056d
--- /dev/null
+++ b/docs/docs-beta/docs/getting-started/quickstart.md
@@ -0,0 +1,157 @@
+---
+title: "Dagster quickstart"
+description: Learn how to quickly get up and running with Dagster
+sidebar_position: 30
+sidebar_label: "Quickstart"
+---
+
+# Build your first Dagster project
+
+Welcome to Dagster! In this guide, you'll use Dagster to create a basic pipeline that:
+
+- Extracts data from a CSV file
+- Transforms the data
+- Loads the transformed data to a new CSV file
+
+## What you'll learn
+
+- How to set up a basic Dagster project
+- How to create a Dagster asset for each step of the Extract, Transform, and Load (ETL) process
+- How to use Dagster's UI to monitor and execute your pipeline
+
+## Prerequisites
+
      + Prerequisites + +To follow the steps in this guide, you'll need: + +- Basic Python knowledge +- Python 3.8+ installed on your system. Refer to the [Installation guide](/getting-started/installation) for information. +
+
+## Step 1: Set up the Dagster environment
+
+1. Open the terminal and create a new directory for your project:
+
+   ```bash
+   mkdir dagster-quickstart
+   cd dagster-quickstart
+   ```
+
+2. Create and activate a virtual environment:
+
+
+
+   ```bash
+   python -m venv venv
+   source venv/bin/activate
+   ```
+
+
+   ```bash
+   python -m venv venv
+   venv\Scripts\activate
+   ```
+
+
+
+3. Install Dagster and the required dependencies:
+
+   ```bash
+   pip install dagster dagster-webserver pandas
+   ```
+
+## Step 2: Create the Dagster project structure
+
+:::info
+The project structure in this guide is simplified to allow you to get started quickly. When creating new projects, use `dagster project scaffold` to generate a complete Dagster project.
+:::
+
+Next, you'll create a basic Dagster project that looks like this:
+
+```
+dagster-quickstart/
+├── quickstart/
+│   ├── __init__.py
+│   └── assets.py
+└── data/
+    └── sample_data.csv
+```
+
+1. To create the files and directories outlined above, run the following:
+
+   ```bash
+   mkdir quickstart data
+   touch quickstart/__init__.py quickstart/assets.py
+   touch data/sample_data.csv
+   ```
+
+2. In the `data/sample_data.csv` file, add the following content:
+
+   ```csv
+   id,name,age,city
+   1,Alice,28,New York
+   2,Bob,35,San Francisco
+   3,Charlie,42,Chicago
+   4,Diana,31,Los Angeles
+   ```
+
+   This CSV will act as the data source for your Dagster pipeline.
+
+## Step 3: Define the assets
+
+Now, create the assets for the ETL pipeline. Open `quickstart/assets.py` and add the following code:
+
+
+
+This may seem unusual if you're used to task-based orchestration. In that case, you'd have three separate steps for extracting, transforming, and loading.
+
+However, in Dagster, you'll model your pipelines using assets as the fundamental building block, rather than tasks.
+
+## Step 4: Run the pipeline
+
+1. In the terminal, navigate to your project's root directory and run:
+
+   ```bash
+   dagster dev -f quickstart/assets.py
+   ```
+
+2. Open your web browser and navigate to `http://localhost:3000`, where you should see the Dagster UI:
+
+   ![Screenshot of the Dagster UI landing page](/images/getting-started/quickstart/dagster-ui-start.png)
+
+3. In the top navigation, click **Assets > View global asset lineage**.
+
+4. Click **Materialize** to run the pipeline.
+
+5. In the popup that displays, click **View**. This will open the **Run details** page, allowing you to view the run as it executes.
+
+   ![Screenshot of Dagster Asset Lineage](/img/placeholder.svg)
+
+   Use the **view buttons** near the top left corner of the page to change how the run is displayed. You can also click the asset to view logs and metadata.
+
+## Step 5: Verify the results
+
+In your terminal, run:
+
+```bash
+cat data/processed_data.csv
+```
+
+You should see the transformed data, including the new `age_group` column:
+
+```bash
+id,name,age,city,age_group
+1,Alice,28,New York,Young
+2,Bob,35,San Francisco,Middle
+3,Charlie,42,Chicago,Senior
+4,Diana,31,Los Angeles,Middle
+```
+
+## Next steps
+
+Congratulations! You've just built and run your first pipeline with Dagster.
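+
+If you want to sanity-check your work, `quickstart/assets.py` could look something like the following sketch: a single asset covering all three ETL steps. The inline example in Step 3 is the authoritative version; this one simply reproduces the expected output above.
+
+```python
+import pandas as pd
+import dagster as dg
+
+
+@dg.asset
+def processed_data() -> None:
+    # Extract: read the raw CSV
+    df = pd.read_csv("data/sample_data.csv")
+
+    # Transform: bucket each person into an age group
+    df["age_group"] = pd.cut(
+        df["age"], bins=[0, 30, 40, 100], labels=["Young", "Middle", "Senior"]
+    )
+
+    # Load: write the result to a new CSV
+    df.to_csv("data/processed_data.csv", index=False)
+```
+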
Next, you can: + +- Continue with the [ETL pipeline tutorial](/tutorial/tutorial-etl) to learn how to build a more complex ETL pipeline +- Learn how to [Think in assets](/concepts/assets/thinking-in-assets) diff --git a/docs/docs-beta/docs/guides/alerting.md b/docs/docs-beta/docs/guides/alerting.md new file mode 100644 index 0000000000000..05d4545e02012 --- /dev/null +++ b/docs/docs-beta/docs/guides/alerting.md @@ -0,0 +1,13 @@ +--- +title: "Setting up alerts" +sidebar_position: 30 +sidebar_label: "Alerting" +unlisted: true +--- + +Alerting if my pipeline didn't execute +Tracking when a run or sensor fails +Knowing when a pipeline never ran +Knowing if a pipeline is running slow, or an asset is late +Knowing if my Dagster instance is having issues + diff --git a/docs/docs-beta/docs/guides/amazon-web-services.md b/docs/docs-beta/docs/guides/amazon-web-services.md new file mode 100644 index 0000000000000..74f8b5db5e704 --- /dev/null +++ b/docs/docs-beta/docs/guides/amazon-web-services.md @@ -0,0 +1,5 @@ +--- +title: "Deploy to Amazon Web Services" +sidebar_position: 10 +unlisted: true +--- diff --git a/docs/docs-beta/docs/guides/apis.md b/docs/docs-beta/docs/guides/apis.md new file mode 100644 index 0000000000000..349b0aba3ebc7 --- /dev/null +++ b/docs/docs-beta/docs/guides/apis.md @@ -0,0 +1,70 @@ +--- +title: Connecting to APIs +sidebar_position: 20 +sidebar_label: API connections +--- + +When building a data pipeline, you'll likely need to connect to several external APIs, each with its own specific configuration and behavior. This guide demonstrates how to standardize your API connections and customize their configuration using Dagster resources. + +
      + Prerequisites + +To follow the steps in this guide, you'll need: + +- Familiarity with [Assets](/concepts/assets) +- Familiarity with [Resources](/concepts/resources) +- To install the `requests` library: + ```bash + pip install requests + ``` + +
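+
+The inline code samples for the steps below aren't reproduced here. As a rough preview of where Step 1 starts, a first version of the resource might look like this sketch. The endpoint and response fields are assumptions based on the public sunrise-sunset.org API:
+
+```python
+import dagster as dg
+import requests
+
+
+class SunResource(dg.ConfigurableResource):
+    """Returns the sunrise time for a hard-coded location."""
+
+    @property
+    def query_string(self) -> str:
+        # Coordinates for San Francisco International Airport (assumed endpoint)
+        return "https://api.sunrise-sunset.org/json?lat=37.615223&lng=-122.389977"
+
+    def sunrise(self) -> str:
+        data = requests.get(self.query_string, timeout=10).json()
+        return data["results"]["sunrise"]
+```
+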
+
+## Step 1: Write a resource that connects to an API
+
+This example fetches the sunrise time for a given location from a REST API.
+
+Using `ConfigurableResource`, define a Dagster resource with a method that returns the sunrise time for a location. In the first version of this resource, the location is hard-coded to San Francisco International Airport.
+
+
+
+## Step 2: Use the resource in an asset
+
+To use the resource, provide it as a parameter to an asset and include it in the `Definitions` object:
+
+
+
+When you materialize `sfo_sunrise`, Dagster will provide an initialized `SunResource` to the `sun_resource` parameter.
+
+## Step 3: Configure the resource
+
+Many APIs have configuration you can set to customize your usage. The following example updates the resource with configuration to allow for setting the query location:
+
+
+
+The configurable resource can be provided to an asset exactly as before. When the resource is initialized, you can pass values for each of the configuration options.
+
+When you materialize `sfo_sunrise`, Dagster will provide a `SunResource` initialized with the configuration values to the `sun_resource` parameter.
+
+## Step 4: Source configuration using environment variables
+
+Resources can also be configured with environment variables. You can use Dagster's built-in `EnvVar` class to source configuration values from environment variables at materialization time.
+
+In this example, there's a new `home_sunrise` asset. Rather than hard-coding the location of your home, you can set it in environment variables and configure the `SunResource` by reading those values:
+
+
+
+When you materialize `home_sunrise`, Dagster will read the values set for the `HOME_LATITUDE`, `HOME_LONGITUDE`, and `HOME_TIMEZONE` environment variables and initialize a `SunResource` with those values.
+
+The initialized `SunResource` will be provided to the `sun_resource` parameter.
+
+:::note
+You can also fetch environment variables using the `os` library. Dagster treats each approach to fetching environment variables differently, such as when they're fetched or how they display in the UI. Refer to the [Environment variables guide](/todo) for more information.
+:::
+
+## Next steps
+
+- [Authenticate to a resource](/todo)
+- [Use different resources in different execution environments](/todo)
+- [Set environment variables in Dagster+](/todo)
+- Learn what [Dagster-provided resources](/todo) are available
diff --git a/docs/docs-beta/docs/guides/asset-checks.md b/docs/docs-beta/docs/guides/asset-checks.md
new file mode 100644
index 0000000000000..3ee1edb59166c
--- /dev/null
+++ b/docs/docs-beta/docs/guides/asset-checks.md
@@ -0,0 +1,84 @@
+---
+title: "Testing assets with Asset checks"
+sidebar_position: 10
+sidebar_label: "Asset checks"
+---
+
+Asset checks are tests that verify specific properties of your data assets, allowing you to execute data quality checks on your data. For example, you can create checks to:
+
+- Ensure a particular column doesn't contain null values
+- Verify that a tabular asset adheres to a specified schema
+- Check if an asset's data needs refreshing
+
+Each asset check should test only a single asset property to keep tests uncomplicated, reusable, and easy to track over time.
      +Prerequisites + +To follow this guide, you'll need: + +- Familiarity with [Assets](/concepts/assets) +
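+
+For orientation before the detailed sections, a minimal asset check looks something like the following sketch. The `orders` asset and the column it checks are invented:
+
+```python
+import dagster as dg
+
+
+@dg.asset
+def orders() -> None:
+    # Produce the orders table in your storage system here.
+    ...
+
+
+@dg.asset_check(asset=orders)
+def orders_id_has_no_nulls() -> dg.AssetCheckResult:
+    # Replace with a real null-count query against the orders table.
+    null_count = 0
+    return dg.AssetCheckResult(
+        passed=null_count == 0,
+        metadata={"null_count": null_count},
+    )
+
+
+defs = dg.Definitions(assets=[orders], asset_checks=[orders_id_has_no_nulls])
+```
+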
+
+## Getting started
+
+To get started with asset checks, follow these general steps:
+
+1. **Define an asset check:** Asset checks are typically defined using the `@asset_check` or `@multi_asset_check` decorator and run either within an asset or separate from the asset.
+2. **Pass the asset checks to the `Definitions` object:** Asset checks must be added to `Definitions` for Dagster to recognize them.
+3. **Choose how to execute asset checks**: By default, all jobs targeting an asset will also run associated checks, although you can also run asset checks on their own through the Dagster UI.
+4. **View asset check results in the UI**: Asset check results will appear in the UI and can be customized through the use of metadata and severity levels.
+5. **Alert on failed asset check results**: If you are using Dagster+, you can choose to alert on asset checks.
+
+## Defining a single asset check \{#single-check}
+
+:::tip
+Dagster's dbt integration can model existing dbt tests as asset checks. Refer to the [dagster-dbt documentation](/integrations/dbt) for more information.
+:::
+
+An asset check is defined using the `@asset_check` decorator.
+
+The following example defines an asset check on an asset that fails if the `order_id` column of the asset contains a null value. The asset check will run after the asset has been materialized.
+
+
+
+## Defining multiple asset checks \{#multiple-checks}
+
+In most cases, checking the data quality of an asset will require multiple checks.
+
+The following example defines two asset checks using the `@multi_asset_check` decorator:
+
+- One check that fails if the `order_id` column of the asset contains a null value
+- Another check that fails if the `item_id` column of the asset contains a null value
+
+In this example, both asset checks will run in a single operation after the asset has been materialized.
+
+
+
+## Programmatically generating asset checks \{#factory-pattern}
+
+Defining multiple checks can also be done using a factory pattern. The example below defines the same two asset checks as in the previous example, but this time using a factory pattern and the `@multi_asset_check` decorator.
+
+
+
+## Blocking downstream materialization
+
+By default, if a parent's asset check fails during a run, the run will continue and downstream assets will be materialized. To prevent this behavior, set the `blocking` argument to `True` in the `@asset_check` decorator.
+
+In the example below, if the `orders_id_has_no_nulls` check fails, the downstream `augmented_orders` asset won't be materialized.
+
+
+
+## Scheduling and monitoring asset checks
+
+In some cases, running asset checks separately from the job materializing the assets can be useful. For example, running all data quality checks once a day and sending an alert if they fail. This can be achieved using schedules and sensors.
+
+In the example below, two jobs are defined: one for the asset and another for the asset check. Schedules are defined to materialize the asset and execute the asset check independently. A sensor is defined to send an email alert when the asset check job fails.
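+
+A sketch of that wiring, building on the `orders` asset and `orders_id_has_no_nulls` check sketched earlier (the email-alert sensor and cron cadences are illustrative; `checks_for_assets` and `without_checks` are the selection helpers assumed here):
+
+```python
+import dagster as dg
+
+# Materialize the asset without running its checks
+asset_job = dg.define_asset_job(
+    "orders_job",
+    selection=dg.AssetSelection.assets(orders).without_checks(),
+)
+# Run only the checks for the asset
+check_job = dg.define_asset_job(
+    "orders_check_job",
+    selection=dg.AssetSelection.checks_for_assets(orders),
+)
+
+asset_schedule = dg.ScheduleDefinition(job=asset_job, cron_schedule="0 * * * *")   # hourly
+check_schedule = dg.ScheduleDefinition(job=check_job, cron_schedule="0 6 * * *")  # daily at 6 AM
+
+defs = dg.Definitions(
+    assets=[orders],
+    asset_checks=[orders_id_has_no_nulls],
+    jobs=[asset_job, check_job],
+    schedules=[asset_schedule, check_schedule],
+)
+```
+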
+ + + +## Next steps + +- Learn more about assets in [Understanding Assets](/concepts/assets) +- Learn more about asset checks in [Understanding Asset Checks](/concepts/assets/asset-checks) +- Learn how to use [Great Expectations with Dagster](https://dagster.io/blog/ensuring-data-quality-with-dagster-and-great-expectations) diff --git a/docs/docs-beta/docs/guides/asset-dependencies.md b/docs/docs-beta/docs/guides/asset-dependencies.md new file mode 100644 index 0000000000000..421a61a08fece --- /dev/null +++ b/docs/docs-beta/docs/guides/asset-dependencies.md @@ -0,0 +1,122 @@ +--- +title: Pass data between assets +description: Learn how to pass data between assets in Dagster +sidebar_position: 30 +last_update: + date: 2024-08-11 + author: Pedram Navid +--- + +In Dagster, assets are the building blocks of your data pipeline and it's common to want to pass data between them. This guide will help you understand how to pass data between assets. + +There are three ways of passing data between assets: + +- Explicitly managing data, by using external storage +- Implicitly managing data, using I/O managers +- Avoiding passing data between assets altogether by combining several tasks into a single asset + +This guide walks through all three methods. + +--- + +
      + Prerequisites + +To follow the steps in this guide, you'll need: + +- A basic understanding of Dagster concepts such as assets and resources +- Dagster and the `dagster-duckdb-pandas` package installed +
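+
+The embedded examples in this guide aren't reproduced here. For the first method, explicit external storage, the shape is roughly the following sketch, using SQLite and invented table and file names:
+
+```python
+import sqlite3
+
+import dagster as dg
+
+
+@dg.asset
+def raw_users() -> None:
+    # Write data directly to external storage (a SQLite table here).
+    # A real pipeline would replace the table rather than append on each run.
+    with sqlite3.connect("users.db") as conn:
+        conn.execute("CREATE TABLE IF NOT EXISTS users (name TEXT)")
+        conn.execute("INSERT INTO users VALUES ('alice'), ('bob')")
+
+
+@dg.asset(deps=[raw_users])  # the dependency is declared explicitly; no data is passed in memory
+def user_count() -> None:
+    with sqlite3.connect("users.db") as conn:
+        count = conn.execute("SELECT COUNT(*) FROM users").fetchone()[0]
+    dg.get_dagster_logger().info(f"There are {count} users")
+```
+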
+
+---
+
+## Move data assets explicitly using external storage
+
+A common and recommended approach to passing data between assets is explicitly managing data using external storage. This example pipeline uses a SQLite database as external storage:
+
+
+
+In this example, the first asset opens a connection to the SQLite database and writes data to it. The second asset opens a connection to the same database and reads data from it. The dependency between the first asset and the second asset is made explicit through the asset's `deps` argument.
+
+The benefits of this approach are:
+
+- It's explicit and easy to understand how data is stored and retrieved
+- You have maximum flexibility in terms of how and where data is stored, for example, based on environment
+
+The downsides of this approach are:
+
+- You need to manage connections and transactions manually
+- You need to handle errors and edge cases, for example, if the database is down or if a connection is closed
+
+## Move data between assets implicitly using I/O managers
+
+Dagster's I/O managers are a powerful feature that manages data between assets by defining how data is read from and written to external storage. They help separate business logic from I/O operations, reducing boilerplate code and making it easier to change where data is stored.
+
+I/O managers handle:
+
+1. **Input**: Reading data from storage and loading it into memory for use by dependent assets.
+2. **Output**: Writing data to the configured storage location.
+
+For a deeper understanding of I/O managers, check out the [Understanding I/O managers](/concepts/io-managers) guide.
+
+
+
+In this example, a `DuckDBPandasIOManager` is instantiated to run using a local file. The I/O manager handles both reading and writing to the database.
+
+:::warning
+
+This example works for local development, but in a production environment each step would execute in a separate environment and would not have access to the same file system. Consider a cloud-hosted environment for production purposes.
+
+:::
+
+The `people()` and `birds()` assets both write their dataframes to DuckDB for persistent storage. The `combined_data()` asset requests data from both assets by adding them as parameters to the function, and the I/O manager handles reading them from DuckDB and making them available to the `combined_data` function as dataframes. **Note**: When you use I/O managers, you don't need to manually add the asset's dependencies through the `deps` argument.
+
+## Avoid passing data between assets by combining assets
+
+In some cases, you may find that you can avoid passing data between assets by carefully considering how you have modeled your pipeline. Consider this example:
+
+
+
+This example downloads a zip file from Google Drive, unzips it, and loads the data into a Pandas DataFrame. It relies on each asset running on the same file system to perform these operations.
+
+The assets are modeled as tasks, rather than as data assets. For more information on the difference between tasks and data assets, check out the [Thinking in Assets](/concepts/assets/thinking-in-assets) guide.
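+
+In outline, the task-style version might look like this sketch (the download, unzip, and load bodies are placeholders):
+
+```python
+import dagster as dg
+
+
+@dg.asset
+def download_files() -> None:
+    # Download the zip file from Google Drive to local disk, e.g. with requests.
+    ...
+
+
+@dg.asset(deps=[download_files])
+def unzip_files() -> None:
+    # Unzip the downloaded archive, e.g. with zipfile, relying on the same file system.
+    ...
+
+
+@dg.asset(deps=[unzip_files])
+def load_data() -> None:
+    # Read the extracted CSV into a Pandas DataFrame and load it somewhere useful.
+    ...
+```
+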
In this refactor, the `download_files`, `unzip_files`, and `load_data` assets are combined into a single asset, `my_dataset`. This asset downloads the files, unzips them, and loads the data into a data warehouse.
+
+
+
+This approach still passes data around explicitly, but it does so within a single asset rather than across assets. This pipeline still assumes there's enough disk space and memory available to handle the data, but for smaller datasets, it can work well.
+
+The benefits of this approach are:
+
+- All the computation that defines how an asset is created is contained within a single asset, making it easier to understand and maintain
+- It can be faster than relying on external storage, and doesn't require the overhead of setting up additional compute instances.
+
+The downsides of this approach are:
+
+- It makes certain assumptions about how much data is being processed
+- It can be difficult to reuse functions across assets, since they're tightly coupled to the data they produce
+- It may not always be possible to swap functionality based on the environment you are running in. For example, if you are running in a cloud environment, you may not have access to the local file system.
+
+---
+
+## Related resources
+
+{/* TODO: add links to relevant API documentation here. */}
diff --git a/docs/docs-beta/docs/guides/asset-factories-with-deps.md b/docs/docs-beta/docs/guides/asset-factories-with-deps.md
new file mode 100644
index 0000000000000..3aec89d72554e
--- /dev/null
+++ b/docs/docs-beta/docs/guides/asset-factories-with-deps.md
@@ -0,0 +1,36 @@
+---
+title: 'Programmatically defining dependencies using asset factories'
+sidebar_position: 60
+sidebar_label: 'Asset Factories (2)'
+---
+
+In data engineering, it's often helpful to reuse code to define similar assets. For example, you may want to represent every file in a directory as an asset.
+
+Additionally, you may be serving stakeholders who aren't familiar with Python or Dagster. They may prefer interacting with assets using a domain-specific language (DSL) built on top of a configuration language such as YAML.
+
+Using an asset factory reduces complexity and creates a pluggable entry point to define additional assets.
      + Prerequisites + +This guide builds upon the concepts in the [asset factories](/guides/asset-factories) tutorial. +
      + +--- + +## Building an asset factory in Python + +Imagine a data analytics team that maintains a large number of tables. To support analytics needs, the team runs queries and constructs new tables from the results. + +Each table can be represented in YAML by a name, upstream asset dependencies, and a query: + + +Here's how you might add Python logic to define these assets in Dagster. + + + +## Defining dependencies between factory assets and regular assets + +Here's how you might add Python logic to define a Dagster asset downstream of factory assets: + + diff --git a/docs/docs-beta/docs/guides/asset-factories.md b/docs/docs-beta/docs/guides/asset-factories.md new file mode 100644 index 0000000000000..df6c615f91df4 --- /dev/null +++ b/docs/docs-beta/docs/guides/asset-factories.md @@ -0,0 +1,72 @@ +--- +title: 'Creating domain-specific languages with asset factories' +sidebar_position: 60 +sidebar_label: 'Asset factories' +--- + +Often in data engineering, you'll find yourself needing to create a large number of similar assets. For example: + +- A set of database tables all have the same schema +- A set of files in a directory all have the same format + +It's also possible you're serving stakeholders who aren't familiar with Python or Dagster. They may prefer interacting with assets using a domain-specific language (DSL) built on top of a configuration language such as YAML. + +The asset factory pattern can solve both of these problems. + +
+ Prerequisites
+
+To follow the steps in this guide, you'll need:
+
+- Familiarity with:
+  - [Assets](/guides/data-assets)
+  - [Resources](/concepts/resources)
+  - SQL, YAML, and Amazon Web Services (AWS) S3
+  - [Pydantic](https://docs.pydantic.dev/latest/) and [Jinja2](https://jinja.palletsprojects.com/en/3.1.x/)
+- A Python virtual environment with the following dependencies installed:
+
+  ```bash
+  pip install dagster dagster-aws duckdb pyyaml pydantic jinja2
+  ```
+
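+
+Before the full S3 example, note the core shape of the pattern: an asset factory is just a function from configuration to asset definitions. A minimal sketch, with invented table names and queries:
+
+```python
+import dagster as dg
+
+
+def build_table_asset(table_name: str, query: str) -> dg.AssetsDefinition:
+    @dg.asset(name=table_name)
+    def _table_asset() -> None:
+        # In a real factory, run `query` against your warehouse and store the result.
+        dg.get_dagster_logger().info(f"Would build {table_name} with: {query}")
+
+    return _table_asset
+
+
+defs = dg.Definitions(
+    assets=[
+        build_table_asset("daily_sales", "SELECT * FROM orders"),
+        build_table_asset("daily_signups", "SELECT * FROM users"),
+    ]
+)
+```
+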
      + +## Building an asset factory in Python + +Let's imagine a team that often has to perform the same repetitive ETL task: download a CSV file from S3, run a basic SQL query on it, and then upload the result as a new file back to S3. + +To automate this process, you might define an asset factory in Python like the following: + + + +The asset factory pattern is essentially a function that takes in some configuration and returns `dg.Definitions`. + +## Configuring an asset factory with YAML + +Now, the team wants to be able to configure the asset factory using YAML instead of Python, with a file like this: + + + +To implement this, parse the YAML file and use it to create the S3 resource and ETL jobs: + + + +## Improving usability with Pydantic and Jinja + +There are a few problems with the current approach: + +1. **The YAML file isn't type-checked**, so it's easy to make mistakes that will cause cryptic `KeyError`s +2. **The YAML file contains secrets**. Instead, it should reference environment variables. + +To solve these problems, you can use Pydantic to define a schema for the YAML file and Jinja to template the YAML file with environment variables. + +Here's what the new YAML file might look like. Note how Jinja templating is used to reference environment variables: + + + +And the Python implementation: + + + +## Next steps + +TODO diff --git a/docs/docs-beta/docs/guides/asset-sensors.md b/docs/docs-beta/docs/guides/asset-sensors.md new file mode 100644 index 0000000000000..1685d0714218e --- /dev/null +++ b/docs/docs-beta/docs/guides/asset-sensors.md @@ -0,0 +1,105 @@ +--- +title: Triggering cross-job dependencies with Asset Sensors +sidebar_position: 30 +sidebar_label: Cross-job dependencies +--- + +Asset sensors in Dagster provide a powerful mechanism for monitoring asset materializations and triggering downstream computations or notifications based on those events. + +This guide covers the most common use cases for asset sensors, such as defining cross-job and cross-code location dependencies. + +
      +Prerequisites + +To follow this guide, you'll need: + +- Familiarity with [Assets](/concepts/assets) +- Familiarity with [Ops and Jobs](/concepts/ops-jobs) + +
+
+## Getting started
+
+Asset sensors monitor an asset for new materialization events and target a job when a new materialization occurs.
+
+Typically, asset sensors return a `RunRequest` when a new job is to be triggered. However, they may provide a `SkipReason` if the asset materialization doesn't trigger a job.
+
+For example, you may wish to monitor an asset that's materialized daily, but don't want to trigger jobs on holidays.
+
+## Cross-job and cross-code location dependencies
+
+Asset sensors enable dependencies across different jobs and different code locations. This flexibility allows for modular and decoupled workflows.
+
+```mermaid
+graph LR;
+
+AssetToWatch(AssetToWatch) --> AssetSensor(AssetSensor);
+AssetSensor --> Job(Job);
+Job --> Asset1(Asset1);
+Job --> Asset2(Asset2);
+
+subgraph CodeLocationA
+    AssetToWatch
+end
+
+subgraph CodeLocationB
+    AssetSensor
+    Job
+    Asset1
+    Asset2
+end
+```
+
+This is an example of an asset sensor that triggers a job when an asset is materialized. The `daily_sales_data` asset is in the same code location as the job and other assets in this example, but the same pattern can be applied to assets in different code locations.
+
+
+
+## Customize evaluation logic
+
+You can customize the evaluation function of an asset sensor to include specific logic for deciding when to trigger a run. This allows for fine-grained control over the conditions under which downstream jobs are executed.
+
+```mermaid
+stateDiagram-v2
+  direction LR
+
+  classDef userDefined fill: lightblue
+
+  [*] --> AssetMaterialization
+  AssetMaterialization --> [*]
+
+  AssetMaterialization --> UserEvaluationFunction:::userDefined
+  UserEvaluationFunction: User Evaluation Function
+
+  UserEvaluationFunction --> RunRequest
+  UserEvaluationFunction --> SkipReason
+  SkipReason --> [*]
+  RunRequest --> [*]
+
+  class UserEvaluationFunction userDefined
+  classDef userDefined fill: var(--theme-color-accent-lavendar)
+```
+
+In the following example, the `@asset_sensor` decorator defines a custom evaluation function that returns a `RunRequest` object when the asset is materialized and certain metadata is present; otherwise, it skips the run.
+
+
+
+## Trigger a job with configuration
+
+By providing a configuration to the `RunRequest` object, you can trigger a job with a specific configuration. This is useful when you want to trigger a job with custom parameters based on custom logic you define.
+
+For example, you might use a sensor to trigger a job when an asset is materialized, but also pass metadata about that materialization to the job:
+
+
+
+## Monitor multiple assets
+
+When building a pipeline, you may want to monitor multiple assets with a single sensor. This can be accomplished with a multi-asset sensor.
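+
+As a minimal sketch of the pattern (the asset keys and the `downstream_job` selection are assumptions, and the full example referenced below is more complete):
+
+```python
+import dagster as dg
+
+# Assumes a `summary_report` asset exists somewhere in the code location.
+downstream_job = dg.define_asset_job("downstream_job", selection="summary_report")
+
+
+@dg.multi_asset_sensor(
+    monitored_assets=[dg.AssetKey("orders"), dg.AssetKey("customers")],
+    job=downstream_job,
+)
+def any_update_sensor(context: dg.MultiAssetSensorEvaluationContext):
+    # Request a run if any monitored asset has a new materialization since the cursor.
+    records = context.latest_materialization_records_by_key()
+    if any(records.values()):
+        context.advance_all_cursors()
+        return dg.RunRequest()
+    return dg.SkipReason("No new materializations for the monitored assets.")
+```
+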
+ +The following example uses a `@multi_asset_sensor` to monitor multiple assets and trigger a job when any of the assets are materialized: + + + +## Next steps + +- Learn more about asset sensors in [Understanding Automation](/concepts/automation) +- Explore [Declarative Automation](/concepts/automation/declarative-automation) as an alternative to asset sensors diff --git a/docs/docs-beta/docs/guides/authentication.md b/docs/docs-beta/docs/guides/authentication.md new file mode 100644 index 0000000000000..d089c393abc7d --- /dev/null +++ b/docs/docs-beta/docs/guides/authentication.md @@ -0,0 +1,5 @@ +--- +title: Authenticating to a resource +sidebar_position: 60 +unlisted: true +--- diff --git a/docs/docs-beta/docs/guides/automation.md b/docs/docs-beta/docs/guides/automation.md new file mode 100644 index 0000000000000..8978835abd410 --- /dev/null +++ b/docs/docs-beta/docs/guides/automation.md @@ -0,0 +1,102 @@ +--- +title: "Automating Pipelines" +description: Learn how to automate your data pipelines. +last_update: + date: 2024-08-12 + author: Pedram Navid +--- + +Automation is key to building reliable, efficient data pipelines. This guide provides a simplified overview of the main ways to automate processes in Dagster, helping you choose the right method for your needs. You will find links to more detailed guides for each method below. + +
      + Prerequisites + +Before continuing, you should be familiar with: + +- [Asset definitions](/concepts/assets) +- [Jobs](/concepts/ops-jobs) + +
      + +## Automation methods overview + +Dagster offers several ways to automate pipeline execution: + +1. [Schedules](#schedules) - Run jobs at specified times +2. [Sensors](#sensors) - Trigger runs based on events +3. [Asset Sensors](#asset-sensors) - Trigger jobs when specific assets materialize + +## Schedules + +Schedules allow you to run jobs at specified times, like "every Monday at 9 AM" or "daily at midnight." +A schedule combines a selection of assets, known as a [Job](/concepts/ops-jobs), and a [cron expression](https://en.wikipedia.org/wiki/Cron) +to define when the job should be run. + +To make creating cron expressions easier, you can use an online tool like [Crontab Guru](https://crontab.guru/). + +### When to use schedules + +- You need to run jobs at regular intervals +- You want basic time-based automation + +For examples of how to create schedules, see [How-To Use Schedules](/guides/schedules). + +For more information about how Schedules work, see [About Schedules](/concepts/schedules). + +## Sensors + +Sensors allow you to trigger runs based on events or conditions that you define, like a new file arriving or an external system status change. + +You must provide a function that the sensor will use to determine if it should trigger a run. + +Like schedules, sensors operate on a selection of assets, known as [Jobs](/concepts/ops-jobs) and can either start a pipeline through a Run or log a reason for not starting a pipeline using a SkipReason. + +### When to use sensors + +- You need event-driven automation +- You want to react to changes in external systems + +For more examples of how to create sensors, see the [How-To Use Sensors](/guides/sensors) guide. + +For more information about how sensors work, see the [About Sensors](/concepts/sensors) concept page. + +## Asset sensors + +Asset Sensors trigger jobs when specified assets are materialized, allowing you to create dependencies between jobs or code locations. + +### When to use Asset sensors + +- You need to trigger jobs based on asset materializations +- You want to create dependencies between different jobs or code locations + +For more examples of how to create asset sensors, see the [How-To Use Asset Sensors](/guides/asset-sensors) guide. + +## Declarative Automation + +{/* TODO: add content */} + +## How to choose the right automation method + +Consider these factors when selecting an automation method: + +1. **Pipeline Structure**: Are you working primarily with assets, ops, or a mix? +2. **Timing Requirements**: Do you need regular updates or event-driven processing? +3. **Data Characteristics**: Is your data partitioned? Do you need to update historical data? +4. **System Integration**: Do you need to react to external events or systems? 
+
+Use this table to help guide your decision:
+
+| Method                 | Best For                                | Works With          |
+| ---------------------- | --------------------------------------- | ------------------- |
+| Schedules              | Regular, time-based job runs            | Assets, Ops, Graphs |
+| Sensors                | Event-driven automation                 | Assets, Ops, Graphs |
+| Declarative Automation | Asset-centric, condition-based updates  | Assets only         |
+| Asset Sensors          | Cross-job/location asset dependencies   | Assets only         |
+
+## Next steps
+
+- Learn more about [advanced scheduling patterns] - {/* TODO ADD LINK */}
+- Explore [complex sensor examples] - {/* TODO ADD LINK */}
+- Dive into [Declarative Automation best practices] - {/* TODO ADD LINK */}
+
+By understanding and effectively using these automation methods, you can build more efficient data pipelines that respond to your specific needs and constraints.
diff --git a/docs/docs-beta/docs/guides/backfill.md b/docs/docs-beta/docs/guides/backfill.md
new file mode 100644
index 0000000000000..220d06051df23
--- /dev/null
+++ b/docs/docs-beta/docs/guides/backfill.md
@@ -0,0 +1,6 @@
+---
+title: Backfilling data
+sidebar_position: 60
+sidebar_label: "Backfilling"
+unlisted: true
+---
diff --git a/docs/docs-beta/docs/guides/build-your-own.md b/docs/docs-beta/docs/guides/build-your-own.md
new file mode 100644
index 0000000000000..c526e0ca0e8fd
--- /dev/null
+++ b/docs/docs-beta/docs/guides/build-your-own.md
@@ -0,0 +1,5 @@
+---
+title: "Building an asset integration"
+sidebar_position: 70
+unlisted: true
+---
diff --git a/docs/docs-beta/docs/guides/cloud-services.md b/docs/docs-beta/docs/guides/cloud-services.md
new file mode 100644
index 0000000000000..04ccde2824574
--- /dev/null
+++ b/docs/docs-beta/docs/guides/cloud-services.md
@@ -0,0 +1,5 @@
+---
+title: Connecting to cloud services
+sidebar_position: 25
+unlisted: true
+---
diff --git a/docs/docs-beta/docs/guides/code-locations.md b/docs/docs-beta/docs/guides/code-locations.md
new file mode 100644
index 0000000000000..7316b34d8b440
--- /dev/null
+++ b/docs/docs-beta/docs/guides/code-locations.md
@@ -0,0 +1,8 @@
+---
+title: "Managing code locations with Definitions"
+sidebar_position: 50
+sidebar_label: "Code locations and Definitions"
+unlisted: true
+---
+
+# Manage code locations
diff --git a/docs/docs-beta/docs/guides/configuring-assets.md b/docs/docs-beta/docs/guides/configuring-assets.md
new file mode 100644
index 0000000000000..24ae1f74b4d1d
--- /dev/null
+++ b/docs/docs-beta/docs/guides/configuring-assets.md
@@ -0,0 +1,48 @@
+---
+title: Configuring assets
+sidebar_label: Asset runs
+sidebar_position: 50
+---
+
+The Dagster UI is commonly used to manually materialize assets, backfill historical data, debug a production issue, or perform some other one-off task.
+
+You'll often want to be able to adjust parameters when materializing assets, which can be accomplished with Dagster's asset configuration system.
      + Prerequisites + +To follow the steps in this guide, you'll need familiarity with: + +- [Assets](/guides/data-assets) +- [Pydantic](https://docs.pydantic.dev/latest/) + +
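+
+The inline examples below aren't reproduced here; the core idea looks like this sketch (the config class and asset names are invented):
+
+```python
+import dagster as dg
+
+
+class LookbackConfig(dg.Config):
+    # Default lookback window; override it at launch time in the Launchpad.
+    lookback_days: int = 7
+
+
+@dg.asset
+def recent_signups(config: LookbackConfig) -> None:
+    dg.get_dagster_logger().info(
+        f"Computing signups over the last {config.lookback_days} days"
+    )
+```
+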
+
+## Making assets configurable
+
+For an asset to be configurable, first define a schema that inherits from the Dagster `Config` class.
+
+For example, say you want to allow your team to change the lookback time window for the computation that materializes an asset:
+
+
+
+## Specifying config using the Dagster UI
+
+:::note
+Run configurations reference an `op`, which is the underlying compute associated with an asset. Refer to the [Ops vs Assets](/concepts/ops-jobs/ops-vs-assets) guide for more information.
+:::
+
+When launching a run using the Launchpad in the UI, you can provide a run config file as YAML or JSON that overrides the default configuration for your asset.
+
+On any page with a **Materialize** button, click the **options menu > Open launchpad** to access the Launchpad:
+
+![Highlighted Open Launchpad option in the Materialize options menu of the Dagster UI](/img/placeholder.svg)
+
+This will open the Launchpad, where you can scaffold the config, customize its values, and manually materialize the asset:
+
+![Dagster Launchpad that configures an asset to have a lookback window of 7 days](/img/placeholder.svg)
+
+## Next steps
+
+- Learn more about Dagster [assets](/concepts/assets)
+- Connect to external [APIs](/guides/apis) and [databases](/guides/databases) with resources
diff --git a/docs/docs-beta/docs/guides/configuring.md b/docs/docs-beta/docs/guides/configuring.md
new file mode 100644
index 0000000000000..30f910f45bf31
--- /dev/null
+++ b/docs/docs-beta/docs/guides/configuring.md
@@ -0,0 +1,6 @@
+---
+title: Configuring pipelines and runs
+sidebar_label: Configuring pipelines
+sidebar_position: 40
+unlisted: true
+---
diff --git a/docs/docs-beta/docs/guides/custom-logging.md b/docs/docs-beta/docs/guides/custom-logging.md
new file mode 100644
index 0000000000000..79fe115a23cb0
--- /dev/null
+++ b/docs/docs-beta/docs/guides/custom-logging.md
@@ -0,0 +1,136 @@
+---
+title: "Setting up custom logging"
+sidebar_position: 20
+---
+
+# Custom loggers
+
+Custom loggers are used to alter the structure of the logs being produced by your Dagster pipelines. For example, JSON logs can be produced so they can be processed more easily by log management systems.
      + Prerequisites + +To follow the steps in this guide, you'll need: + +- A basic understanding of Dagster concepts such as assets, jobs and definitions +- A working knowledge of the Python logging module + +
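+
+Before the steps, it may help to see the overall shape of a custom logger definition. This is a sketch only, not the built-in implementation; the logger name and format are invented:
+
+```python
+import logging
+
+import dagster as dg
+
+
+@dg.logger(config_schema={"log_level": dg.Field(str, default_value="INFO")})
+def simple_console_logger(init_context) -> logging.Logger:
+    """A custom logger is a function from an init context to a stdlib Logger."""
+    level = init_context.logger_config["log_level"]
+    logger_ = logging.Logger("simple_console_logger", level=level)
+    handler = logging.StreamHandler()
+    handler.setFormatter(logging.Formatter("%(asctime)s %(levelname)s %(message)s"))
+    logger_.addHandler(handler)
+    return logger_
+```
+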
+
+
+## Step 1: Add a prebuilt custom logger to your jobs
+
+This step shows how to add an existing custom logger, the `json_console_logger`, to your jobs. This will override the default `colored_console_logger` and produce logs in JSON format.
+
+
+### Add the custom logger to your asset jobs
+
+The following example shows how to add the custom logger to your code location definitions and configure an asset job to use it.
+
+
+
+
+### Add the custom logger to your ops-based jobs
+
+Configuring an ops job to use the custom logger differs slightly from the asset job example. The following example shows how:
+
+
+
+
+### Expected `json_console_logger` output
+
+The `json_console_logger` will emit an exhaustive, single-line JSON document containing the full log record, including the Dagster metadata fields.
+
+Here's an example of the output for reference, formatted for readability:
+
+```json
+{
+  "args": [],
+  "created": 1725455358.2311811,
+  "dagster_meta": {
+    "dagster_event": null,
+    "dagster_event_batch_metadata": null,
+    "job_name": "hackernews_topstory_ids_job",
+    "job_tags": {
+      ".dagster/grpc_info": "{\"host\": \"localhost\", \"socket\": \"/var/folders/5b/t062dlpj3j716l4w1d3yq6vm0000gn/T/tmpds_hvzm9\"}",
+      "dagster/preset_name": "default",
+      "dagster/solid_selection": "*"
+    },
+    "log_message_id": "3850cfb8-f9fb-458a-a986-3efd26e4b859",
+    "log_timestamp": "2024-09-04T13:09:18.225289",
+    "op_name": "get_hackernews_topstory_ids",
+    "orig_message": "Compute Logger - Got 500 top stories.",
+    "resource_fn_name": null,
+    "resource_name": null,
+    "run_id": "11528a21-38d5-43e7-8b13-993e47ce7f7d",
+    "step_key": "get_hackernews_topstory_ids"
+  },
+  "exc_info": null,
+  "exc_text": null,
+  "filename": "log_manager.py",
+  "funcName": "emit",
+  "levelname": "INFO",
+  "levelno": 20,
+  "lineno": 272,
+  "module": "log_manager",
+  "msecs": 231.0,
+  "msg": "hackernews_topstory_ids_job - 11528a21-38d5-43e7-8b13-993e47ce7f7d - get_hackernews_topstory_ids - Compute Logger - Got 500 top stories.",
+  "name": "dagster",
+  "pathname": "/home/dagster/workspace/quickstart-etl/.venv/lib/python3.11/site-packages/dagster/_core/log_manager.py",
+  "process": 35373,
+  "processName": "SpawnProcess-2:1",
+  "relativeCreated": 813.4410381317139,
+  "stack_info": null,
+  "thread": 8584465408,
+  "threadName": "MainThread"
+}
+```
+
+### Changing the logger configuration in the Dagster UI
+
+You can also change the logger configuration in the Dagster UI. This is useful if you want to change the logger configuration without changing the code, to use the custom logger on a manual asset materialization launch, or to change the verbosity of the logs.
+
+```yaml
+loggers:
+  console:
+    config:
+      log_level: DEBUG
+```
+
+## Step 2: Write your custom logger
+
+In this example, we'll create a logger implementation that produces comma-separated values from selected fields in the log record. Other examples can be found in the codebase, in the built-in loggers such as `json_console_logger`.
+
+
+
+Sample output:
+
+```csv
+2024-09-04T09:29:33.643818,dagster,INFO,cc76a116-4c8f-400f-9c4d-c42b66cdee3a,topstory_ids_job,hackernews_topstory_ids,Compute Logger - Got 500 top stories.
+```
+
+The available fields emitted by the logger are defined in the [`LogRecord`](https://docs.python.org/3/library/logging.html#logrecord-objects) object. In addition, Dagster-specific information can be found in the `dagster_meta` attribute of the log record. The previous example already includes some of these attributes.
+ +It contains the following fields: + +- `dagster_event`: string +- `dagster_event_batch_metadata`: string +- `job_name`: string +- `job_tags`: a dictionary of strings +- `log_message_id`: string +- `log_timestamp`: string +- `op_name`: string +- `run_id`: string +- `step_key`: string + +## Next steps + +Import your own custom logger by modifying the example provided in step 1. + +## Limitations + +It's not currently possible to globally configure the logger for all jobs in a repository. diff --git a/docs/docs-beta/docs/guides/custom-metrics-logs.md b/docs/docs-beta/docs/guides/custom-metrics-logs.md new file mode 100644 index 0000000000000..06dc0a2a9cf97 --- /dev/null +++ b/docs/docs-beta/docs/guides/custom-metrics-logs.md @@ -0,0 +1,5 @@ +--- +title: "Use custom metrics in logs" +sidebar_position: 30 +unlisted: true +--- diff --git a/docs/docs-beta/docs/guides/dagster-plus.md b/docs/docs-beta/docs/guides/dagster-plus.md new file mode 100644 index 0000000000000..bc65f96ffff0b --- /dev/null +++ b/docs/docs-beta/docs/guides/dagster-plus.md @@ -0,0 +1,8 @@ +--- +title: "Deploy to Dagster+" +sidebar_position: 40 +--- + +# Deploy to Dagster+ + +See the [Dagster+ getting started](/dagster-plus/getting-started) guide. diff --git a/docs/docs-beta/docs/guides/data-assets.md b/docs/docs-beta/docs/guides/data-assets.md new file mode 100644 index 0000000000000..6d6540555d17a --- /dev/null +++ b/docs/docs-beta/docs/guides/data-assets.md @@ -0,0 +1,76 @@ +--- +title: 'Defining data assets with decorators' +sidebar_label: 'Create Assets' +sidebar_position: 10 +--- + +The most common way to create a data asset in Dagster is by annotating a function with an asset decorator. The function computes the contents of the asset, such as a database table or file. + +Dagster supports several ways of creating assets, but this guide will focus on using Python decorators to define data assets. + +
      + Prerequisites + +To follow the steps in this guide, you'll need: + +- To have Dagster installed. Refer to the [Installation](/getting-started/installation) guide for more information. + +
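+
+The embedded examples in the sections below aren't reproduced here. For orientation, minimal versions of the first two decorators discussed might look like this sketch:
+
+```python
+import dagster as dg
+
+
+@dg.asset
+def my_data_asset() -> None:
+    dg.get_dagster_logger().info("Computed my_data_asset")
+
+
+@dg.multi_asset(outs={"asset_one": dg.AssetOut(), "asset_two": dg.AssetOut()})
+def my_multi_asset():
+    # One function, two assets
+    return 1, 2
+```
+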
      + +Dagster has four types of asset decorators: + +| Decorator | Description | +| -------------------- | ------------------------------------------------------------------------------------------------------------------------------ | +| `@asset` | Defines a single asset. [See an example](#single-asset). | +| `@multi_asset` | Outputs multiple assets from a single operation. [See an example](#multi-asset). | +| `@graph_asset` | Outputs a single asset from multiple operations without making each operation itself an asset. [See an example](#graph-asset). | +| `@graph_multi_asset` | Outputs multiple assets from multiple operations | + +## Defining operations that create a single asset \{#single-asset} + +The simplest way to define a data asset in Dagster is by using the `@asset` decorator. This decorator marks a Python function as an asset. + + + +In this example, `my_data_asset` is an asset that logs its output. Dagster automatically tracks its dependencies and handles its execution within the pipeline. + +## Defining operations that create multiple assets \{#multi-asset} + +When you need to generate multiple assets from a single operation, you can use the `@multi_asset` decorator. This allows you to output multiple assets while maintaining a single processing function, which could be useful for: + +- Making a single call to an API that updates multiple tables +- Using the same in-memory object to compute multiple assets + +In this example, `my_multi_asset` produces two assets: `asset_one` and `asset_two`. Each is derived from the same function, which makes it easier to handle related data transformations together: + + + +This example could be expressed as: + +```mermaid +flowchart LR + multi(my_multi_asset) --> one(asset_one) + multi(my_multi_asset) --> two(asset_two) +``` + +## Defining multiple operations that create a single asset \{#graph-asset} + +For cases where you need to perform multiple operations to produce a single asset, you can use the `@graph_asset` decorator. This approach encapsulates a series of operations and exposes them as a single asset, allowing you to model complex pipelines while only exposing the final output. + + + +In this example, `complex_asset` is an asset that's the result of two operations: `step_one` and `step_two`. These steps are combined into a single asset, abstracting away the intermediate representations. + +This example could be expressed as: + +```mermaid +flowchart LR + one((step_one)) --> asset(complex_asset) + two((step_two)) --> asset(complex_asset) +``` + +## Next steps + +- Learn to create [dependencies between assets](/guides/asset-dependencies) +- Enrich Dagster's built-in data catalog with [asset metadata](/guides/metadata) +- Learn to use a [factory pattern](/guides/asset-factories) to create multiple, similar assets diff --git a/docs/docs-beta/docs/guides/data-freshness-testing.md b/docs/docs-beta/docs/guides/data-freshness-testing.md new file mode 100644 index 0000000000000..dbd5c5a97130d --- /dev/null +++ b/docs/docs-beta/docs/guides/data-freshness-testing.md @@ -0,0 +1,81 @@ +--- +title: "Checking for data freshness" +sidebar_position: 20 +sidebar_label: "Data freshness" +--- + +Freshness checks provide a way to identify data assets that are overdue for an update. 
For example, you can use freshness checks to identify stale assets caused by:
+
+- The pipeline hitting an error and failing
+- Runs not being scheduled
+- A backed-up run queue
+- Runs taking longer than expected to complete
+
+Freshness checks can also communicate SLAs for data freshness. For example, downstream asset consumers can determine how often assets are expected to be updated by looking at the defined checks.
+
      + Prerequisites + +To follow the steps in this guide, you'll need familiarity with: + +- [Assets](/guides/data-assets) +- [External assets](/guides/external-assets) +- [Asset checks](/guides/asset-checks) + +
      + +## Getting started + +To get started with freshness checks, follow these general steps: + +1. **Define a freshness check**: Freshness checks are defined using `build_last_update_freshness_checks`, which utilizes an asset's last updated time to determine freshness. + + **If using Dagster+ Pro**, you can also use [`build_anomaly_detection_freshness_checks`](#anomaly-detection) to define a freshness check that uses an anomaly detection model to determine freshness. +2. **Define a schedule or sensor**: Defining a schedule or sensor (`build_sensor_for_freshness_checks`) is required to ensure the freshness check executes. If the check only runs after the asset has been materialized, the check won't be able to detect the times materialization fails. +3. **Pass the freshness check and schedule/sensor to the `Definitions` object**: Freshness checks and the associated schedule or sensor must be added to a `Definitions` object for Dagster to recognize them. +4. **View the freshness check results in the Dagster UI**: Freshness check results will appear in the UI, allowing you to track the results over time. + +## Materializable asset freshness \{#materializable-assets} + +Materializable assets are assets materialized by Dagster. To calculate whether a materializable asset is overdue, Dagster uses the asset's last materialization timestamp. + +The example below defines a freshness check on an asset that fails if the asset's latest materialization occurred more than one hour before the current time. + + + +## External asset freshness \{#external-assets} + +[External assets](/guides/external-assets) are assets orchestrated by systems other than Dagster. + +To run freshness checks on external assets, the checks need to know when the external assets were last updated. Emitting these update timestamps as values for the `dagster/last_updated_timestamp` observation metadata key allows Dagster to calculate whether the asset is overdue. + +The example below defines a freshness check and adds a schedule to run the check periodically. + + + +### Testing freshness with anomaly detection \{#anomaly-detection} + +:::note +Anomaly detection is a Dagster+ Pro feature. +::: + +Instead of applying policies on an asset-by-asset basis, Dagster+ Pro users can use `build_anomaly_detection_freshness_checks` to take advantage of a time series anomaly detection model to determine if data arrives later than expected. + + + +:::note +If the asset hasn't been updated enough times, the check will pass with a message indicating that more data is needed to detect anomalies. +::: + +## Alerting on overdue assets + +:::note +Freshness check alerts are a Dagster+ feature. +::: + +In Dagster+, you can set up alerts to notify you when assets are overdue for an update. Refer to the [Dagster+ alerting guide](/dagster-plus/deployment/alerts) for more information. 
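+
+A sketch of the materializable-asset setup described above, with the asset name and one-hour threshold invented:
+
+```python
+import datetime
+
+import dagster as dg
+
+
+@dg.asset
+def hourly_sales() -> None: ...
+
+
+freshness_checks = dg.build_last_update_freshness_checks(
+    assets=[hourly_sales],
+    lower_bound_delta=datetime.timedelta(hours=1),  # overdue if not updated in the last hour
+)
+freshness_sensor = dg.build_sensor_for_freshness_checks(freshness_checks=freshness_checks)
+
+defs = dg.Definitions(
+    assets=[hourly_sales],
+    asset_checks=freshness_checks,
+    sensors=[freshness_sensor],
+)
+```
+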
+ +## Next steps + +- Explore more [asset checks](/guides/asset-checks) +- Explore how to [raise alerts when assets are overdue](/dagster-plus/deployment/alerts) (Dagster+ Pro) \ No newline at end of file diff --git a/docs/docs-beta/docs/guides/databases.md b/docs/docs-beta/docs/guides/databases.md new file mode 100644 index 0000000000000..ab6f19ea279f3 --- /dev/null +++ b/docs/docs-beta/docs/guides/databases.md @@ -0,0 +1,63 @@ +--- +title: Connecting to databases +description: How to configure resources to connect to databases +sidebar_position: 10 +sidebar_label: Database connections +--- + +When building a data pipeline, you may need to extract data from or load data into a database. In Dagster, resources can be used to connect to a database by acting as a wrapper around a database client. + +This guide demonstrates how to standardize database connections and customize their configuration using Dagster resources. + +
      + Prerequisites + +To follow the steps in this guide, you'll need: + +- Familiarity with [Assets](/guides/data-assets) + +To run the examples in this guide, you'll need: + +- Connection information for a Snowflake database +- To install the following: + + ```bash + pip install dagster dagster-snowflake pandas + ``` + +
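+
+The inline examples for the steps below aren't reproduced here; the overall shape is roughly this sketch (the database, query, and environment variable names are placeholders):
+
+```python
+import dagster as dg
+from dagster_snowflake import SnowflakeResource
+
+iris_db = SnowflakeResource(
+    account=dg.EnvVar("SNOWFLAKE_ACCOUNT"),
+    user=dg.EnvVar("SNOWFLAKE_USER"),
+    password=dg.EnvVar("SNOWFLAKE_PASSWORD"),
+    database="IRIS",
+)
+
+
+@dg.asset
+def iris_summary(iris_db: SnowflakeResource) -> None:
+    with iris_db.get_connection() as conn:
+        conn.cursor().execute("SELECT 1")  # replace with a real query
+
+
+defs = dg.Definitions(assets=[iris_summary], resources={"iris_db": iris_db})
+```
+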
+
+## Step 1: Write a resource \{#step-one}
+
+This example creates a resource that represents a Snowflake database. Using `SnowflakeResource`, define a Dagster resource that connects to a Snowflake database:
+
+
+
+## Step 2: Use the resource in an asset \{#step-two}
+
+To use the resource, provide it as a parameter to an asset and include it in the `Definitions` object:
+
+
+
+When you materialize these assets, Dagster will provide an initialized `SnowflakeResource` to the assets' `iris_db` parameter.
+
+## Step 3: Source configuration with environment variables \{#step-three}
+
+Resources can be configured using environment variables, allowing you to connect to environment-specific databases, swap credentials, and so on. You can use Dagster's built-in `EnvVar` class to source configuration values from environment variables at asset materialization time.
+
+In this example, a second instance of the Snowflake resource, named `production`, has been added:
+
+
+
+When the assets are materialized, Dagster will use the `deployment_name` environment variable to determine which Snowflake resource to use (`local` or `production`). Then, Dagster will read the values set for each resource's environment variables (e.g., `DEV_SNOWFLAKE_PASSWORD`) and initialize a `SnowflakeResource` with those values.
+
+The initialized `SnowflakeResource` will be provided to the assets' `iris_db` parameter.
+
+:::note
+You can also fetch environment variables using the `os` library. Dagster treats each approach to fetching environment variables differently, such as when they're fetched or how they display in the UI. Refer to the [Environment variables guide](/todo) for more information.
+:::
+
+## Next steps
+
+- Explore how to use resources for [Connecting to APIs](/guides/apis)
+- Go deeper into [Understanding Resources](/concepts/resources)
\ No newline at end of file
diff --git a/docs/docs-beta/docs/guides/declarative-automation.md b/docs/docs-beta/docs/guides/declarative-automation.md
new file mode 100644
index 0000000000000..806b0a380df5f
--- /dev/null
+++ b/docs/docs-beta/docs/guides/declarative-automation.md
@@ -0,0 +1,5 @@
+---
+title: "Declarative automation"
+sidebar_label: "Declarative automation"
+unlisted: true
+---
diff --git a/docs/docs-beta/docs/guides/deployment.md b/docs/docs-beta/docs/guides/deployment.md
new file mode 100644
index 0000000000000..88b03ec02f293
--- /dev/null
+++ b/docs/docs-beta/docs/guides/deployment.md
@@ -0,0 +1,6 @@
+---
+title: "Deployment"
+unlisted: true
+---
+
+# Deployment
diff --git a/docs/docs-beta/docs/guides/docker.md b/docs/docs-beta/docs/guides/docker.md
new file mode 100644
index 0000000000000..d2be589f3fca6
--- /dev/null
+++ b/docs/docs-beta/docs/guides/docker.md
@@ -0,0 +1,174 @@
+---
+title: "Deploying with Docker Compose"
+description: A guide to deploying Dagster with Docker Compose.
+---
+
+This guide provides instructions for deploying Dagster using Docker Compose. This is useful when you want to, for example, deploy Dagster on an AWS EC2 host. A typical Dagster Docker deployment includes several long-running containers: one for the webserver, one for the daemon, and one for each code location. It also typically executes each run in its own container.
      + Prerequisites +- Familiarity with Docker and Docker Compose +- Familiarity with `dagster.yaml` instance configuration +- Familiarity with `workspace.yaml` code location configuration +
+
+## Define a Docker image for the Dagster webserver and daemon
+
+The Dagster webserver and daemon are the two _host processes_ in a Dagster deployment. They typically each run in their own container, using the same Docker image. This image contains Dagster packages and configuration, but no user code.
+
+To build this Docker image, use a Dockerfile like the following, with a name like `Dockerfile_dagster`:
+
+```dockerfile
+FROM python:3.10-slim
+
+# dagster-postgres: database for Dagster storage
+# dagster-docker: enables the Docker run launcher
+RUN pip install \
+    dagster \
+    dagster-graphql \
+    dagster-webserver \
+    dagster-postgres \
+    dagster-docker
+
+# Set $DAGSTER_HOME and copy dagster.yaml and workspace.yaml there
+ENV DAGSTER_HOME=/opt/dagster/dagster_home/
+
+RUN mkdir -p $DAGSTER_HOME
+
+COPY dagster.yaml workspace.yaml $DAGSTER_HOME
+
+WORKDIR $DAGSTER_HOME
+```
+
+Additionally, the following files should be in the same directory as the Dockerfile:
+- A `workspace.yaml` to tell the webserver and daemon the location of the code servers
+- A `dagster.yaml` to configure the Dagster instance
+
+## Define a Docker image for each code location
+
+Each code location typically has its own Docker image, and that image is also used for runs launched for that code location.
+
+To build a Docker image for a code location, use a Dockerfile like the following, with a name like `Dockerfile_code_location_1`:
+
+```dockerfile
+FROM python:3.10-slim
+
+RUN pip install \
+    dagster \
+    dagster-postgres \
+    dagster-docker
+
+# Add code location code
+WORKDIR /opt/dagster/app
+COPY directory/with/your/code/ /opt/dagster/app
+
+# Run dagster code server on port 4000
+EXPOSE 4000
+
+# CMD allows this to be overridden from run launchers or executors to execute runs and steps
+CMD ["dagster", "code-server", "start", "-h", "0.0.0.0", "-p", "4000", "-f", "definitions.py"]
+```
+
+## Write a Docker Compose file
+
+The following `docker-compose.yaml` defines how to run the webserver container, daemon container, code location containers, and database container:
+
+```yaml title="docker-compose.yaml"
+version: "3.7"
+
+services:
+  # This service runs the postgres DB used by dagster for run storage, schedule storage,
+  # and event log storage.
+  docker_postgresql:
+    image: postgres:11
+    container_name: docker_postgresql
+    environment:
+      POSTGRES_USER: "postgres_user"
+      POSTGRES_PASSWORD: "postgres_password"
+      POSTGRES_DB: "postgres_db"
+    networks:
+      - docker_network
+
+  # This service runs the code server that loads your user code.
+  docker_code_location_1:
+    build:
+      context: .
+      dockerfile: ./Dockerfile_code_location_1
+    container_name: docker_code_location_1
+    image: docker_user_code_image
+    restart: always
+    environment:
+      DAGSTER_POSTGRES_USER: "postgres_user"
+      DAGSTER_POSTGRES_PASSWORD: "postgres_password"
+      DAGSTER_POSTGRES_DB: "postgres_db"
+      DAGSTER_CURRENT_IMAGE: "docker_user_code_image"
+    networks:
+      - docker_network
+
+  # This service runs dagster-webserver.
+  docker_webserver:
+    build:
+      context: .
+ dockerfile: ./Dockerfile_dagster + entrypoint: + - dagster-webserver + - -h + - "0.0.0.0" + - -p + - "3000" + - -w + - workspace.yaml + container_name: docker_webserver + expose: + - "3000" + ports: + - "3000:3000" + environment: + DAGSTER_POSTGRES_USER: "postgres_user" + DAGSTER_POSTGRES_PASSWORD: "postgres_password" + DAGSTER_POSTGRES_DB: "postgres_db" + volumes: # Make docker client accessible so we can terminate containers from the webserver + - /var/run/docker.sock:/var/run/docker.sock + - /tmp/io_manager_storage:/tmp/io_manager_storage + networks: + - docker_network + depends_on: + - docker_postgresql + - docker_code_location_1 + + # This service runs the dagster-daemon process, which is responsible for taking runs + # off of the queue and launching them, as well as creating runs from schedules or sensors. + docker_daemon: + build: + context: . + dockerfile: ./Dockerfile_dagster + entrypoint: + - dagster-daemon + - run + container_name: docker_daemon + restart: on-failure + environment: + DAGSTER_POSTGRES_USER: "postgres_user" + DAGSTER_POSTGRES_PASSWORD: "postgres_password" + DAGSTER_POSTGRES_DB: "postgres_db" + volumes: # Make docker client accessible so we can launch containers using host docker + - /var/run/docker.sock:/var/run/docker.sock + - /tmp/io_manager_storage:/tmp/io_manager_storage + networks: + - docker_network + depends_on: + - docker_postgresql + - docker_code_location_1 + +networks: + docker_network: + driver: bridge + name: docker_network +``` + +## Start your deployment + +To start the deployment, run: + +```shell +docker compose up +``` diff --git a/docs/docs-beta/docs/guides/external-assets.md b/docs/docs-beta/docs/guides/external-assets.md new file mode 100644 index 0000000000000..05f2ae22d720c --- /dev/null +++ b/docs/docs-beta/docs/guides/external-assets.md @@ -0,0 +1,111 @@ +--- +title: Representing external data sources with external assets +sidebar_position: 80 +sidebar_label: 'External data sources' +--- + +One of Dagster's goals is to present a single unified lineage of all of the data assets in an organization, even if those assets are orchestrated by systems other than Dagster. + +With **external assets**, you can model assets orchestrated by other systems natively within Dagster, ensuring you have a comprehensive catalog of your organization's data. You can also create new data assets downstream of these external assets. + +Unlike native assets, Dagster can't materialize external assets directly or put them in a schedule. In these cases, an external system must inform Dagster when an external asset is updated. + +For example, external assets could be: + +- Files in a data lake that are populated by a bespoke internal tool +- A CSV file delivered daily by SFTP from a partner +- A table in a data warehouse populated by another orchestrator + +
      + Prerequisites + +To follow the steps in this guide, you'll need: + +- Familiarity with [Assets](/guides/data-assets) +- Familiarity with [Sensors](/guides/sensors) +
+ +## Defining external assets + +Let's say you have a partner who sends you raw transaction data by SFTP on an almost daily basis. This data is later cleaned and stored in an internal data lake. + +Because the raw transaction data isn't materialized by Dagster, it makes sense to model it as an external asset. The following example accomplishes this by using `AssetSpec`: + + + +Refer to the [`AssetSpec` API docs](/todo) for the parameters you can provide to an external asset. + +## Recording materializations and metadata + +When an external asset is modeled in Dagster, you also need to inform Dagster whenever the external asset is updated. You should also include any relevant metadata about the asset, such as the time it was last updated. + +There are two main ways to do this: + +- Pulling external asset events with sensors +- Pushing external asset events using Dagster's REST API + +### Pulling with sensors + +You can use a Dagster [sensor](/guides/sensors) to regularly poll the external system and pull information about the external asset into Dagster. + +For example, here's how you would poll an external system like an SFTP server to update an external asset whenever the file is changed. + + + +Refer to the [Sensors guide](/guides/sensors) for more information about sensors. + +### Pushing with the REST API + +You can inform Dagster that an external asset has materialized by pushing the event from an external system to the REST API. The following examples demonstrate how to inform Dagster that a materialization of the `raw_transactions` external asset has occurred. + +The required headers for the REST API depend on whether you're using Dagster+ or OSS. Use the tabs to view an example API request for each type of deployment. + + + + +Authentication headers are required if using Dagster+. The request should be made to your Dagster+ organization and a specific deployment in the organization. + +```shell
curl \
  -X POST \
  -H 'Content-Type: application/json' \
  -H 'Dagster-Cloud-Api-Token: [YOUR API TOKEN]' \
  'https://[YOUR ORG NAME].dagster.cloud/[YOUR DEPLOYMENT NAME]/report_asset_materialization/' \
  -d '
{
  "asset_key": "raw_transactions",
  "metadata": {
    "file_last_modified_at_ms": 1724614700266
  }
}'
``` + + + + +Authentication headers aren't required if using Dagster OSS. The request should be pointed at your open source URL, which is `http://localhost:3000` in this example. + +```shell
curl \
  -X POST \
  -H 'Content-Type: application/json' \
  'http://localhost:3000/report_asset_materialization/' \
  -d '
{
  "asset_key": "raw_transactions",
  "metadata": {
    "file_last_modified_at_ms": 1724614700266
  }
}'
``` + + + + +Refer to the [External assets REST API documentation](/todo) for more information. + +## Modeling a graph of external assets + +Like regular Dagster assets, external assets can have dependencies. This is useful when you want to model an entire data pipeline orchestrated by another system. 
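As a minimal sketch of what this might look like, the following defines two external assets where one depends on the other. The asset keys are illustrative, and `AssetSpec` objects are passed directly to `Definitions`:

```python
import dagster as dg

# Raw file delivered by the partner over SFTP (not orchestrated by Dagster)
raw_transactions = dg.AssetSpec("raw_transactions")

# Cleaned table produced from the raw file by another orchestrator
cleaned_transactions = dg.AssetSpec("cleaned_transactions", deps=[raw_transactions])

defs = dg.Definitions(assets=[raw_transactions, cleaned_transactions])
```

Because both assets are external, Dagster only tracks them in the lineage graph; the upstream system still needs to report materializations through sensors or the REST API as described above.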
+ + diff --git a/docs/docs-beta/docs/guides/google-cloud-platform.md b/docs/docs-beta/docs/guides/google-cloud-platform.md new file mode 100644 index 0000000000000..514c0695254c3 --- /dev/null +++ b/docs/docs-beta/docs/guides/google-cloud-platform.md @@ -0,0 +1,5 @@ +--- +title: "Deploy to Google Cloud Platform" +sidebar_position: 20 +unlisted: true +--- diff --git a/docs/docs-beta/docs/guides/index.md b/docs/docs-beta/docs/guides/index.md new file mode 100644 index 0000000000000..cb467c230dadd --- /dev/null +++ b/docs/docs-beta/docs/guides/index.md @@ -0,0 +1,6 @@ +--- +title: "Guides" +unlisted: true +--- + +# Guides diff --git a/docs/docs-beta/docs/guides/ingesting-data.md b/docs/docs-beta/docs/guides/ingesting-data.md new file mode 100644 index 0000000000000..a7786b94ffb78 --- /dev/null +++ b/docs/docs-beta/docs/guides/ingesting-data.md @@ -0,0 +1,52 @@ +--- +title: Ingesting data with Dagster +description: Learn how to orchestrate data ingestion with Dagster +sidebar_position: 10 +sidebar_label: Ingest data +--- + +import { Card, CardGroup } from '@site/src/components/Cards'; + +:::note +This guide focuses on batch data ingestion, as streaming data ingestion doesn't typically rely on an orchestrator to kick off or coordinate computations. However, streaming data assets can still be represented in Dagster for lineage purposes. +::: + +Dagster is often used to orchestrate the ingestion of data into a data warehouse or data lake, where it can be queried and transformed. To ingest data with Dagster, you can use pre-built connectors or write your own custom code. + +
      +Prerequisites + +To follow this guide, you'll need: + +- Familiarity with [Assets](/concepts/assets) +
+ +## How Dagster supports data ingestion + +As a data orchestrator, Dagster helps with data ingestion as it can: + +- **Automatically kick off computations that ingest data**, thus removing the need for manual intervention +- **Coordinate data ingestion with downstream data transformation,** such as rebuilding a set of dbt models after upstream data is updated +- **Represent ingested data assets in an asset graph**, which enables understanding what ingested data exists, how ingested data is used, and where data is ingested from + +## Orchestrating data ingestion tools + +Dagster currently integrates with the following data ingestion tools, enabling you to sync diverse data sources into data warehouse tables using pre-built connectors: + +- [Airbyte](/integrations/airbyte) +- [dlt](/integrations/dlt) +- [Fivetran](/integrations/fivetran) +- [Sling](/integrations/sling) + +## Writing custom data ingestion pipelines + +Using a language like Python to write code for data ingestion into a platform is also a common approach. This is useful when you have unique data ingestion requirements that aren't addressed by existing tools, or when you prefer to keep your platform streamlined without adding new tools. + +For example, imagine there's a CSV file of counties on the internet and you want to load it into your Snowflake data warehouse as a table. To do this, you might define a Dagster asset that represents that table in your warehouse. The asset's materialization function fetches data from the internet and loads it into that table: + + + +## Next steps + +- Transform data using [Dagster's dbt integration](/guides/transform-dbt) +- Use asset checks [to test data quality](/guides/asset-checks) and [freshness](/guides/data-freshness-testing) \ No newline at end of file diff --git a/docs/docs-beta/docs/guides/io-managers.md b/docs/docs-beta/docs/guides/io-managers.md new file mode 100644 index 0000000000000..703ef990f5531 --- /dev/null +++ b/docs/docs-beta/docs/guides/io-managers.md @@ -0,0 +1,88 @@ +--- +title: "Managing stored data with I/O managers" +sidebar_position: 50 +sidebar_label: "I/O managers" +--- + +I/O managers in Dagster allow you to keep the code for data processing separate from the code for reading and writing data. This reduces repetitive code and makes it easier to change where your data is stored. + +In many Dagster pipelines, assets can be broken down into the following steps: + +1. Reading data from a data store into memory +2. Applying an in-memory transform +3. Writing the transformed data to a data store + +For assets that follow this pattern, an I/O manager can streamline the code that handles reading and writing data to and from a source. + 
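To make the pattern concrete, here's a hedged sketch of an asset that performs all three steps by hand, assuming a local DuckDB file and a pre-existing `raw_sales_data` table (both illustrative):

```python
import dagster as dg
import duckdb


@dg.asset(deps=["raw_sales_data"])
def clean_sales_data() -> None:
    con = duckdb.connect("sales.duckdb")
    # 1. Read data from the data store into memory
    df = con.execute("SELECT * FROM raw_sales_data").fetch_df()
    # 2. Apply an in-memory transform
    df = df.dropna(subset=["amount"])
    # 3. Write the transformed data back to the data store
    con.execute("CREATE OR REPLACE TABLE clean_sales_data AS SELECT * FROM df")
    con.close()
```

An I/O manager moves steps 1 and 3 out of the asset body, leaving only the transform.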
      +Prerequisites + +To follow the steps in this guide, you'll need familiarity with: + +- [Assets](/concepts/assets) +- [Resources](/concepts/resources) +
+ +## Before you begin + +**I/O managers aren't required to use Dagster, nor are they the best option in all scenarios.** If you find yourself writing the same code at the start and end of each asset to load and store data, an I/O manager may be useful. For example: + +- You have assets that are stored in the same location and follow a consistent set of rules to determine the storage path +- You have assets that are stored differently in local, staging, and production environments +- You have assets that load upstream dependencies into memory to do the computation + +**I/O managers may not be the best fit if:** + +- You want to run SQL queries that create or update a table in a database +- Your pipeline manages I/O on its own by using other libraries/tools that write to storage +- Your assets won't fit in memory, such as a database table with billions of rows + +As a general rule, if your pipeline becomes more complicated in order to use I/O managers, it's likely that I/O managers aren't a good fit. In these cases, you should use `deps` to [define dependencies](/guides/asset-dependencies). + +## Using I/O managers in assets \{#io-in-assets} + +Consider the following example, which contains assets that construct a DuckDB connection object, read data from an upstream table, apply some in-memory transform, and write the result to a new table in DuckDB: + + + +Using an I/O manager would remove the code that reads and writes data from the assets themselves, instead delegating it to the I/O manager. The assets would be left only with the code that applies transformations or retrieves the initial CSV file. + + + +To load upstream assets using an I/O manager, specify the asset as an input parameter to the asset function. In this example, the `DuckDBPandasIOManager` I/O manager will read the DuckDB table with the same name as the upstream asset (`raw_sales_data`) and pass the data to `clean_sales_data` as a Pandas DataFrame. + +To store data using an I/O manager, return the data in the asset function. The returned data must be a valid type. This example uses Pandas DataFrames, which the `DuckDBPandasIOManager` will write to a DuckDB table with the same name as the asset. + +Refer to the individual I/O manager documentation for details on valid types and how they store data. + +## Swapping data stores \{#swap-data-stores} + +With I/O managers, swapping data stores consists of changing the implementation of the I/O manager. The asset definitions, which only contain transformational logic, won't need to change. + +In the following example, a Snowflake I/O manager replaces the DuckDB I/O manager. + + + +## Built-in I/O managers \{#built-in} + +Dagster offers built-in library implementations for I/O managers for popular data stores and in-memory formats. + +| Name | Description |
| ---- | ----------- |
| | Default I/O manager. Stores outputs as pickle files on the local file system. |
| | Stores outputs in memory. Primarily useful for unit testing. |
| | Stores outputs as pickle files in Amazon Web Services S3. |
| | Stores outputs as pickle files in Azure ADLS2. |
| | Stores outputs as pickle files in Google Cloud Platform GCS. |
| | Stores Pandas DataFrame outputs in Google Cloud Platform BigQuery. |
| | Stores PySpark DataFrame outputs in Google Cloud Platform BigQuery. |
| | Stores Pandas DataFrame outputs in Snowflake. |
| | Stores PySpark DataFrame outputs in Snowflake. |
| | Stores Pandas DataFrame outputs in DuckDB. |
| | Stores PySpark DataFrame outputs in DuckDB. |
| | Stores Polars DataFrame outputs in DuckDB. | + +## Next steps + +- Learn to [connect databases](/guides/databases) with resources +- Learn to [connect APIs](/guides/apis) with resources \ No newline at end of file diff --git a/docs/docs-beta/docs/guides/kubernetes.md b/docs/docs-beta/docs/guides/kubernetes.md new file mode 100644 index 0000000000000..1f5c15b648da6 --- /dev/null +++ b/docs/docs-beta/docs/guides/kubernetes.md @@ -0,0 +1,201 @@ +--- +title: "Deploy to Kubernetes" +sidebar_position: 21 +--- + +This guide will walk you through how to run the Dagster-specific components of a Dagster production deployment on a Kubernetes cluster. This includes the Dagster daemon, a webserver to serve the Dagster UI, a PostgreSQL container, and your Dagster project user code. + +Dagster provides [Helm charts](https://github.com/dagster-io/dagster/tree/master/helm) for deploying Dagster that you can customize for your specific needs. For each Dagster component used by the Helm chart, Dagster publishes a corresponding image to [DockerHub](https://hub.docker.com/u/dagster). + 
      + Prerequisites + +To follow the steps in this guide, you'll need: + +- **Familiarity with [Docker](https://docs.docker.com/)**, and: + - **To have Docker installed**. [Docker installation guide](https://docs.docker.com/engine/install/) + - **Access to a Docker image registry**, such as Amazon Web Services ECR or DockerHub. If you're following along on your local machine, this isn't required. +- **Familiarity with [Kubernetes](https://kubernetes.io/docs/home/)**, and: + - **To have `kubectl` installed**. [Kubernetes installation guide](https://kubernetes.io/docs/tasks/tools/) + - **An existing Kubernetes cluster**. To follow along on your local machine, [install Docker Desktop](https://docs.docker.com/desktop/kubernetes/) and turn on the included Kubernetes server. +- **Familiarity with [Helm](https://helm.sh/docs/)**, and: + - **To have Helm 3 installed**. [Helm installation guide](https://helm.sh/docs/intro/install/) +- A Dagster project to deploy. You can also use the [example project](/todo): + ```bash + dagster project from-example --example deploy_k8s_beta --name deploy_k8s_beta + ``` + +
+ + +## Step 1: Write and build a Docker image containing your Dagster project +### Step 1.1: Write a Dockerfile +First, you'll build a Docker image that contains your Dagster project and all of its dependencies. The Dockerfile should: +1. Copy your Dagster project into the image. +2. Install `dagster`, `dagster-postgres`, and `dagster-k8s`, along with any other libraries your project depends on. The example project has a dependency on `pandas`, so it's included in the `pip install` in the following example Dockerfile. +3. Expose port 80, which we'll use to set up port-forwarding later. + + + + +### Step 1.2: Build and push a Docker image + +To build your Docker image, run the following command from the directory where your Dockerfile is located: + +```bash
docker build . -t iris_analysis:1
``` +This builds the Docker image from Step 1.1 and gives it the name `iris_analysis` and tag `1`. You can set custom values for both the name and the tag. We recommend that each time you rebuild your Docker image, you assign a new value for the tag to ensure that the correct image is used when running your code. + + +If you are using a Docker image registry, push the image to your registry. If you are following along on your local machine, you can skip this command. + +```bash
docker push iris_analysis:1
``` + +If you are pushing your image to an image registry, you can find more information about this process in your registry's documentation: +- [Amazon ECR](https://docs.aws.amazon.com/AmazonECR/latest/userguide/docker-push-ecr-image.html) +- [DockerHub](https://docs.docker.com/docker-hub/quickstart/#step-5-build-and-push-a-container-image-to-docker-hub-from-your-computer) + + +## Step 2: Configure `kubectl` to point at a Kubernetes cluster +Before you can deploy Dagster, you need to configure `kubectl` to develop against the Kubernetes cluster where you want Dagster to be deployed. + +If you are using Docker Desktop and the included Kubernetes server, you will need to create a context first. If you already have a Kubernetes cluster and context created for your Dagster deployment, you can skip running this command. +```bash
kubectl config set-context dagster --namespace default --cluster docker-desktop --user=docker-desktop
``` + +Ensure that `kubectl` is using the correct context by running: +```bash
kubectl config use-context 
``` +Where `` is the name of the context you want to use. For example, if you ran the preceding `kubectl config set-context` command, you will run +```bash
kubectl config use-context dagster
``` + +## Step 3: Add the Dagster Helm chart repository + +Dagster publishes [Helm charts](https://artifacthub.io/packages/helm/dagster/dagster) for deploying Dagster, with a new chart for each Dagster version. + +To install the Dagster Helm charts, run the following command: + +```bash
helm repo add dagster https://dagster-io.github.io/helm
``` + +If you have previously added the Dagster Helm charts, run the following command to update the repository: + +```bash
helm repo update
``` + +## Step 4: Configure the Helm chart for your deployment + +You will need to modify some values in Dagster's Helm chart to deploy your Dagster project. 
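The next two sub-steps walk through this in detail. As an orientation, the end result is a `values.yaml` containing a section roughly like the following sketch. The names and paths follow the example project from Step 1, the keys follow the options listed in Step 4.2, and the chart's inline comments remain the authoritative schema:

```yaml
dagster-user-deployments:
  deployments:
    - name: "iris-analysis"
      image:
        name: "iris_analysis"
        tag: "1"
        # Use IfNotPresent for a local image; keep Always when pulling from a registry
        pullPolicy: IfNotPresent
      dagsterApiGrpcArgs:
        - "--python-file"
        - "/iris_analysis/definitions.py"
```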
+ +### Step 4.1: Copy default Helm chart values into values.yaml + +Run the following command to copy the default values from the published Helm chart into a local file: + +```bash
helm show values dagster/dagster > values.yaml
``` + +### Step 4.2: Modify the `values.yaml` file for your deployment +The `values.yaml` file contains configuration options you can set for your deployment. Different configuration options are explained in [inline comments in `values.yaml`](https://artifacthub.io/packages/helm/dagster/dagster?modal=values). + +To deploy your project, you'll need to set the following options: +- `dagster-user-deployments.deployments.name`, which should be a unique name for your deployment +- `dagster-user-deployments.deployments.image.name` and `dagster-user-deployments.deployments.image.tag`, which should be set to match the Docker image from Step 1 +- `dagster-user-deployments.deployments.dagsterApiGrpcArgs`, which should be set to the arguments you would pass to `dagster api grpc` to [run a gRPC server for your project](https://docs.dagster.io/concepts/code-locations/workspace-files#running-your-own-grpc-server). + +If you are following this guide on your local machine, you will also need to set `pullPolicy: IfNotPresent`. This will use the local version of the image built in Step 1. However, in production use cases when your Docker images are pushed to image registries, this value should remain `pullPolicy: Always`. + + + +In this example, the image `name` and `tag` are set to `iris_analysis` and `1` to match the image that was pushed in Step 1. To run the gRPC server, the path to the Dagster project needs to be specified, so `--python-file` and `/iris_analysis/definitions.py` are set for `dagsterApiGrpcArgs`. + + +## Step 5: Install the Helm chart +Now that you have modified the Helm `values.yaml` file, you can install the changes in your Kubernetes cluster. + +Run the following command to install the Helm chart and create a [release](https://helm.sh/docs/intro/using_helm/#three-big-concepts). + +```bash
helm upgrade --install dagster dagster/dagster -f /path/to/values.yaml
``` + +:::note +If you want to run an older version of the Dagster system components, like the daemon and webserver, pass the `--version` flag to `helm upgrade` with the version of Dagster you are running. For example, if you want to run version `1.7.4`, you'll run the command `helm upgrade --install dagster dagster/dagster -f /path/to/values.yaml --version 1.7.4` +::: + +The `helm upgrade` command will launch several pods in your Kubernetes cluster. You can check the status of the pods with the command: + +```bash
kubectl get pods
``` + +It may take a few minutes before all pods are in a `RUNNING` state. If the `helm upgrade` was successful, you should see a `kubectl get pods` output similar to this: + +```bash
$ kubectl get pods
NAME                                                             READY   STATUS    AGE
dagster-daemon-5787ccc868-nsvsg                                  1/1     Running   3m41s
dagster-webserver-7c5b5c7f5c-rqrf8                               1/1     Running   3m41s
dagster-dagster-user-deployments-iris-analysis-564cbcf9f-fbqlw   1/1     Running   3m41s
dagster-postgresql-0                                             1/1     Running   3m41s
``` + 
      + Debugging failed pods + +If one of the pods is in an error state, you can view the logs using the command + +```bash +kubectl logs +``` + +For example, if the pod `dagster-webserver-7c5b5c7f5c-rqrf8` is in a `CrashLoopBackOff` state, the logs can be viewed with the command + +``` +kubectl logs dagster-webserver-7c5b5c7f5c-rqrf8 +``` + +
+ + +## Step 6: Connect to your Dagster deployment and materialize your assets + +### Step 6.1: Start port-forwarding to the webserver pod +Run the following command to set up port forwarding to the webserver pod: + +```bash
DAGSTER_WEBSERVER_POD_NAME=$(kubectl get pods --namespace default \
  -l "app.kubernetes.io/name=dagster,app.kubernetes.io/instance=dagster,component=dagster-webserver" \
  -o jsonpath="{.items[0].metadata.name}")
kubectl --namespace default port-forward $DAGSTER_WEBSERVER_POD_NAME 8080:80
``` + +This command gets the full name of the `webserver` pod from the output of `kubectl get pods`, and then sets up port forwarding with the `kubectl port-forward` command. + +### Step 6.2: Visit your Dagster deployment + +The webserver has been port-forwarded to `8080`, so you can visit the Dagster deployment by going to [http://127.0.0.1:8080](http://127.0.0.1:8080). You should see the Dagster landing page. + +![Screenshot of Dagster landing page](/img/placeholder.svg) + +### Step 6.3: Materialize an asset +In the Dagster UI, navigate to the Asset catalog and click the **Materialize** button to materialize an asset. Dagster will start a Kubernetes job to materialize the asset. You can introspect on the Kubernetes cluster to see this job: + +```bash
$ kubectl get jobs
NAME                                               COMPLETIONS   DURATION   AGE
dagster-run-5ee8a0b3-7ca5-44e6-97a6-8f4bd86ee630   1/1           4s         11s
``` + +## Next steps +- Forwarding Dagster logs from a Kubernetes deployment to AWS, Azure, GCP +- Other configuration options for K8s deployments, such as secrets diff --git a/docs/docs-beta/docs/guides/metadata.md b/docs/docs-beta/docs/guides/metadata.md new file mode 100644 index 0000000000000..3e3a2e0f5c0cd --- /dev/null +++ b/docs/docs-beta/docs/guides/metadata.md @@ -0,0 +1,160 @@ +--- +title: 'Adding tags and metadata to assets' +description: 'Learn how to add tags and metadata to assets to improve observability in Dagster' +sidebar_position: 40 +sidebar_label: 'Add metadata' +--- + +Assets feature prominently in the Dagster UI. Attaching information to assets allows you to understand where they're stored, what they contain, and how they should be organized. + +Using metadata in Dagster, you can: + +- Attach ownership information +- Organize assets with tags +- Attach rich, complex information such as a Markdown description, a table schema, or a time series +- Link assets with their source code + 
      + Prerequisites + +To follow the steps in this guide, you'll need: + +- Familiarity with [Assets](/guides/data-assets) +
+ +## Adding owners to assets \{#owners} + +In a large organization, it's important to know which individuals and teams are responsible for a given data asset: + + + +`owners` must either be an email address or a team name prefixed by `team:`. + +:::tip +With Dagster+ Pro, you can create asset-based alerts that automatically notify an asset's owners when triggered. Refer to the [Dagster+ alert documentation](/dagster-plus/deployment/alerts) for more information. +::: + +## Organizing assets with tags \{#tags} + +**Tags** are the primary way to organize assets in Dagster. You can attach several tags to an asset when it's defined, and they will appear in the UI. You can also use tags to search and filter for assets in the [Asset catalog](/todo). They're structured as key-value pairs of strings. + +Here's an example of some tags you might apply to an asset: + +```python
{"domain": "marketing", "pii": "true"}
``` + +Like `owners`, just pass a dictionary of tags to the `tags` argument when defining an asset: + + + +Keep in mind that tags must contain only strings as keys and values. Additionally, the Dagster UI will render tags with the empty string as a "label" rather than a key-value pair. + +## Attaching metadata to assets \{#attaching-metadata} + +**Metadata** allows you to attach rich information to the asset, like a Markdown description, a table schema, or a time series. Metadata is more flexible than tags, as it can store more complex information. + +Metadata can be attached to an asset at definition time, when the code is first imported, or at runtime when an asset is materialized. + +### At definition time \{#definition-time-metadata} + +Using definition metadata to describe assets can make it easy to provide context for you and your team. This metadata could be descriptions of the assets, the types of assets, or links to relevant documentation. + + + +To learn more about the different types of metadata you can attach, see the [`MetadataValue`](/todo) API docs. + +Some metadata keys will be given special treatment in the Dagster UI. See the [Standard metadata types](#standard-metadata-types) section for more information. + +### At runtime \{#runtime-metadata} + +With runtime metadata, you can surface information about an asset's materialization, such as how many records were processed or when the materialization occurred. This allows you to update an asset's information when it changes and track historical metadata as a time series. + + + +Numerical metadata is treated as a time series in the Dagster UI. + +## Standard metadata types \{#standard-metadata-types} + +The following metadata keys are given special treatment in the Dagster UI. + +| Key | Description |
| ----------------------------- | ----------- |
| `dagster/uri` | **Type:** `str` <br/><br/> The URI for the asset, for example: "s3://my_bucket/my_object" |
| `dagster/column_schema` | **Type:** [`TableSchema`](/todo) <br/><br/> For an asset that's a table, the schema of the columns in the table. Refer to the [Table and column metadata](#table-schema) section for details. |
| `dagster/column_lineage` | **Type:** [`TableColumnLineage`](/todo) <br/><br/> For an asset that's a table, the lineage of column inputs to column outputs for the table. Refer to the [Table and column metadata](#table-schema) section for details. |
| `dagster/row_count` | **Type:** `int` <br/><br/> For an asset that's a table, the number of rows in the table. Refer to the Table metadata documentation for details. |
| `dagster/partition_row_count` | **Type:** `int` <br/><br/> For a partition of an asset that's a table, the number of rows in the partition. |
| `dagster/relation_identifier` | **Type:** `str` <br/><br/> A unique identifier for the table/view, typically fully qualified. For example, my_database.my_schema.my_table |
| `dagster/code_references` | **Type:** [`CodeReferencesMetadataValue`](/todo) <br/><br/> A list of code references for the asset, such as file locations or references to GitHub URLs. Refer to the [Linking assets with their source code](#source-code) section for details. Should only be provided in definition-level metadata, not materialization metadata. | + +## Table and column metadata \{#table-column} + +Two of the most powerful metadata types are [`TableSchema`](/todo) and [`TableColumnLineage`](/todo). These metadata types allow stakeholders to view the schema of a table right within Dagster, and, in Dagster+, navigate the [Asset catalog](/todo) with the column lineage. + +### Table schema metadata \{#table-schema} + +The following example attaches table and column schema metadata at both definition time and runtime: + + + +There are several data types and constraints available on [`TableColumn`](/todo) objects. Refer to the API documentation for more information. + +### Column lineage metadata \{#column-lineage} + +:::tip +Many integrations such as [dbt](https://docs.dagster.io/integrations/dbt/reference) automatically attach column lineage metadata out-of-the-box. +::: + +Column lineage metadata is a powerful way to track how columns in a table are derived from other columns: + + + +:::tip +Dagster+ provides rich visualization and navigation of column lineage in the Asset catalog. Refer to the [Dagster+ documentation](/dagster-plus) for more information. +::: + +## Linking assets with source code \{#source-code} + +import Experimental from '../partials/\_Experimental.md'; + + + +To link assets with their source code, you can attach a **code reference**. Code references are a type of metadata that allow you to easily view those assets' source code from the Dagster UI, both in local development and in production. + +:::tip +Many integrations such as [dbt](https://docs.dagster.io/integrations/dbt/reference#attaching-code-reference-metadata) support this capability out of the box. +::: + +### Attaching Python code references for local development \{#python-references} + +Dagster can automatically attach code references to assets during local development with one line of code: + + + +### Customizing code references \{#custom-references} + +If you want to customize how code references are attached - such as when you are building [domain-specific languages with asset factories](/guides/asset-factories) - you can manually add the `dagster/code_references` metadata to asset definitions: + + + +### Attaching code references in production \{#production-references} + + + + +Dagster+ can automatically annotate assets with code references to source control, such as GitHub or GitLab. + + + + + + +If you aren't using Dagster+, you can annotate your assets with code references to source control, but it requires manual mapping: + + + +`link_code_references_to_git` currently supports GitHub and GitLab repositories. It also supports customization of how file paths are mapped; see the `AnchorBasedFilePathMapping` API docs for more information. 
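As a sketch of what that manual mapping might look like, assuming the repository URL, branch, and anchor path below are placeholders (and that exact signatures may vary between Dagster versions):

```python
from pathlib import Path

import dagster as dg
from dagster import (
    AnchorBasedFilePathMapping,
    link_code_references_to_git,
    with_source_code_references,
)


@dg.asset
def my_asset(): ...


defs = dg.Definitions(
    assets=link_code_references_to_git(
        # First attach local file references, then rewrite them as git URLs
        assets_defs=with_source_code_references([my_asset]),
        git_url="https://github.com/my-org/my-repo",
        git_branch="main",
        file_path_mapping=AnchorBasedFilePathMapping(
            local_file_anchor=Path(__file__),
            file_anchor_path_in_repository="src/my_module/definitions.py",
        ),
    )
)
```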
+ + + \ No newline at end of file diff --git a/docs/docs-beta/docs/guides/microsoft-azure.md b/docs/docs-beta/docs/guides/microsoft-azure.md new file mode 100644 index 0000000000000..d26b6d4f5f4f7 --- /dev/null +++ b/docs/docs-beta/docs/guides/microsoft-azure.md @@ -0,0 +1,5 @@ +--- +title: "Deploy to Microsoft Azure" +sidebar_position: 30 +unlisted: true +--- diff --git a/docs/docs-beta/docs/guides/non-python.md b/docs/docs-beta/docs/guides/non-python.md new file mode 100644 index 0000000000000..02e45eb0f01e5 --- /dev/null +++ b/docs/docs-beta/docs/guides/non-python.md @@ -0,0 +1,84 @@ +--- +title: "Using Dagster Pipes to execute non-Python languages" +sidebar_label: "Dagster Pipes" +sidebar_position: 60 +--- + +Dagster is written in Python, but that doesn't mean Python is the only language that can be used when materializing assets. With Dagster Pipes, you can run code in other languages and send information back to Dagster. + +This guide covers how to run JavaScript with Dagster using Pipes; however, the same principles apply to other languages. + 
      +Prerequisites + +To follow this guide, you'll need: + +- Familiarity with [Assets](/concepts/assets) +- A basic understanding of JavaScript and Node.js + +To run the examples, you'll need to install: + +- [Node.js](https://nodejs.org/en/download/package-manager/) +- The following packages: + + ```bash + pip install dagster dagster-webserver tensorflow + ``` +
      + +## Step 1: Create a script using Tensorflow in JavaScript + +First, you'll create a JavaScript script that reads a CSV file and uses Tensorflow to train a sequential model. + +Create a file named `tensorflow/main.js` with the following contents: + + + +## Step 2: Create a Dagster asset that runs the script + +In Dagster, create an asset that: + +- Uses the `PipesSubprocessClient` resource to run the script with `node` +- Sets the `compute_kind` to `javascript`. This makes it easy to identify that an alternate compute will be used for materialization. + + + +When the asset is materialized, the stdout and stderr will be captured automatically and shown in the asset logs. If the command passed to Pipes returns a successful exit code, Dagster will produce an asset materialization result. + +![Image of captured stdout](/img/placeholder.svg) + +## Step 3: Send and receive data from the script + +To send context to your script or emit events back to Dagster, you can use environment variables provided by the `PipesSubprocessClient`. + + +- `DAGSTER_PIPES_CONTEXT` - Input context +- `DAGSTER_PIPES_MESSAGES` - Output context + +Create a new file with the following helper functions that read the environment variables, decode the data, and write messages back to Dagster: + + + +Both environment variables are base64 encoded, zip compressed JSON objects. Each JSON object contains a path that indicates where to read or write data. + +## Step 4: Emit events and report materializations from your external process + +Using the utility functions to decode the Dagster Pipes environment variables, you can send additional parameters into the JavaScript process. You can also output more information into the asset materializations. + +Update the `tensorflow/main.js` script to: + +- Retrieve the model configuration from the Dagster context, and +- Report an asset materialization back to Dagster with model metadata + + + +## Step 5: Update the asset to provide extra parameters + +Finally, update your Dagster asset to pass in the model information that's used by the script: + + + +## What's next? + +- Schedule your pipeline to run periodically with [Automating Pipelines](/guides/automation) +- Explore adding asset checks to validate your script with [Understanding Asset Checks](/concepts/assets/asset-checks) diff --git a/docs/docs-beta/docs/guides/partition-dependencies.md b/docs/docs-beta/docs/guides/partition-dependencies.md new file mode 100644 index 0000000000000..a42687b21b41b --- /dev/null +++ b/docs/docs-beta/docs/guides/partition-dependencies.md @@ -0,0 +1,82 @@ +--- +title: Defining dependencies between partitioned assets +description: Learn how to define dependencies between partitioned and unpartitioned assets in Dagster. +sidebar_label: Partition dependencies +sidebar_position: 31 +--- + +Now that you've seen how to model partitioned assets in different ways, you may want to define dependencies between the partitioned assets, or even between unpartitioned assets. + +Partitioned assets in Dagster can have dependencies on other partitioned assets, allowing you to create complex data pipelines where the output of one partitioned asset feeds into another. Here's how it works: + +- A downstream asset can depend on one or more partitions of an upstream asset +- The partitioning schemes don't need to be identical, but they should be compatible + +--- + +
      +Prerequisites + +To follow the steps in this guide, you'll need: + +- Familiarity with [Assets](/guides/data-assets) +- Familiarity with [Partitions](/guides/partitioning) + +
+ +## Dependencies between different time-based partitions \{#different-time-dependencies} + +The following example creates two partitioned assets, `daily_sales_data` and `daily_sales_summary`, which can be executed at the same time in a single schedule. + 
      +Show example + + + +
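A minimal sketch of that setup might look like the following. The asset bodies are placeholders and the start date is arbitrary:

```python
import dagster as dg

daily_partitions = dg.DailyPartitionsDefinition(start_date="2024-01-01")


@dg.asset(partitions_def=daily_partitions)
def daily_sales_data(context: dg.AssetExecutionContext) -> None:
    # Fetch and store the raw sales data for this partition's date
    context.log.info(f"Processing sales data for {context.partition_key}")


@dg.asset(partitions_def=daily_partitions, deps=[daily_sales_data])
def daily_sales_summary(context: dg.AssetExecutionContext) -> None:
    # Summarize the same day's raw data
    context.log.info(f"Summarizing sales data for {context.partition_key}")


daily_sales_job = dg.define_asset_job(
    "daily_sales_job",
    selection=[daily_sales_data, daily_sales_summary],
    partitions_def=daily_partitions,
)

# Runs once per day, targeting the most recent complete partition
daily_sales_schedule = dg.build_schedule_from_partitioned_job(daily_sales_job)
```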
+ +However, sometimes you might want to define dependencies between different time-based partitions. For example, you might want to aggregate daily data into a weekly report. + +Consider the following example: + + + +In this example: + +- We have a `daily_sales_data` asset partitioned by day, which will be executed daily. +- The `weekly_sales_summary` asset, which will be executed weekly, depends on the `daily_sales_data` asset. + + - In this asset, the weekly partition depends on all its parent partitions (all seven days of the week). We use `context.asset_partition_key_range_for_input("daily_sales_data")` to get a range of partition keys, which includes the start and end of the week. + +- To automate the execution of these assets: + + - First, we specify `automation_condition=AutomationCondition.eager()` to the `weekly_sales_summary` asset. This ensures it runs weekly after all seven daily partitions of `daily_sales_data` are up-to-date. + - Second, we specify `automation_condition=AutomationCondition.cron(cron_schedule="0 1 * * *")` to the `daily_sales_data` asset. This ensures it runs daily. + +Note: In the simpler example above, we manually set up a daily schedule for asset execution. For more complex dependency logic, it's recommended to use automation conditions instead of schedules. Automation conditions specify when an asset should run, which allows you to define execution criteria without custom scheduling logic. For more details, see [Declarative Automation](/concepts/automation/declarative-automation). + +## Dependencies between time-based and static partitions + +Combining time-based and static partitions allows you to analyze data across both temporal and categorical dimensions. This is particularly useful for scenarios like regional time series analysis. + +{/* TODO */} + +## Dependencies between time-based and dynamic partitions + +{/* TODO */} + +## Dependencies between time-based partitions and un-partitioned assets + +{/* TODO */} + +## Integrating Dagster partitions with external systems: incremental models and dbt + +{/* TODO */} + +## Next steps + +- Go deeper into [Understanding Partitioning](#) diff --git a/docs/docs-beta/docs/guides/partitioning.md b/docs/docs-beta/docs/guides/partitioning.md new file mode 100644 index 0000000000000..45f0a5c4c6c4b --- /dev/null +++ b/docs/docs-beta/docs/guides/partitioning.md @@ -0,0 +1,70 @@ +--- +title: Partitioning assets +description: Learn how to partition your data in Dagster. +sidebar_label: Partition data +sidebar_position: 30 +--- + +In Dagster, partitioning is a powerful technique for managing large datasets, improving pipeline performance, and enabling incremental processing. This guide will help you understand how to implement data partitioning in your Dagster projects. + +There are several ways to partition your data in Dagster: + +- [Time-based partitioning](#time-based), for processing data in specific time intervals +- [Static partitioning](#static-partitions), for dividing data based on predefined categories +- [Two-dimensional partitioning](#two-dimensional-partitions), for partitioning data along two different axes simultaneously +- [Dynamic partitioning](#dynamic-partitions), for creating partitions based on runtime information + 
      + Prerequisites + +To follow the steps in this guide, you'll need: + +- Familiarity with [Assets](/guides/data-assets) + +
+ +## Time-based partitions \{#time-based} + +A common use case for partitioning is to process data that can be divided into time intervals, such as daily logs or monthly reports. + + + +## Partitions with predefined categories \{#static-partitions} + +Sometimes you have a set of predefined categories for your data. For instance, you might want to process data separately for different regions. + + + +{/* TODO: Link to Backfill page to explain how to backfill regional sales data */} + +## Two-dimensional partitions \{#two-dimensional-partitions} + +Two-dimensional partitioning allows you to partition data along two different axes simultaneously. This is useful when you need to process data that can be categorized in multiple ways. For example: + + + +In this example: + +- Using `MultiPartitionsDefinition`, `two_dimensional_partitions` is defined with two dimensions: `date` and `region` +- An example partition key would be `2024-08-01|us` +- The `daily_regional_sales_data` and `daily_regional_sales_summary` assets are defined with the same two-dimensional partitioning scheme +- The `daily_regional_sales_schedule` runs daily at 1:00 AM, processing the previous day's data for all regions. It uses `MultiPartitionKey` to specify partition keys for both date and region dimensions, resulting in three runs per day, one for each region. + +## Partitions with dynamic categories \{#dynamic-partitions} + +Sometimes you don't know the partitions in advance. For example, you might want to process new regions that are added in your system. In these cases, you can use dynamic partitioning to create partitions based on runtime information. + +Consider this example: + + + +In this example: + +- Because the partition values are unknown in advance, `DynamicPartitionsDefinition` is used to define `region_partitions` +- When triggered, the `all_regions_sensor` will dynamically add all regions to the partition set and kick off a run for each region. In this example, that would be six runs, one for each region. + +## Next steps + +- TODO: Partition dependencies \ No newline at end of file diff --git a/docs/docs-beta/docs/guides/pipelines.md b/docs/docs-beta/docs/guides/pipelines.md new file mode 100644 index 0000000000000..6609d18797e0c --- /dev/null +++ b/docs/docs-beta/docs/guides/pipelines.md @@ -0,0 +1,54 @@ +--- +title: "Create a pipeline" +description: "Learn how to create data pipelines using Dagster's asset-based approach" +--- + +In Dagster, data pipelines are created using an asset-based approach. This overview will introduce you to the key concepts and steps involved in building a Dagster pipeline. + 
      + Prerequisites + +Before continuing, you should: +- Have Dagster installed. Refer to the [Installation guide](/getting-started/installation) for more information. +- Complete the [Quickstart](/getting-started/quickstart) + +
+ +Dagster uses assets as the building blocks of data pipelines. An asset represents a data object, such as a table, file, or machine learning model, that your pipeline produces or updates. + +As you define multiple assets and their dependencies, Dagster automatically creates an asset graph. This graph represents the structure and flow of your data pipeline. The nodes in this graph are not the individual operations that create the assets, but rather the assets themselves. + +```mermaid
flowchart LR
    A[Raw Sales Data] --> B[Cleaned Sales Data]
    B --> C[Sales Aggregations]
    C --> D[Customer Segmentation]
    C --> E[Sales Dashboard]
    D --> F[Lead Scoring Model]
``` + +## Steps to create a pipeline + +Most Dagster pipelines follow these steps: + +1. **Define data assets**: Start by creating individual assets using the `@asset` decorator. Each asset represents a data object in your pipeline. + +2. **Create dependencies**: Connect your assets by specifying dependencies, allowing Dagster to understand the flow of data through your pipeline. + +3. **Enrich with metadata**: Add context and improve observability by enriching your assets with metadata, such as descriptions, owners, and data quality checks. + +4. **Partition your data**: For large datasets or time-based processing, use Dagster's partitioning feature to efficiently manage and process your data. + +5. **Represent external sources**: Integrate external data sources into your pipeline to create a comprehensive view of your data ecosystem. + +## Next steps + +To start building your Dagster pipeline, dive into the following guides: + +- [Define data assets](/guides/data-assets) +- [Create dependencies between assets](/guides/asset-dependencies) +- [Enrich assets with metadata](/guides/metadata) +- [Partition assets](/guides/partitioning) +- [Represent external data sources](/guides/external-assets) + +By following these guides, you'll learn how to create powerful, maintainable data pipelines using Dagster's asset-based approach. diff --git a/docs/docs-beta/docs/guides/pipes.md b/docs/docs-beta/docs/guides/pipes.md new file mode 100644 index 0000000000000..2ded7841f8de5 --- /dev/null +++ b/docs/docs-beta/docs/guides/pipes.md @@ -0,0 +1,38 @@ +--- +title: "Executing code outside of Dagster with Pipes" +sidebar_position: 40 +sidebar_label: "External execution with Pipes" +--- + +# Executing code outside of Dagster with Pipes + +Dagster Pipes provides a powerful mechanism for invoking code outside of Dagster, while preserving all the benefits of scheduling, reporting, and observability of native Dagster pipelines. + +In this guide, we'll walk you through how to invoke non-Dagster code through Pipes. + 
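Before diving in, here's a minimal sketch of the pattern, assuming an external script named `external_code.py` next to the asset definition (both names are illustrative):

```python
import shutil

import dagster as dg


@dg.asset
def subprocess_asset(
    context: dg.AssetExecutionContext,
    pipes_subprocess_client: dg.PipesSubprocessClient,
) -> dg.MaterializeResult:
    cmd = [shutil.which("python"), "external_code.py"]
    # Launch the external process and wait for it to report results back
    return pipes_subprocess_client.run(
        command=cmd, context=context
    ).get_materialize_result()


defs = dg.Definitions(
    assets=[subprocess_asset],
    resources={"pipes_subprocess_client": dg.PipesSubprocessClient()},
)
```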
      +Prerequisites + +- Familiarity with [Assets](/concepts/assets) +
+ +## Setting up an asset that invokes your external code + +To set up invoking code outside of Dagster, you first need to set up an asset. We can invoke the external code within the asset function by using a Dagster Pipes client resource. + +It's not a requirement that this external code know anything about Dagster. It can even be a process running a different language on a remote machine - the only requirement is that it can be triggered from Python. + +In the following example, our external code is in a Python script that we invoke within a Dagster asset. + + + + +Materializing this asset in Dagster from the UI or from a sensor/schedule will kick off the execution of that external code. + +## Sending logs and metadata back to Dagster from external code + +Dagster Pipes also establishes a protocol for external code to optionally send logs and metadata back to Dagster. A Python client for this protocol is available as part of the `dagster_pipes` package. To send logs and metadata back to Dagster, we can create a `PipesContext` object within our external code: + + + +The logs sent back using the `PipesContext` will be visible in the structured logs of that asset materialization's run, and the materialization metadata will be reflected in the asset history. diff --git a/docs/docs-beta/docs/guides/project-structure.md b/docs/docs-beta/docs/guides/project-structure.md new file mode 100644 index 0000000000000..bae009c702cd7 --- /dev/null +++ b/docs/docs-beta/docs/guides/project-structure.md @@ -0,0 +1,156 @@ +--- +title: "How to structure your Dagster project" +--- + +# How to structure your Dagster project + +:::note +Refer to the project scaffolding tutorial to learn how to create a new Dagster project. +::: + +There are many ways to structure your Dagster project, and it can be difficult to know where to start. In this guide, we will walk you through our recommendations for how to organize your Dagster project. As your project grows, you are welcome to deviate from these recommendations. + +## Your initial project structure + +When you first scaffold your project using the Dagster command-line tool, an `assets.py` and `definitions.py` are created in the root of your project. + +```sh
$ dagster project scaffold --name example-dagster-project
``` + +```
.
├── README.md
├── example_dagster_project
│   ├── __init__.py
│   ├── assets.py
│   └── definitions.py
├── example_dagster_project_tests
│   ├── __init__.py
│   └── test_assets.py
├── pyproject.toml
├── setup.cfg
└── setup.py
``` + +This is a great structure as you are first getting started; however, as you begin to introduce more assets, jobs, resources, sensors, and utility code, you may find that your Python files are growing too large to manage. + +## Restructure your project + +There are several paradigms in which you can structure your project. Choosing one of these structures is often a matter of personal preference, and influenced by how you and your team members operate. This guide will outline two possible project structures: + +1. [Option 1: Structured by technology](#option-1-structured-by-technology) +2. [Option 2: Structured by concept](#option-2-structured-by-concept) + + +### Option 1: Structured by technology + +Data engineers often have a strong understanding of the underlying technologies that are used in their data pipelines. Because of that, it's often beneficial to organize your project by technology. 
This enables engineers to easily navigate the code base and locate files pertaining to the specific technology. + +Within the technology modules, sub-modules can be created to further organize your code. + +```
.
└── example_dagster_project/
    ├── dbt/
    │   ├── __init__.py
    │   ├── assets.py
    │   ├── resources.py
    │   └── definitions.py
    ├── dlt/
    │   ├── __init__.py
    │   ├── pipelines/
    │   │   ├── __init__.py
    │   │   ├── github.py
    │   │   └── hubspot.py
    │   ├── assets.py
    │   ├── resources.py
    │   └── definitions.py
    └── definitions.py
``` + +### Option 2: Structured by concept + +It's also possible to introduce a layer of categorization by the overarching data processing concept. For example, whether the job is performing some kind of transformation, ingestion of data, or processing operation. + +This provides additional context to the engineers who may not have as strong of a familiarity with the underlying technologies that are being used. + +```
.
└── example_dagster_project/
    ├── ingestion/
    │   └── dlt/
    │       ├── assets.py
    │       ├── resources.py
    │       └── definitions.py
    ├── transformation/
    │   ├── dbt/
    │   │   ├── assets.py
    │   │   ├── resources.py
    │   │   ├── partitions.py
    │   │   └── definitions.py
    │   └── adhoc/
    │       ├── assets.py
    │       ├── resources.py
    │       └── definitions.py
    └── definitions.py
``` + +## Merging definitions objects + +It's possible to define multiple `Definitions` objects, often with one for each sub-module in your project. These definitions can then be merged at the root of your project using the `Definitions.merge` method. + +The benefit of such a structure is that dependencies like resources and partitions can be scoped to their corresponding definitions. + +```py title="example-merge-definitions.py"
from dagster import Definitions

from dbt.definitions import dbt_definitions
from dlt.definitions import dlt_definitions


defs = Definitions.merge(
    dbt_definitions,
    dlt_definitions,
)
``` + +## Configuring multiple code locations + +This guide has outlined how to structure a project within a single code location; however, Dagster also allows you to structure a project spanning multiple locations. + +In most cases, one code location should be sufficient. A helpful pattern uses multiple code locations to separate conflicting dependencies, where each definition has its own package requirements and deployment specs. + +To include multiple code locations in a single project, you'll need to add a configuration file to your project: + +- **If using Dagster+**, add a `dagster_cloud.yaml` file to the root of your project. +- **If developing locally or deploying to your infrastructure**, add a `workspace.yaml` file to the root of your project. + +## External projects + +As your data platform evolves, Dagster will enable you to orchestrate other data tools, such as dbt, Sling, or Jupyter notebooks. + +For these projects, it's recommended to store them outside your Dagster project. See the `dbt_project` example below. + +```
. 
+├── dbt_project/
│   ├── config/
│   │   └── profiles.yml
│   ├── dbt_project.yml
│   ├── macros/
│   │   ├── aggregate_actions.sql
│   │   └── generate_schema_name.sql
│   ├── models/
│   │   ├── activity_analytics/
│   │   │   ├── activity_daily_stats.sql
│   │   │   ├── comment_daily_stats.sql
│   │   │   └── story_daily_stats.sql
│   │   ├── schema.yml
│   │   └── sources.yml
│   └── tests/
│       └── assert_true.sql
└── example_dagster_project/
``` + +## Next steps + +- Explore the [Definitions.merge](https://docs.dagster.io/_apidocs/definitions#dagster.Definitions.merge) API docs diff --git a/docs/docs-beta/docs/guides/resources.md b/docs/docs-beta/docs/guides/resources.md new file mode 100644 index 0000000000000..76c3a5e6d1e06 --- /dev/null +++ b/docs/docs-beta/docs/guides/resources.md @@ -0,0 +1,7 @@ +--- +title: Using Resources to manage external systems +sidebar_label: Resources +unlisted: true +--- + +Dagster resources are objects that provide access to external systems, databases, or services. They manage connections to those external systems and are used by Dagster ops and assets. diff --git a/docs/docs-beta/docs/guides/running-local-ui-development.md b/docs/docs-beta/docs/guides/running-local-ui-development.md new file mode 100644 index 0000000000000..d65775c6bfc79 --- /dev/null +++ b/docs/docs-beta/docs/guides/running-local-ui-development.md @@ -0,0 +1,76 @@ +--- +title: Running Dagster locally +description: How to run Dagster on your local machine. +--- + +# Running Dagster locally + +In this guide, we'll walk you through how to run Dagster on your local machine using the `dagster dev` command. The `dagster dev` command launches the Dagster UI and the Dagster daemon, allowing you to start a full deployment of Dagster from the command line. + +:::warning +`dagster dev` is intended for local development _only_. If you want to run Dagster for production use cases, see our [Deployment](/guides/deployment) guides. +::: + +## Locating your code + +Before starting local development, you need to tell Dagster how to find the Python code containing your assets and jobs. + +For a refresher on how to set up a Dagster project, follow our [Recommended Dagster Project Structure](/todo) guide. + + + + Dagster can load Python modules as code locations. + + We can use the `-m` argument to supply the name of the module to start a Dagster instance loaded with our definitions: + ```shell
  dagster dev -m my_module
  ``` + + + + To load definitions from a module without supplying the `-m` command line argument, you can use a `pyproject.toml` file. This file, included in all Dagster example projects, contains a `tool.dagster` section where you can supply the `module_name`: + + + + + + Dagster can load a file directly as a code location. + + Given the preceding file, we can use the `-f` argument to supply the name of the file to start a Dagster instance loaded with our definitions: + ```shell
  dagster dev -f defs.py
  ``` + + :::note
  We don't recommend using the `-f` argument for production deployments, to avoid a whole class of Python import errors.
  ::: + + + + +## Creating a persistent instance + +Running `dagster dev` without any additional configuration starts an ephemeral instance in a temporary directory. You may see log output to that effect: +```shell
Using temporary directory /Users/rhendricks/tmpqs_fk8_5 for storage.
``` +This indicates that any runs or materialized assets created during your session won't be persisted once the session ends. 
+ +To designate a more permanent home for your runs and assets, you can set the `DAGSTER_HOME` environment variable to a folder on your filesystem. Dagster will then use the specified folder for storage on all subsequent runs of `dagster dev`. + +```shell +mkdir -p ~/.dagster_home +export DAGSTER_HOME=~/.dagster_home +dagster dev +``` + +## Configuring your instance + +To configure your Dagster instance, you can create a `dagster.yaml` file in your `$DAGSTER_HOME` folder. + +For example, to have your local instance limit the number of concurrent runs, you could configure the following `dagster.yaml`: + + + +For the full list of options that can be set in the `dagster.yaml` file, refer to the [Dagster instance documentation](/todo). + diff --git a/docs/docs-beta/docs/guides/schedules.md b/docs/docs-beta/docs/guides/schedules.md new file mode 100644 index 0000000000000..9355498a7d1df --- /dev/null +++ b/docs/docs-beta/docs/guides/schedules.md @@ -0,0 +1,64 @@ +--- +title: "Schedule cron-based pipelines" +sidebar_label: "Schedules" +sidebar_position: 10 +--- + +Schedules enable automated execution of jobs at specified intervals. These intervals can range from common frequencies like hourly, daily, or weekly, to more intricate patterns defined using cron expressions. + +
      +Prerequisites + +To follow the steps in this guide, you'll need: + +- Familiarity with [Assets](/concepts/assets) +- Familiarity with [Ops and Jobs](/concepts/ops-jobs) +
+
+## Basic schedule
+
+A basic schedule is defined by a `JobDefinition` and a `cron_schedule` using the `ScheduleDefinition` class. A job can be thought of as a selection of assets or operations executed together.
+
+
+
+## Run schedules in a different timezone
+
+By default, schedules without a timezone will run in Coordinated Universal Time (UTC). To run a schedule in a different timezone, set the `timezone` parameter:
+
+```python
+daily_schedule = ScheduleDefinition(
+    job=daily_refresh_job,
+    cron_schedule="0 0 * * *",
+    # highlight-next-line
+    timezone="America/Los_Angeles",
+)
+```
+
+## Create schedules from partitions
+
+If you're using partitions and jobs, you can create a schedule from the partitioned job with `build_schedule_from_partitioned_job`. The schedule will execute at the same cadence specified by the partition definition.
+
+
+
+
+If you have a [partitioned asset](/guides/partitioning) and job:
+
+
+
+
+
+
+If you have a partitioned op job:
+
+
+
+
+
+
+## Next steps
+
+By understanding and effectively using these automation methods, you can build more efficient data pipelines that respond to your specific needs and constraints:
+
+- Learn more about schedules in [Understanding automation](/concepts/automation)
+- React to events with [sensors](/guides/sensors)
+- Explore [Declarative Automation](/concepts/automation/declarative-automation) as an alternative to schedules
diff --git a/docs/docs-beta/docs/guides/secrets.md b/docs/docs-beta/docs/guides/secrets.md
new file mode 100644
index 0000000000000..d26e2a3b32571
--- /dev/null
+++ b/docs/docs-beta/docs/guides/secrets.md
@@ -0,0 +1,5 @@
+---
+title: Managing secrets
+sidebar_position: 50
+unlisted: true
+---
diff --git a/docs/docs-beta/docs/guides/selection-syntax.md b/docs/docs-beta/docs/guides/selection-syntax.md
new file mode 100644
index 0000000000000..f87cf5bd231f4
--- /dev/null
+++ b/docs/docs-beta/docs/guides/selection-syntax.md
@@ -0,0 +1,352 @@
+---
+title: 'Asset selection syntax'
+sidebar_position: 70
+sidebar_label: 'Asset selection syntax'
+---
+
+# Asset selection syntax
+
+This reference contains information about the syntax for asset selections, including a variety of examples for selecting assets and their downstream and upstream dependencies.
+
+Asset selection may be used to:
+
+- Define a job that targets a selection of assets
+- Select a set of assets to view in the Dagster UI
+- Select a set of assets for an ad hoc run
+
+## Syntax usage
+
+A query includes a list of clauses. Clauses are separated by commas, except in the case of the `selection` parameter of the following methods. In these cases, each clause is a separate element in a list:
+
+- `define_asset_job`
+- `materialize`
+- `materialize_to_memory`
+
+| Clause syntax         | Description |
+| --------------------- | ----------- |
+| `ASSET_KEY`           | Selects a single asset by asset key. [See an example](#single-asset). |
+| `COMPONENT/COMPONENT` | Selects an asset key with multiple components, such as a prefix, where slashes (`/`) are inserted between components. [See an example](#multiple-key-components). |
+| `*ASSET_KEY`          | Selects an asset and all of its upstream dependencies. [See an example](#all-upstream). |
+| `ASSET_KEY*`          | Selects an asset and all of its downstream dependencies. [See an example](#all-downstream). |
+| `+ASSET_KEY`          | Selects an asset and one layer upstream of the asset. Including multiple `+`s will select that number of upstream layers from the asset. Any number of `+`s is supported. [See an example](#specific-upstream). |
+| `ASSET_KEY+`          | Selects an asset and one layer downstream of the asset. Including multiple `+`s will select that number of downstream layers from the asset. Any number of `+`s is supported. [See an example](#specific-downstream). |
+
+## Examples
+
+The examples in this section use the following asset graph from the [Dagster University Essentials project](https://github.com/dagster-io/project-dagster-university) to demonstrate how to use the selection syntax:
+
+![Screenshot of Daggy U project graph](/img/placeholder.svg)
+
+### Selecting a single asset \{#single-asset}
+
+To select a single asset, use the asset's asset key. This example selects the `taxi_zones_file` asset:
+
+
+
+
+```python
+raw_data_job = define_asset_job(name="raw_data_job", selection="taxi_zones_file")
+```
+
+
+
+
+```shell
+dagster asset list --select taxi_zones_file
+dagster asset materialize --select taxi_zones_file
+```
+
+
+
+
+```shell
+taxi_zones_file
+```
+
+Which would result in the following asset graph:
+
+![Screenshot of Daggy U project graph](/img/placeholder.svg)
+
+
+
+
+---
+
+### Selecting assets with multiple key components \{#multiple-key-components}
+
+To select an asset with a key containing multiple components, such as a prefix, insert slashes (`/`) between the components.
+
+This example selects the `manhattan/manhattan_stats` asset, which is defined below:
+
+```python
+@asset(
+    deps=[AssetKey(["taxi_trips"]), AssetKey(["taxi_zones"])], key_prefix="manhattan"
+)
+def manhattan_stats(database: DuckDBResource):
+    ...
+```
+
+
+
+
+```python
+manhattan_job = define_asset_job(name="manhattan_job", selection="manhattan/manhattan_stats")
+```
+
+
+
+
+```shell
+dagster asset list --select manhattan/manhattan_stats
+dagster asset materialize --select manhattan/manhattan_stats
+```
+
+
+
+
+```shell
+manhattan/manhattan_stats
+```
+
+Which would result in the following asset graph:
+
+![Screenshot of Daggy U project graph](/img/placeholder.svg)
+
+
+
+
+---
+
+### Selecting multiple assets \{#multiple-assets}
+
+To select multiple assets, use a list of the assets' asset keys. The assets don't have to be dependent on each other.
+
+This example selects the `taxi_zones_file` and `taxi_trips_file` assets:
+
+
+
+
+```python
+raw_data_job = define_asset_job(
+    name="taxi_zones_job", selection=["taxi_zones_file", "taxi_trips_file"]
+)
+```
+
+
+
+
+When selecting multiple assets, enclose the list of asset keys in double quotes (`"`) and separate each asset key with a comma:
+
+```shell
+dagster asset list --select "taxi_zones_file,taxi_trips_file"
+dagster asset materialize --select "taxi_zones_file,taxi_trips_file"
+```
+
+
+
+
+```shell
+taxi_zones_file taxi_trips_file
+```
+
+Which would result in the following asset graph:
+
+![Screenshot of Daggy U project graph](/img/placeholder.svg)
+
+
+
+
+---
+
+### Selecting an asset's entire lineage \{#full-lineage}
+
+To select an asset's entire lineage, add an asterisk (`*`) before and after the asset key in the query.
+
+This example selects the entire lineage for the `taxi_zones` asset.
+
+
+
+```python
+taxi_zones_job = define_asset_job(name="taxi_zones_job", selection="*taxi_zones*")
+```
+
+
+
+
+When selecting an asset's entire lineage using the CLI, enclose the asterisk (`*`) and the asset key in double quotes (`"`):
+
+```shell
+dagster asset list --select "*taxi_zones*"
+dagster asset materialize --select "*taxi_zones*"
+```
+
+
+
+
+```shell
+*taxi_zones*
+```
+
+Which would result in the following asset graph:
+
+![Screenshot of Daggy U project graph](/img/placeholder.svg)
+
+
+
+
+---
+
+### Selecting upstream dependencies
+
+#### Selecting all upstream dependencies \{#all-upstream}
+
+To select an asset and all its upstream dependencies, add an asterisk (`*`) before the asset key in the query.
+
+This example selects the `manhattan_map` asset and all its upstream dependencies.
+
+
+
+
+```python
+manhattan_job = define_asset_job(name="manhattan_job", selection="*manhattan_map")
+```
+
+
+
+
+When selecting an asset's dependencies using the CLI, enclose the asterisk (`*`) and the asset key in double quotes (`"`):
+
+```shell
+dagster asset list --select "*manhattan_map"
+dagster asset materialize --select "*manhattan_map"
+```
+
+
+
+
+```shell
+*manhattan_map
+```
+
+Which would result in the following asset graph:
+
+![Screenshot of Daggy U project graph](/img/placeholder.svg)
+
+
+
+
+#### Selecting a specific number of upstream layers \{#specific-upstream}
+
+To select an asset and multiple upstream layers, add a plus sign (`+`) for each layer you want to select before the asset key in the query.
+
+This example selects the `manhattan_map` asset and two upstream layers.
+
+
+
+
+```python
+manhattan_job = define_asset_job(name="manhattan_job", selection="++manhattan_map")
+```
+
+
+
+
+When selecting an asset's dependencies using the CLI, enclose the plus sign (`+`) and the asset key in double quotes (`"`):
+
+```shell
+dagster asset list --select "++manhattan_map"
+dagster asset materialize --select "++manhattan_map"
+```
+
+
+
+
+```shell
+++manhattan_map
+```
+
+Which would result in the following asset graph:
+
+![Screenshot of Daggy U project graph](/img/placeholder.svg)
+
+
+
+
+---
+
+### Selecting downstream dependencies
+
+#### Selecting all downstream dependencies \{#all-downstream}
+
+To select an asset and all its downstream dependencies, add an asterisk (`*`) after the asset key in the query.
+
+This example selects the `taxi_zones_file` asset and all its downstream dependencies.
+
+
+
+
+```python
+taxi_zones_job = define_asset_job(name="taxi_zones_job", selection="taxi_zones_file*")
+```
+
+
+
+
+When selecting an asset's dependencies using the CLI, enclose the asterisk (`*`) and the asset key in double quotes (`"`):
+
+```shell
+dagster asset list --select "taxi_zones_file*"
+dagster asset materialize --select "taxi_zones_file*"
+```
+
+
+
+
+```shell
+taxi_zones_file*
+```
+
+Which would result in the following asset graph:
+
+![Screenshot of Daggy U project graph](/img/placeholder.svg)
+
+
+
+
+#### Selecting a specific number of downstream layers \{#specific-downstream}
+
+To select an asset and multiple downstream layers, add a plus sign (`+`) for each layer you want to select after the asset key in the query.
+
+This example selects the `taxi_zones_file` asset and two downstream layers.
+ + + + +```python +taxi_zones_job = define_asset_job(name="taxi_zones_job", selection="taxi_zones_file++") +``` + + + + +When selecting an asset's dependencies using the CLI, enclose the plus sign (`+`) and the asset key in double quotes (`"`): + +```shell +dagster asset list --select "taxi_zones_file++" +dagster asset materialize --select "taxi_zones_file++" +``` + + + + +```shell +taxi_zones_file++ +``` + +Which would result in the following asset graph: + +![Screenshot of Daggy U project graph](/img/placeholder.svg) + + + diff --git a/docs/docs-beta/docs/guides/self-hosted-to-dagster-plus.md b/docs/docs-beta/docs/guides/self-hosted-to-dagster-plus.md new file mode 100644 index 0000000000000..715612374d783 --- /dev/null +++ b/docs/docs-beta/docs/guides/self-hosted-to-dagster-plus.md @@ -0,0 +1,5 @@ +--- +title: "Migrate from self-hosted to Dagster+" +sidebar_position: 70 +unlisted: true +--- diff --git a/docs/docs-beta/docs/guides/sensors.md b/docs/docs-beta/docs/guides/sensors.md new file mode 100644 index 0000000000000..6792926db0771 --- /dev/null +++ b/docs/docs-beta/docs/guides/sensors.md @@ -0,0 +1,77 @@ +--- +title: Creating event-based pipelines with sensors +sidebar_label: Event triggers +sidebar_position: 20 +--- + +Sensors enable you to trigger Dagster runs in response to events from external systems. They run at regular intervals, either triggering a run or explaining why a run was skipped. For example, you can trigger a run when a new file is added to an Amazon S3 bucket or when a database row is updated. + +
      +Prerequisites + +To follow the steps in this guide, you'll need: + +- Familiarity with [Assets](/concepts/assets) +- Familiarity with [Ops and Jobs](/concepts/ops-jobs) +
+
+## Basic sensor
+
+Sensors are defined with the `@sensor` decorator. The following example includes a `check_for_new_files` function that simulates finding new files. In a real scenario, this function would check an actual system or directory.
+
+If the sensor finds new files, it starts a run of `my_job`. If not, it skips the run and logs `No new files found` in the Dagster UI.
+
+
+
+:::tip
+Unless a sensor has a `default_status` of `DefaultSensorStatus.RUNNING`, it won't be enabled when first deployed to a Dagster instance. To find and enable the sensor, click **Automation > Sensors** in the Dagster UI.
+:::
+
+## Customizing intervals between evaluations
+
+The `minimum_interval_seconds` argument allows you to specify the minimum number of seconds that will elapse between sensor evaluations. This means that the sensor won't be evaluated more frequently than the specified interval.
+
+It's important to note that this interval represents a minimum interval between runs of the sensor, not the exact frequency at which the sensor runs. If a sensor takes longer to complete than the specified interval, the next evaluation will be delayed accordingly.
+
+```python
+# Sensor will be evaluated at most once every 30 seconds
+@dg.sensor(job=my_job, minimum_interval_seconds=30)
+def new_file_sensor():
+    ...
+```
+
+In this example, if the `new_file_sensor`'s evaluation function takes less than a second to run, you can expect the sensor to run consistently around every 30 seconds. However, if the evaluation function takes longer, the interval between evaluations will be longer.
+
+## Preventing duplicate runs
+
+To prevent duplicate runs, you can use run keys to uniquely identify each `RunRequest`. In the [previous example](#basic-sensor), the `RunRequest` was constructed with a `run_key`:
+
+```python
+yield dg.RunRequest(run_key=filename)
+```
+
+For a given sensor, a single run is created for each `RunRequest` with a unique `run_key`. Dagster will skip processing requests with previously used run keys, ensuring that duplicate runs won't be created.
+
+## Cursors and high-volume events
+
+When dealing with a large number of events, you may want to implement a cursor to optimize sensor performance. Unlike run keys, cursors allow you to implement custom logic that manages state.
+
+The following example demonstrates how you might use a cursor to only create `RunRequests` for files in a directory that have been updated since the last time the sensor ran.
+
+
+
+For sensors that consume multiple event streams, you may need to serialize and deserialize a more complex data structure in and out of the cursor string to keep track of the sensor's progress over the multiple streams.
+
+:::note
+The preceding example uses both a `run_key` and a cursor, which means that if the cursor is reset but the files don't change, new runs won't be launched. This is because the run keys associated with the files won't change.
+
+If you want to be able to reset a sensor's cursor, don't set `run_key`s on `RunRequest`s.
+:::
+
+## Next steps
+
+By understanding and effectively using these automation methods, you can build more efficient data pipelines that respond to your specific needs and constraints.
+ +- Run pipelines on a [schedule](/guides/schedules) +- Trigger cross-job dependencies with [asset sensors](/guides/asset-sensors) +- Explore [Declarative Automation](/concepts/automation/declarative-automation) as an alternative to sensors \ No newline at end of file diff --git a/docs/docs-beta/docs/guides/some-guide.md b/docs/docs-beta/docs/guides/some-guide.md new file mode 100644 index 0000000000000..25867d5cbee78 --- /dev/null +++ b/docs/docs-beta/docs/guides/some-guide.md @@ -0,0 +1,3 @@ +--- +unlisted: true +--- diff --git a/docs/docs-beta/docs/guides/transform-dbt.md b/docs/docs-beta/docs/guides/transform-dbt.md new file mode 100644 index 0000000000000..758133ae012e7 --- /dev/null +++ b/docs/docs-beta/docs/guides/transform-dbt.md @@ -0,0 +1,81 @@ +--- +title: Transforming data with dbt +sidebar_position: 20 +sidebar_label: Transform data with dbt +last_update: + date: 2024-08-26 + author: Nick Roach +--- + +Dagster orchestrates dbt alongside other technologies, so you can schedule dbt with Spark, Python, etc. in a single data pipeline. Dagster's asset-oriented approach allows Dagster to understand dbt at the level of individual dbt models. + +
      + Prerequisites + +To follow the steps in this guide, you'll need: + +- A basic understanding of dbt, DuckDB, and Dagster concepts such as [assets](/todo) and [resources](/todo) +- To install the [dbt](https://docs.getdbt.com/docs/core/installation-overview) and [DuckDB CLIs](https://duckdb.org/docs/api/cli/overview.html) +- To install the following packages: + + ```shell + pip install dagster duckdb plotly dagster-dbt dbt-duckdb + ``` +
+
+## Setting up a basic dbt project
+
+Start by downloading this basic dbt project, which includes a few models and a DuckDB backend:
+
+```bash
+git clone https://github.com/dagster-io/basic-dbt-project
+```
+
+The project structure should look like this:
+
+```
+├── README.md
+├── dbt_project.yml
+├── profiles.yml
+├── models
+│   └── example
+│       ├── my_first_dbt_model.sql
+│       ├── my_second_dbt_model.sql
+│       └── schema.yml
+```
+
+First, you need to point Dagster at the dbt project and ensure Dagster has what it needs to build an asset graph. Create a `definitions.py` in the same directory as the dbt project:
+
+
+
+## Adding upstream dependencies
+
+Oftentimes, you'll want Dagster to generate data that will be used by downstream dbt models. To do this, add an upstream asset that the dbt project will use as a source:
+
+
+
+Next, you'll add a dbt model that will source the `raw_customers` asset and define the dependency for Dagster. Create the dbt model:
+
+
+
+Then, create a `_source.yml` file that points dbt to the upstream `raw_customers` asset:
+
+
+
+![Screenshot of dbt lineage](/img/placeholder.svg)
+
+## Adding downstream dependencies
+
+You may also have assets that depend on the output of dbt models. Next, create an asset that depends on the result of the new `customers` model. This asset will create a histogram of the first names of the customers:
+
+
+
+## Scheduling dbt models
+
+You can schedule your dbt models by using `dagster-dbt`'s `build_schedule_from_dbt_selection` function:
+
+
+
+## Next steps
+
+{/* TODO: Add link to dbt partitioning guide */}
diff --git a/docs/docs-beta/docs/guides/unit-tests-assets-and-ops.md b/docs/docs-beta/docs/guides/unit-tests-assets-and-ops.md
new file mode 100644
index 0000000000000..e98707e995760
--- /dev/null
+++ b/docs/docs-beta/docs/guides/unit-tests-assets-and-ops.md
@@ -0,0 +1,111 @@
+---
+title: "Unit testing assets and ops"
+sidebar_position: 30
+sidebar_label: "Unit testing"
+---
+
+Unit testing is essential for ensuring that computations function as intended. In the context of data pipelines, this can be particularly challenging. However, Dagster streamlines the process by enabling direct invocation of computations with specified input values and mocked resources, making it easier to verify that data transformations behave as expected.
+
+While unit tests can't fully replace integration tests or manual review, they can catch a variety of errors with a significantly faster feedback loop.
+
+This guide covers how to write unit tests for assets and ops with a variety of different input requirements.
+
+
      +Prerequisites + +To follow the steps in this guide, you'll need familiarity with: + +- [Assets](/concepts/assets) +- [Ops and Jobs](/concepts/ops-jobs) +
      + +## Before you start + +Before you begin implementing unit tests, note that: + +- Testing individual assets or ops is generally recommended over unit testing entire jobs +- Unit testing isn't recommended in cases where most of the business logic is encoded in an external system, such as an asset which directly invokes an external Databricks job. + +## Assets and ops without arguments \{#no-arguments} + +The simplest assets and ops to test are those with no arguments. In these cases, you can directly invoke definitions. + + + + + + + + + + +## Assets and ops with upstream dependencies \{#upstream-dependencies} + +If an asset or op has an upstream dependency, you can directly pass a value for that dependency when invoking the definition. + + + + + + + + + + +## Assets and ops with config \{#config} + +If an asset or op uses config, you can construct an instance of the required config object and pass it in directly. + + + + + + + + + + +## Assets and ops with resources \{#resources} + +If an asset or op uses a resource, it can be useful to create a mock instance of the resource to avoid interacting with external services. + + + + + + + + + + +## Assets and ops with context \{#context} + +If an asset or op uses a `context` argument, you can use `build_asset_context()` or `build_op_context()` to construct a context object. + + + + + + + + + + +## Assets and ops with multiple parameters \{#multiple-parameters} + +If an asset or op has multiple parameters, it's recommended to use keyword arguments for clarity. + + + + + + + + + + +## Next steps + +- Learn more about assets in [Understanding Assets](/concepts/assets) +- Learn more about ops in [Understanding Ops](/concepts/ops-jobs) +- Learn more about resources in [Resources](/concepts/resources) \ No newline at end of file diff --git a/docs/docs-beta/docs/integrations/airbyte.md b/docs/docs-beta/docs/integrations/airbyte.md new file mode 100644 index 0000000000000..eec45c87481fb --- /dev/null +++ b/docs/docs-beta/docs/integrations/airbyte.md @@ -0,0 +1,35 @@ +--- +layout: Integration +status: published +name: Airbyte +title: Dagster & Airbyte +sidebar_label: Airbyte +excerpt: Orchestrate Airbyte connections and schedule syncs alongside upstream or downstream dependencies. +date: 2022-11-07 +apireflink: https://docs.dagster.io/_apidocs/libraries/dagster-airbyte +docslink: https://docs.dagster.io/integrations/airbyte +partnerlink: https://airbyte.com/tutorials/orchestrate-data-ingestion-and-transformation-pipelines +logo: /integrations/airbyte.svg +categories: + - ETL +enabledBy: +enables: +--- + +### About this integration + +Using this integration, you can trigger Airbyte syncs and orchestrate your Airbyte connections from within Dagster, making it easy to chain an Airbyte sync with upstream or downstream steps in your workflow. + +### Installation + +```bash +pip install dagster-airbyte +``` + +### Example + + + +### About Airbyte + +**Airbyte** is an open source data integration engine that helps you consolidate your SaaS application and database data into your data warehouses, lakes and databases. 
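+
+For example, here's a minimal sketch of exposing the tables synced by a single Airbyte connection as Dagster assets. The host, credentials, connection ID, and table names below are placeholder assumptions, not values from the Airbyte docs:
+
+```python
+from dagster import Definitions, EnvVar
+from dagster_airbyte import AirbyteResource, build_airbyte_assets
+
+# Placeholder connection details -- point these at your own Airbyte deployment.
+airbyte_instance = AirbyteResource(
+    host="localhost",
+    port="8000",
+    username="airbyte",
+    password=EnvVar("AIRBYTE_PASSWORD"),
+)
+
+# Expose the destination tables written by one Airbyte connection as assets.
+airbyte_assets = build_airbyte_assets(
+    connection_id="your-airbyte-connection-id",  # hypothetical connection ID
+    destination_tables=["stargazers", "releases"],  # hypothetical tables
+)
+
+defs = Definitions(
+    assets=airbyte_assets,
+    resources={"airbyte": airbyte_instance},
+)
+```
+
+Materializing these assets in Dagster triggers the corresponding Airbyte sync and waits for it to complete.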
diff --git a/docs/docs-beta/docs/integrations/aws/athena.md b/docs/docs-beta/docs/integrations/aws/athena.md
new file mode 100644
index 0000000000000..e17f95e077147
--- /dev/null
+++ b/docs/docs-beta/docs/integrations/aws/athena.md
@@ -0,0 +1,35 @@
+---
+layout: Integration
+status: published
+name: Athena
+title: Dagster & AWS Athena
+sidebar_label: Athena
+excerpt: This integration allows you to connect to AWS Athena and analyze data in Amazon S3 using standard SQL within your Dagster pipelines.
+date: 2024-06-21
+apireflink: https://docs.dagster.io/_apidocs/libraries/dagster-aws
+docslink:
+partnerlink: https://aws.amazon.com/
+logo: /integrations/aws-athena.svg
+categories:
+  - Storage
+enabledBy:
+enables:
+---
+
+### About this integration
+
+This integration allows you to connect to AWS Athena, a serverless interactive query service that makes it easy to analyze data in Amazon S3 using standard SQL. Using this integration, you can issue queries to Athena, fetch results, and handle query execution states within your Dagster pipelines.
+
+### Installation
+
+```bash
+pip install dagster-aws
+```
+
+### Examples
+
+
+
+### About AWS Athena
+
+AWS Athena is a serverless, interactive query service that allows you to analyze data directly in Amazon S3 using standard SQL. Athena is easy to use: point to your data in Amazon S3, define the schema, and start querying using standard SQL. Most results are delivered within seconds. With Athena, there's no infrastructure to set up or manage, and you pay only for the queries you run. It scales automatically—executing queries in parallel—so results are fast, even with large datasets and complex queries.
diff --git a/docs/docs-beta/docs/integrations/aws/cloudwatch.md b/docs/docs-beta/docs/integrations/aws/cloudwatch.md
new file mode 100644
index 0000000000000..6f31e5b7fbf02
--- /dev/null
+++ b/docs/docs-beta/docs/integrations/aws/cloudwatch.md
@@ -0,0 +1,45 @@
+---
+layout: Integration
+status: published
+name: CloudWatch
+title: Dagster & AWS CloudWatch
+sidebar_label: CloudWatch
+excerpt: This integration allows you to send Dagster logs to AWS CloudWatch, enabling centralized logging and monitoring of your Dagster jobs.
+date: 2024-06-21
+apireflink: https://docs.dagster.io/_apidocs/libraries/dagster-aws
+docslink:
+partnerlink: https://aws.amazon.com/
+logo: /integrations/aws-cloudwatch.svg
+categories:
+  - Monitoring
+enabledBy:
+enables:
+---
+
+### About this integration
+
+This integration allows you to send Dagster logs to AWS CloudWatch, enabling centralized logging and monitoring of your Dagster jobs. By using AWS CloudWatch, you can take advantage of its powerful log management features, such as real-time log monitoring, log retention policies, and alerting capabilities.
+
+Using this integration, you can configure your Dagster jobs to log directly to AWS CloudWatch, making it easier to track and debug your workflows. This is particularly useful for production environments where centralized logging is essential for maintaining observability and operational efficiency.
+
+### Installation
+
+```bash
+pip install dagster-aws
+```
+
+### Examples
+
+
+
+### About AWS CloudWatch
+
+AWS CloudWatch is a monitoring and observability service provided by Amazon Web Services (AWS). It allows you to collect, access, and analyze performance and operational data from a variety of AWS resources, applications, and services.
With AWS CloudWatch, you can set up alarms, visualize logs and metrics, and gain insights into your infrastructure and applications to ensure they're running smoothly.
+
+AWS CloudWatch provides features such as:
+
+- Real-time monitoring: Track the performance of your applications and infrastructure in real-time.
+- Log management: Collect, store, and analyze log data from various sources.
+- Alarms and notifications: Set up alarms to automatically notify you of potential issues.
+- Dashboards: Create custom dashboards to visualize metrics and logs.
+- Integration with other AWS services: Seamlessly integrate with other AWS services for a comprehensive monitoring solution.
diff --git a/docs/docs-beta/docs/integrations/aws/ecr.md b/docs/docs-beta/docs/integrations/aws/ecr.md
new file mode 100644
index 0000000000000..dfaec5dea91f8
--- /dev/null
+++ b/docs/docs-beta/docs/integrations/aws/ecr.md
@@ -0,0 +1,37 @@
+---
+layout: Integration
+status: published
+name: ECR
+title: Dagster & AWS ECR
+sidebar_label: ECR
+excerpt: This integration allows you to connect to AWS Elastic Container Registry (ECR), enabling you to manage your container images more effectively in your Dagster pipelines.
+date: 2024-06-21
+apireflink: https://docs.dagster.io/_apidocs/libraries/dagster-aws
+docslink:
+partnerlink: https://aws.amazon.com/
+logo: /integrations/aws-ecr.svg
+categories:
+  - Other
+enabledBy:
+enables:
+---
+
+### About this integration
+
+This integration allows you to connect to AWS Elastic Container Registry (ECR). It provides resources to interact with AWS ECR, enabling you to manage your container images.
+
+Using this integration, you can seamlessly integrate AWS ECR into your Dagster pipelines, making it easier to manage and deploy containerized applications.
+
+### Installation
+
+```bash
+pip install dagster-aws
+```
+
+### Examples
+
+
+
+### About AWS ECR
+
+AWS Elastic Container Registry (ECR) is a fully managed Docker container registry that makes it easy for developers to store, manage, and deploy Docker container images. AWS ECR is integrated with Amazon Elastic Kubernetes Service (EKS), simplifying your development to production workflow. With ECR, you can securely store and manage your container images and easily integrate with your existing CI/CD pipelines. AWS ECR provides high availability and scalability, ensuring that your container images are always available when you need them.
diff --git a/docs/docs-beta/docs/integrations/aws/emr.md b/docs/docs-beta/docs/integrations/aws/emr.md
new file mode 100644
index 0000000000000..4a055872d2a1f
--- /dev/null
+++ b/docs/docs-beta/docs/integrations/aws/emr.md
@@ -0,0 +1,42 @@
+---
+layout: Integration
+status: published
+name: EMR
+title: Dagster & AWS EMR
+sidebar_label: EMR
+excerpt: The AWS EMR integration allows you to seamlessly integrate AWS EMR into your Dagster pipelines for petabyte-scale data processing using open source tools like Apache Spark, Hive, Presto, and more.
+date: 2024-06-21
+apireflink: https://docs.dagster.io/_apidocs/libraries/dagster-aws
+docslink:
+partnerlink: https://aws.amazon.com/
+logo: /integrations/aws-emr.svg
+categories:
+  - Compute
+enabledBy:
+enables:
+---
+
+### About this integration
+
+The `dagster-aws` integration provides ways of orchestrating data pipelines that leverage AWS services, including AWS EMR (Elastic MapReduce). This integration allows you to run and scale big data workloads using open source tools such as Apache Spark, Hive, Presto, and more.
+ +Using this integration, you can: + +- Seamlessly integrate AWS EMR into your Dagster pipelines. +- Utilize EMR for petabyte-scale data processing. +- Easily manage and monitor EMR clusters and jobs from within Dagster. +- Leverage Dagster's orchestration capabilities to handle complex data workflows involving EMR. + +### Installation + +```bash +pip install dagster-aws +``` + +### Examples + + + +### About AWS EMR + +**AWS EMR** (Elastic MapReduce) is a cloud big data platform for processing vast amounts of data using open source tools such as Apache Spark, Apache Hive, Apache HBase, Apache Flink, Apache Hudi, and Presto. It simplifies running big data frameworks, allowing you to process and analyze large datasets quickly and cost-effectively. AWS EMR provides the scalability, flexibility, and reliability needed to handle complex data processing tasks, making it an ideal choice for data engineers and scientists. diff --git a/docs/docs-beta/docs/integrations/aws/glue.md b/docs/docs-beta/docs/integrations/aws/glue.md new file mode 100644 index 0000000000000..e06ce1494ba57 --- /dev/null +++ b/docs/docs-beta/docs/integrations/aws/glue.md @@ -0,0 +1,35 @@ +--- +layout: Integration +status: published +name: Glue +title: Dagster & AWS Glue +sidebar_label: Glue +excerpt: The AWS Glue integration enables you to initiate AWS Glue jobs directly from Dagster, seamlessly pass parameters to your code, and stream logs and structured messages back into Dagster. +date: 2024-08-20 +apireflink: https://docs.dagster.io/concepts/dagster-pipes/aws-glue +docslink: +partnerlink: https://aws.amazon.com/ +logo: /integrations/aws-glue.svg +categories: + - Compute +enabledBy: +enables: +--- + +### About this integration + +The `dagster-aws` integration library provides the `PipesGlueClient` resource, enabling you to launch AWS Glue jobs directly from Dagster assets and ops. This integration allows you to pass parameters to Glue code while Dagster receives real-time events, such as logs, asset checks, and asset materializations, from the initiated jobs. With minimal code changes required on the job side, this integration is both efficient and easy to implement. + +### Installation + +```bash +pip install dagster-aws +``` + +### Examples + + + +### About AWS Glue + +**AWS Glue** is a fully managed cloud service designed to simplify and automate the process of discovering, preparing, and integrating data for analytics, machine learning, and application development. It supports a wide range of data sources and formats, offering seamless integration with other AWS services. AWS Glue provides the tools to create, run, and manage ETL (Extract, Transform, Load) jobs, making it easier to handle complex data workflows. Its serverless architecture allows for scalability and flexibility, making it a preferred choice for data engineers and analysts who need to process and prepare data efficiently. 
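+
+As an illustration of the pattern described above, the following sketch launches a Glue job from a Dagster asset via `PipesGlueClient`. The job name, bucket, and resource wiring shown here are assumptions for the example, not values prescribed by the integration:
+
+```python
+import boto3
+from dagster import AssetExecutionContext, Definitions, asset
+from dagster_aws.pipes import (
+    PipesCloudWatchMessageReader,
+    PipesGlueClient,
+    PipesS3ContextInjector,
+)
+
+
+@asset
+def glue_pipes_asset(context: AssetExecutionContext, pipes_glue_client: PipesGlueClient):
+    # Start the Glue job run and stream events back to Dagster until it finishes.
+    return pipes_glue_client.run(
+        context=context,
+        start_job_run_params={"JobName": "example-glue-job"},  # hypothetical job name
+    ).get_materialize_result()
+
+
+defs = Definitions(
+    assets=[glue_pipes_asset],
+    resources={
+        "pipes_glue_client": PipesGlueClient(
+            client=boto3.client("glue"),
+            context_injector=PipesS3ContextInjector(
+                client=boto3.client("s3"),
+                bucket="my-pipes-bucket",  # hypothetical bucket for passing Pipes context
+            ),
+            message_reader=PipesCloudWatchMessageReader(client=boto3.client("logs")),
+        )
+    },
+)
+```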
diff --git a/docs/docs-beta/docs/integrations/aws/index.md b/docs/docs-beta/docs/integrations/aws/index.md
new file mode 100644
index 0000000000000..481931c7b81e2
--- /dev/null
+++ b/docs/docs-beta/docs/integrations/aws/index.md
@@ -0,0 +1,7 @@
+---
+title: AWS
+---
+
+import DocCardList from '@theme/DocCardList';
+
+
diff --git a/docs/docs-beta/docs/integrations/aws/lambda.md b/docs/docs-beta/docs/integrations/aws/lambda.md
new file mode 100644
index 0000000000000..4dd4ba58e903d
--- /dev/null
+++ b/docs/docs-beta/docs/integrations/aws/lambda.md
@@ -0,0 +1,35 @@
+---
+layout: Integration
+status: published
+name: Lambda
+title: Dagster & AWS Lambda
+sidebar_label: Lambda
+excerpt: Using the AWS Lambda integration with Dagster, you can leverage serverless functions to execute external code in your pipelines.
+date: 2024-06-21
+apireflink: https://docs.dagster.io/_apidocs/libraries/dagster-aws
+docslink:
+partnerlink: https://aws.amazon.com/
+logo: /integrations/aws-lambda.svg
+categories:
+  - Compute
+enabledBy:
+enables:
+---
+
+### About this integration
+
+Using this integration, you can leverage AWS Lambda to execute external code as part of your Dagster pipelines. This is particularly useful for running serverless functions that can scale automatically and handle various workloads without the need for managing infrastructure. The `PipesLambdaClient` class allows you to invoke AWS Lambda functions and stream logs and structured metadata back to Dagster's UI and tools.
+
+### Installation
+
+```bash
+pip install dagster-aws
+```
+
+### Examples
+
+
+
+### About AWS Lambda
+
+**AWS Lambda** is a serverless compute service provided by Amazon Web Services (AWS). It allows you to run code without provisioning or managing servers. AWS Lambda automatically scales your application by running code in response to each trigger, such as changes to data in an Amazon S3 bucket or an update to a DynamoDB table. You can use AWS Lambda to extend other AWS services with custom logic, or create your own backend services that operate at AWS scale, performance, and security.
diff --git a/docs/docs-beta/docs/integrations/aws/redshift.md b/docs/docs-beta/docs/integrations/aws/redshift.md
new file mode 100644
index 0000000000000..053bd366b417f
--- /dev/null
+++ b/docs/docs-beta/docs/integrations/aws/redshift.md
@@ -0,0 +1,35 @@
+---
+layout: Integration
+status: published
+name: Redshift
+title: Dagster & AWS Redshift
+sidebar_label: Redshift
+excerpt: "Using this integration, you can seamlessly integrate AWS Redshift into your Dagster workflows, leveraging Redshift's data warehousing capabilities for your data pipelines."
+date: 2024-06-21
+apireflink: https://docs.dagster.io/_apidocs/libraries/dagster-aws
+docslink:
+partnerlink: https://aws.amazon.com/
+logo: /integrations/aws-redshift.svg
+categories:
+  - Storage
+enabledBy:
+enables:
+---
+
+### About this integration
+
+Using this integration, you can connect to an AWS Redshift cluster and issue queries against it directly from your Dagster assets. This allows you to seamlessly integrate Redshift into your data pipelines, leveraging the power of Redshift's data warehousing capabilities within your Dagster workflows.
+
+### Installation
+
+```bash
+pip install dagster-aws
+```
+
+### Examples
+
+
+
+### About AWS Redshift
+
+**AWS Redshift** is a fully managed, petabyte-scale data warehouse service in the cloud. You can start with just a few hundred gigabytes of data and scale to a petabyte or more.
This enables you to use your data to acquire new insights for your business and customers. Redshift offers fast query performance using SQL-based tools and business intelligence applications, making it a powerful tool for data warehousing and analytics.
diff --git a/docs/docs-beta/docs/integrations/aws/s3.md b/docs/docs-beta/docs/integrations/aws/s3.md
new file mode 100644
index 0000000000000..e617605730442
--- /dev/null
+++ b/docs/docs-beta/docs/integrations/aws/s3.md
@@ -0,0 +1,37 @@
+---
+layout: Integration
+status: published
+name: S3
+title: Dagster & AWS S3
+sidebar_label: S3
+excerpt: The AWS S3 integration allows data engineers to easily read and write objects to the durable AWS S3 storage, enabling engineers to have a resilient storage layer when constructing their pipelines.
+date: 2024-06-21
+apireflink: https://docs.dagster.io/_apidocs/libraries/dagster-aws
+docslink:
+partnerlink: https://aws.amazon.com/
+logo: /integrations/aws-s3.svg
+categories:
+  - Storage
+enabledBy:
+enables:
+---
+
+### About this integration
+
+The AWS S3 integration allows data engineers to easily read and write objects to the durable AWS S3 storage, enabling engineers to have a resilient storage layer when constructing their pipelines.
+
+### Installation
+
+```bash
+pip install dagster-aws
+```
+
+### Examples
+
+Here is an example of how to use the `S3Resource` in a Dagster job to interact with AWS S3:
+
+
+
+### About AWS S3
+
+**AWS S3** is an object storage service that offers industry-leading scalability, data availability, security, and performance. This means customers of all sizes and industries can use it to store and protect any amount of data for a range of use cases, such as data lakes, websites, mobile applications, backup and restore, archive, enterprise applications, IoT devices, and big data analytics. Amazon S3 provides easy-to-use management features so you can organize your data and configure finely tuned access controls to meet your specific business, organizational, and compliance requirements.
diff --git a/docs/docs-beta/docs/integrations/aws/secretsmanager.md b/docs/docs-beta/docs/integrations/aws/secretsmanager.md
new file mode 100644
index 0000000000000..736b84fc56fb4
--- /dev/null
+++ b/docs/docs-beta/docs/integrations/aws/secretsmanager.md
@@ -0,0 +1,35 @@
+---
+layout: Integration
+status: published
+name: Secrets Manager
+title: Dagster & AWS Secrets Manager
+sidebar_label: Secrets Manager
+excerpt: This integration allows you to manage, retrieve, and rotate credentials, API keys, and other secrets using AWS Secrets Manager.
+date: 2024-06-21
+apireflink: https://docs.dagster.io/_apidocs/libraries/dagster-aws
+docslink:
+partnerlink: https://aws.amazon.com/
+logo: /integrations/aws-secretsmanager.svg
+categories:
+  - Other
+enabledBy:
+enables:
+---
+
+### About this integration
+
+This integration allows you to manage, retrieve, and rotate credentials, API keys, and other secrets using [AWS Secrets Manager](https://aws.amazon.com/secrets-manager/).
+
+### Installation
+
+```bash
+pip install dagster-aws
+```
+
+### Examples
+
+
+
+### About AWS Secrets Manager
+
+**AWS Secrets Manager** helps you protect access to your applications, services, and IT resources without the upfront cost and complexity of managing your own hardware security module infrastructure. With Secrets Manager, you can rotate, manage, and retrieve database credentials, API keys, and other secrets throughout their lifecycle.
Users and applications retrieve secrets with a call to Secrets Manager APIs, eliminating the need to hardcode sensitive information in plain text.
diff --git a/docs/docs-beta/docs/integrations/aws/ssm.md b/docs/docs-beta/docs/integrations/aws/ssm.md
new file mode 100644
index 0000000000000..36f480a509482
--- /dev/null
+++ b/docs/docs-beta/docs/integrations/aws/ssm.md
@@ -0,0 +1,37 @@
+---
+layout: Integration
+status: published
+name: Systems Parameter Store
+title: Dagster & AWS Systems Parameter Store
+sidebar_label: Systems Parameter Store
+excerpt: The Dagster AWS Systems Manager (SSM) Parameter Store integration allows you to manage and retrieve parameters stored in AWS SSM Parameter Store directly within your Dagster pipelines.
+date: 2024-06-21
+apireflink: https://docs.dagster.io/_apidocs/libraries/dagster-aws
+docslink:
+partnerlink: https://aws.amazon.com/
+logo: /integrations/aws-ssm.svg
+categories:
+  - Other
+enabledBy:
+enables:
+---
+
+### About this integration
+
+The Dagster AWS Systems Manager (SSM) Parameter Store integration allows you to manage and retrieve parameters stored in AWS SSM Parameter Store directly within your Dagster pipelines. This integration provides resources to fetch parameters by name, tags, or paths, and optionally set them as environment variables for your operations.
+
+### Installation
+
+```bash
+pip install dagster-aws
+```
+
+### Examples
+
+
+
+### About AWS Systems Parameter Store
+
+**AWS Systems Manager Parameter Store** is a secure storage service for configuration data management and secrets management. It allows you to store data such as passwords, database strings, and license codes as parameter values. You can then reference these parameters in your applications or scripts, ensuring that sensitive information isn't hard-coded or exposed in your codebase.
+
+AWS Systems Manager Parameter Store integrates with AWS Identity and Access Management (IAM) to control access to parameters, and it supports encryption using AWS Key Management Service (KMS) to protect sensitive data. This service is essential for maintaining secure and manageable configurations across your AWS environment.
diff --git a/docs/docs-beta/docs/integrations/azure-adls2.md b/docs/docs-beta/docs/integrations/azure-adls2.md
new file mode 100644
index 0000000000000..9f766bac09d09
--- /dev/null
+++ b/docs/docs-beta/docs/integrations/azure-adls2.md
@@ -0,0 +1,37 @@
+---
+layout: Integration
+status: published
+name: Azure Data Lake Storage Gen 2 (ADLS2)
+title: Dagster & Azure Data Lake Storage Gen 2 (ADLS2)
+sidebar_label: Azure Data Lake Storage Gen 2 (ADLS2)
+excerpt: Get utilities for ADLS2 and Blob Storage.
+date: 2022-11-07
+apireflink: https://docs.dagster.io/_apidocs/libraries/dagster-azure
+docslink:
+partnerlink: https://azure.microsoft.com/
+logo: /integrations/Azure.svg
+categories:
+  - Storage
+enabledBy:
+enables:
+---
+
+### About this integration
+
+Dagster helps you use Azure Storage Accounts as part of your data pipeline. Azure Data Lake Storage Gen 2 (ADLS2) is our primary focus, but we also provide utilities for Azure Blob Storage.
+
+### Installation
+
+```bash
+pip install dagster-azure
+```
+
+### Examples
+
+
+
+In this example, we use `ADLS2Resource` directly instead of the legacy `adls2_resource`. The configuration is passed to `ADLS2Resource` during its instantiation.
+
+### About Azure Data Lake Storage Gen 2 (ADLS2)
+
+**Azure Data Lake Storage Gen 2 (ADLS2)** is a set of capabilities dedicated to big data analytics, built on Azure Blob Storage.
ADLS2 combines the scalability, cost-effectiveness, security, and rich capabilities of Azure Blob Storage with a high-performance file system that's built for analytics and is compatible with the Hadoop Distributed File System (HDFS). This makes it an ideal choice for data lakes and big data analytics.
diff --git a/docs/docs-beta/docs/integrations/census.md b/docs/docs-beta/docs/integrations/census.md
new file mode 100644
index 0000000000000..ae13177111531
--- /dev/null
+++ b/docs/docs-beta/docs/integrations/census.md
@@ -0,0 +1,35 @@
+---
+layout: Integration
+status: published
+name: Census
+title: Dagster & Census
+sidebar_label: Census
+excerpt: Trigger Census syncs from within your Dagster pipelines.
+date: 2022-11-07
+apireflink: http://docs.dagster.io/_apidocs/libraries/dagster-census
+partnerlink: https://www.getcensus.com/
+communityIntegration: true
+logo: /integrations/Census.svg
+categories:
+  - ETL
+enabledBy:
+enables:
+---
+
+### About this integration
+
+With the `dagster-census` integration you can execute a Census sync and poll until that sync completes, raising an error if it's unsuccessful.
+
+### Installation
+
+```bash
+pip install dagster-census
+```
+
+### Example
+
+
+
+### About Census
+
+**Census** syncs data from your cloud warehouse to the SaaS tools your organization uses. It allows everyone in your organization to take action with good data, no custom scripts or API integrations required.
diff --git a/docs/docs-beta/docs/integrations/cube.md b/docs/docs-beta/docs/integrations/cube.md
new file mode 100644
index 0000000000000..9bac9cd168c7b
--- /dev/null
+++ b/docs/docs-beta/docs/integrations/cube.md
@@ -0,0 +1,35 @@
+---
+layout: Integration
+status: published
+name: Cube
+title: Dagster & Cube
+sidebar_label: Cube
+excerpt: "Push changes from upstream data sources to Cube's semantic layer."
+date: 2023-08-30
+apireflink: https://cube.dev/docs/orchestration-api/dagster
+partnerlink: https://cube.dev/
+communityIntegration: true
+logo: /integrations/cube.svg
+categories:
+  - Other
+enabledBy:
+enables:
+---
+
+### About this integration
+
+With the `dagster_cube` integration you can set up Cube and Dagster to work together so that Dagster can push changes from upstream data sources to Cube using its integration API.
+
+### Installation
+
+```bash
+pip install dagster_cube
+```
+
+### Example
+
+
+
+### About Cube
+
+**Cube.js** is the semantic layer for building data applications. It helps data engineers and application developers access data from modern data stores, organize it into consistent definitions, and deliver it to every application.
diff --git a/docs/docs-beta/docs/integrations/databricks.md b/docs/docs-beta/docs/integrations/databricks.md
new file mode 100644
index 0000000000000..92ee6a0fc6a78
--- /dev/null
+++ b/docs/docs-beta/docs/integrations/databricks.md
@@ -0,0 +1,37 @@
+---
+layout: Integration
+status: published
+name: Databricks
+title: Dagster & Databricks
+sidebar_label: Databricks
+excerpt: The Databricks integration enables you to initiate Databricks jobs directly from Dagster, seamlessly pass parameters to your code, and stream logs and structured messages back into Dagster.
+date: 2024-08-20
+apireflink: https://docs.dagster.io/concepts/dagster-pipes/databricks
+docslink:
+partnerlink: https://databricks.com/
+logo: /integrations/databricks.svg
+categories:
+  - Compute
+enabledBy:
+enables:
+---
+
+### About this integration
+
+The `dagster-databricks` integration library provides the `PipesDatabricksClient` resource, enabling you to launch Databricks jobs directly from Dagster assets and ops. This integration allows you to pass parameters to Databricks code while Dagster receives real-time events, such as logs, asset checks, and asset materializations, from the initiated jobs. With minimal code changes required on the job side, this integration is both efficient and easy to implement.
+
+### Installation
+
+```bash
+pip install dagster-databricks
+```
+
+### Example
+
+
+
+
+
+### About Databricks
+
+**Databricks** is a unified data analytics platform that simplifies and accelerates the process of building big data and AI solutions. It integrates seamlessly with Apache Spark and offers support for various data sources and formats. Databricks provides powerful tools to create, run, and manage data pipelines, making it easier to handle complex data engineering tasks. Its collaborative and scalable environment is ideal for data engineers, scientists, and analysts who need to process and analyze large datasets efficiently.
diff --git a/docs/docs-beta/docs/integrations/datadog.md b/docs/docs-beta/docs/integrations/datadog.md
new file mode 100644
index 0000000000000..9bac8f21649b5
--- /dev/null
+++ b/docs/docs-beta/docs/integrations/datadog.md
@@ -0,0 +1,35 @@
+---
+layout: Integration
+status: published
+name: Datadog
+title: Dagster & Datadog
+sidebar_label: Datadog
+excerpt: Publish metrics to Datadog from within Dagster ops and centralize your monitoring metrics.
+date: 2022-11-07
+apireflink: https://docs.dagster.io/_apidocs/libraries/dagster-datadog
+docslink:
+partnerlink: https://www.datadoghq.com/
+logo: /integrations/Datadog.svg
+categories:
+  - Monitoring
+enabledBy:
+enables:
+---
+
+### About this integration
+
+While Dagster provides comprehensive monitoring and observability of the pipelines it orchestrates, many teams look to centralize all their monitoring across apps, processes and infrastructure using Datadog's 'Cloud Monitoring as a Service'. The `dagster-datadog` integration allows you to publish metrics to Datadog from within Dagster ops.
+
+### Installation
+
+```bash
+pip install dagster-datadog
+```
+
+### Example
+
+
+
+### About Datadog
+
+**Datadog** is an observability service for cloud-scale applications, providing monitoring of servers, databases, tools, and services, through a SaaS-based data analytics platform.
diff --git a/docs/docs-beta/docs/integrations/dbt-cloud.md b/docs/docs-beta/docs/integrations/dbt-cloud.md
new file mode 100644
index 0000000000000..ae375f429e15e
--- /dev/null
+++ b/docs/docs-beta/docs/integrations/dbt-cloud.md
@@ -0,0 +1,37 @@
+---
+layout: Integration
+status: published
+name: dbt Cloud
+title: Dagster & dbt Cloud
+sidebar_label: dbt Cloud
+excerpt: Run dbt Cloud™ jobs as part of your data pipeline.
+date: 2022-11-07
+apireflink: https://docs.dagster.io/_apidocs/libraries/dagster-dbt#assets-dbt-cloud
+docslink: https://docs.dagster.io/integrations/dbt_cloud
+partnerlink:
+logo: /integrations/dbt.svg
+categories:
+  - ETL
+enabledBy:
+enables:
+---
+
+### About this integration
+
+Dagster allows you to run dbt Cloud jobs alongside other technologies.
You can schedule them to run as a step in a larger pipeline and manage them as a data asset.
+
+### Installation
+
+```bash
+pip install dagster-dbt
+```
+
+### Example
+
+
+
+### About dbt Cloud
+
+**dbt Cloud** is a hosted service for running dbt jobs. It helps data analysts and engineers productionize dbt deployments. Beyond dbt open source, dbt Cloud provides scheduling, CI/CD, serving documentation, and monitoring & alerting.
+
+If you're currently using dbt Cloud™, you can also use Dagster to run `dbt-core` in its place. You can read more about [how to do that here](https://dagster.io/blog/migrate-off-dbt-cloud).
diff --git a/docs/docs-beta/docs/integrations/dbt.md b/docs/docs-beta/docs/integrations/dbt.md
new file mode 100644
index 0000000000000..cd2b7f873e3f2
--- /dev/null
+++ b/docs/docs-beta/docs/integrations/dbt.md
@@ -0,0 +1,47 @@
+---
+layout: Integration
+status: published
+name: dbt
+title: Dagster & dbt
+sidebar_label: dbt
+excerpt: Put your dbt transformations to work, directly from within Dagster.
+date: 2022-11-07
+apireflink: https://docs.dagster.io/_apidocs/libraries/dagster-dbt
+docslink: https://docs.dagster.io/integrations/dbt
+partnerlink: https://www.getdbt.com/
+logo: /integrations/dbt.svg
+categories:
+  - ETL
+enabledBy:
+enables:
+---
+
+### About this integration
+
+Dagster orchestrates dbt alongside other technologies, so you can schedule dbt with Spark, Python, etc. in a single data pipeline.
+
+Dagster assets understand dbt at the level of individual dbt models. This means that you can:
+
+- Use Dagster's UI or APIs to run subsets of your dbt models, seeds, and snapshots.
+- Track failures, logs, and run history for individual dbt models, seeds, and snapshots.
+- Define dependencies between individual dbt models and other data assets. For example, put dbt models after the Fivetran-ingested table that they read from, or put a machine learning model after the dbt models that it's trained from.
+
+### Installation
+
+```bash
+pip install dagster-dbt
+```
+
+### Example
+
+
+
+### About dbt
+
+**dbt** is a SQL-first transformation workflow that lets teams quickly and collaboratively deploy analytics code following software engineering best practices like modularity, portability, CI/CD, and documentation.
+
+
diff --git a/docs/docs-beta/docs/integrations/deltalake.md b/docs/docs-beta/docs/integrations/deltalake.md
new file mode 100644
index 0000000000000..175fc173c5534
--- /dev/null
+++ b/docs/docs-beta/docs/integrations/deltalake.md
@@ -0,0 +1,43 @@
+---
+layout: Integration
+status: published
+name: Delta Lake
+title: Dagster & Delta Lake
+sidebar_label: Delta Lake
+excerpt: Integrate your pipelines into Delta Lake.
+date: 2022-11-07
+communityIntegration: true
+apireflink: https://delta-io.github.io/delta-rs/integrations/delta-lake-dagster/
+docslink:
+partnerlink: https://delta.io/
+logo: /integrations/DeltaLake.svg
+categories:
+  - Storage
+enabledBy:
+enables:
+---
+
+### About this integration
+
+Delta Lake is a great storage format for Dagster workflows. With this integration, you can use the Delta Lake I/O Manager to read and write your Dagster assets.
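+
+As a quick sketch of what this looks like in practice, the Pandas flavor of the I/O manager can be attached to your definitions as shown below. The root path, schema, and asset are illustrative assumptions:
+
+```python
+import pandas as pd
+from dagster import Definitions, asset
+from dagster_deltalake import LocalConfig
+from dagster_deltalake_pandas import DeltaLakePandasIOManager
+
+
+@asset
+def raw_customers() -> pd.DataFrame:
+    # The returned DataFrame is written to a Delta table by the I/O manager.
+    return pd.DataFrame({"customer_id": [1, 2, 3], "name": ["Alice", "Bob", "Eve"]})
+
+
+defs = Definitions(
+    assets=[raw_customers],
+    resources={
+        "io_manager": DeltaLakePandasIOManager(
+            root_uri="data/deltalake",  # hypothetical local path
+            storage_options=LocalConfig(),
+            schema="examples",  # hypothetical schema (subdirectory) for the table
+        )
+    },
+)
+```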
+
+Here are some of the benefits that Delta Lake provides Dagster users:
+
+- Native PyArrow integration for lazy computation of large datasets
+- More efficient querying with file skipping via Z-ordering and liquid clustering
+- Built-in vacuuming to remove unnecessary files and versions
+- ACID transactions for reliable writes
+- Smooth versioning integration (versions can be used to trigger downstream updates)
+- Surfacing table stats based on the file statistics
+
+### Installation
+
+```bash
+pip install dagster-deltalake
+pip install dagster-deltalake-pandas
+pip install dagster-deltalake-polars
+```
+
+### About Delta Lake
+
+Delta Lake is an open source storage framework that enables building a Lakehouse architecture with compute engines including Spark, PrestoDB, Flink, Trino, and Hive, and APIs for Scala, Java, Rust, and Python.
diff --git a/docs/docs-beta/docs/integrations/dlt.md b/docs/docs-beta/docs/integrations/dlt.md
new file mode 100644
index 0000000000000..9381022348790
--- /dev/null
+++ b/docs/docs-beta/docs/integrations/dlt.md
@@ -0,0 +1,35 @@
+---
+layout: Integration
+status: published
+name: dlt
+title: Dagster & dlt
+sidebar_label: dlt
+excerpt: Easily ingest and replicate data between systems with dlt through Dagster.
+date: 2024-08-30
+apireflink: https://docs.dagster.io/_apidocs/libraries/dagster-embedded-elt
+docslink: https://docs.dagster.io/integrations/embedded-elt/dlt
+partnerlink: https://dlthub.com/
+logo: /integrations/dlthub.jpeg
+categories:
+  - ETL
+enabledBy:
+enables:
+---
+
+### About this integration
+
+This integration allows you to use [dlt](https://dlthub.com/) to easily ingest and replicate data between systems through Dagster.
+
+### Installation
+
+```bash
+pip install dagster-embedded-elt
+```
+
+### Example
+
+
+
+### About dlt
+
+[Data Load Tool (dlt)](https://dlthub.com/) is an open source library for creating efficient data pipelines. It offers features like secret management, data structure conversion, incremental updates, and pre-built sources and destinations, simplifying the process of loading messy data into well-structured datasets.
diff --git a/docs/docs-beta/docs/integrations/docker.md b/docs/docs-beta/docs/integrations/docker.md
new file mode 100644
index 0000000000000..16bdd0ab960c0
--- /dev/null
+++ b/docs/docs-beta/docs/integrations/docker.md
@@ -0,0 +1,40 @@
+---
+layout: Integration
+status: published
+name: Docker
+title: Dagster & Docker
+sidebar_label: Docker
+excerpt: Run external processes in Docker containers directly from Dagster.
+date: 2022-11-07
+apireflink: https://docs.dagster.io/_apidocs/libraries/dagster-docker
+docslink:
+partnerlink: https://www.docker.com/
+logo: /integrations/Docker.svg
+categories:
+  - Compute
+enabledBy:
+enables:
+---
+
+### About this integration
+
+The `dagster-docker` integration library provides the `PipesDockerClient` resource, enabling you to launch Docker containers and execute external code directly from Dagster assets and ops. This integration allows you to pass parameters to Docker containers while Dagster receives real-time events, such as logs, asset checks, and asset materializations, from the initiated jobs. With minimal code changes required on the job side, this integration is both efficient and easy to implement.
+
+### Installation
+
+```bash
+pip install dagster-docker
+```
+
+### Example
+
+
+
+### Deploying to Docker?
+
+- Deploying to Dagster+: When used with a Dagster+ Hybrid deployment, the Docker agent executes Dagster jobs on a Docker cluster.
+ +### Deploying to Docker? + +- Deploying to Dagster+: Used with a Dagster+ Hybrid deployment, the Docker agent executes Dagster jobs on a Docker cluster. Check out the [Dagster+ Docker Agent](https://docs.dagster.io/dagster-plus/deployment/agents/docker) guide for more information. +- Deploying to Open Source: Visit the [Deploying Dagster to Docker](https://docs.dagster.io/deployment/guides/docker) guide for more information. + +### About Docker + +**Docker** is a set of platform-as-a-service products that use OS-level virtualization to deliver software in packages called containers. The service has both free and premium tiers. The software that hosts the containers is called Docker Engine. diff --git a/docs/docs-beta/docs/integrations/duckdb.md b/docs/docs-beta/docs/integrations/duckdb.md new file mode 100644 index 0000000000000..5335df37db094 --- /dev/null +++ b/docs/docs-beta/docs/integrations/duckdb.md @@ -0,0 +1,35 @@ +--- +layout: Integration +status: published +name: DuckDB +title: Dagster & DuckDB +sidebar_label: DuckDB +excerpt: Read and write natively to DuckDB from Software Defined Assets. +date: 2022-11-07 +apireflink: https://docs.dagster.io/_apidocs/libraries/dagster-duckdb +docslink: https://dagster.io/blog/duckdb-data-lake +partnerlink: https://duckdb.org/ +logo: /integrations/Duckdb.svg +categories: + - Storage +enabledBy: +enables: +--- + +### About this integration + +This library provides an integration with the DuckDB database and offers an out-of-the-box [I/O Manager](https://docs.dagster.io/concepts/io-management/io-managers) so that you can make DuckDB your storage of choice. + +### Installation + +```bash +pip install dagster-duckdb +``` + +### Example + +
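+A minimal sketch of creating a table from an asset with `DuckDBResource`; the database path and query are illustrative: + +```python +from dagster import Definitions, asset +from dagster_duckdb import DuckDBResource + +@asset +def iris_table(duckdb: DuckDBResource) -> None: +    # get_connection() yields a standard DuckDB connection +    with duckdb.get_connection() as conn: +        conn.execute("CREATE OR REPLACE TABLE iris AS SELECT 1 AS id, 5.1 AS sepal_length") + +defs = Definitions( +    assets=[iris_table], +    resources={"duckdb": DuckDBResource(database="analytics.duckdb")}, +) +```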
+ +### About DuckDB + +**DuckDB** is a column-oriented in-process OLAP database. A typical OLTP relational database like SQLite is row-oriented. In a row-oriented database, data is organized physically as consecutive tuples. diff --git a/docs/docs-beta/docs/integrations/fivetran.md b/docs/docs-beta/docs/integrations/fivetran.md new file mode 100644 index 0000000000000..9235e4e3788e5 --- /dev/null +++ b/docs/docs-beta/docs/integrations/fivetran.md @@ -0,0 +1,35 @@ +--- +layout: Integration +status: published +name: Fivetran +title: Dagster & Fivetran +sidebar_label: Fivetran +excerpt: Orchestrate Fivetran connectors and schedule syncs with upstream or downstream dependencies. +date: 2022-11-07 +apireflink: https://docs.dagster.io/_apidocs/libraries/dagster-fivetran +docslink: https://docs.dagster.io/integrations/fivetran +partnerlink: https://www.fivetran.com/ +logo: /integrations/Fivetran.svg +categories: + - ETL +enabledBy: +enables: +--- + +### About this integration + +The Dagster-Fivetran integration enables you to orchestrate data ingestion as part of a larger pipeline. Programmatically interact with the Fivetran REST API to initiate syncs and monitor their progress. + +### Installation + +```bash +pip install dagster-fivetran +``` + +### Example + + + +### About Fivetran + +**Fivetran** ingests data from SaaS applications, databases, and servers. The data is stored and typically used for analytics. diff --git a/docs/docs-beta/docs/integrations/gcp/bigquery.md b/docs/docs-beta/docs/integrations/gcp/bigquery.md new file mode 100644 index 0000000000000..3decbd33ad606 --- /dev/null +++ b/docs/docs-beta/docs/integrations/gcp/bigquery.md @@ -0,0 +1,35 @@ +--- +layout: Integration +status: published +name: BigQuery +title: Dagster & GCP BigQuery +sidebar_label: BigQuery +excerpt: Integrate with GCP BigQuery. +date: 2022-11-07 +apireflink: https://docs.dagster.io/_apidocs/libraries/dagster-gcp +docslink: +partnerlink: +logo: /integrations/gcp-bigquery.svg +categories: + - Storage +enabledBy: +enables: +--- + +### About this integration + +The Google Cloud Platform BigQuery integration allows data engineers to easily query and store data in the BigQuery data warehouse through the use of the `BigQueryResource`. + +### Installation + +```bash +pip install dagster-gcp +``` + +### Examples + + + +### About Google Cloud Platform BigQuery + +The Google Cloud Platform BigQuery service offers a fully managed enterprise data warehouse that enables fast SQL queries using the processing power of Google's infrastructure. diff --git a/docs/docs-beta/docs/integrations/gcp/dataproc.md b/docs/docs-beta/docs/integrations/gcp/dataproc.md new file mode 100644 index 0000000000000..fbb9527fb6065 --- /dev/null +++ b/docs/docs-beta/docs/integrations/gcp/dataproc.md @@ -0,0 +1,35 @@ +--- +layout: Integration +status: published +name: Dataproc +title: Dagster & GCP Dataproc +sidebar_label: Dataproc +excerpt: Integrate with GCP Dataproc. +date: 2022-11-07 +apireflink: https://docs.dagster.io/_apidocs/libraries/dagster-gcp +docslink: +partnerlink: +logo: /integrations/gcp-dataproc.svg +categories: + - Compute +enabledBy: +enables: +--- + +### About this integration + +Using this integration, you can manage and interact with Google Cloud Platform's Dataproc service directly from Dagster. This integration allows you to create, manage, and delete Dataproc clusters, and submit and monitor jobs on these clusters. + +### Installation + +```bash +pip install dagster-gcp +``` + +### Examples + + + +### About Google Cloud Platform Dataproc + +Google Cloud Platform's **Dataproc** is a fully managed and highly scalable service for running Apache Spark, Apache Hadoop, and other open source data processing frameworks. Dataproc simplifies the process of setting up and managing clusters, allowing you to focus on your data processing tasks without worrying about the underlying infrastructure. With Dataproc, you can quickly create clusters, submit jobs, and monitor their progress, all while benefiting from the scalability and reliability of Google Cloud Platform. diff --git a/docs/docs-beta/docs/integrations/gcp/gcs.md b/docs/docs-beta/docs/integrations/gcp/gcs.md new file mode 100644 index 0000000000000..4969db7e33882 --- /dev/null +++ b/docs/docs-beta/docs/integrations/gcp/gcs.md @@ -0,0 +1,35 @@ +--- +layout: Integration +status: published +name: GCS +title: Dagster & GCP GCS +sidebar_label: GCS +excerpt: Integrate with GCP GCS. +date: 2022-11-07 +apireflink: https://docs.dagster.io/_apidocs/libraries/dagster-gcp +docslink: +partnerlink: +logo: /integrations/gcp-gcs.svg +categories: + - Storage +enabledBy: +enables: +--- + +### About this integration + +This integration allows you to interact with Google Cloud Storage (GCS) using Dagster. It provides resources, I/O Managers, and utilities to manage and store data in GCS, making it easier to integrate GCS into your data pipelines. + +### Installation + +```bash +pip install dagster-gcp +``` + +### Examples + +
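+A minimal sketch of writing an object to GCS from an asset via `GCSResource`; the project, bucket, and object names are hypothetical: + +```python +import json + +from dagster import Definitions, asset +from dagster_gcp import GCSResource + +@asset +def gcs_report(gcs: GCSResource) -> None: +    # get_client() returns an authenticated google.cloud.storage.Client +    client = gcs.get_client() +    bucket = client.bucket("my-example-bucket") +    bucket.blob("reports/summary.json").upload_from_string(json.dumps({"status": "ok"})) + +defs = Definitions( +    assets=[gcs_report], +    resources={"gcs": GCSResource(project="my-gcp-project")}, +) +```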
+ +### About Google Cloud Platform GCS + +**Google Cloud Storage (GCS)** is a scalable and secure object storage service. GCS is designed for storing and accessing any amount of data at any time, making it ideal for data science, AI infrastructure, and frameworks for ML like AutoML. With this integration, you can leverage GCS for efficient data storage and retrieval within your Dagster pipelines. diff --git a/docs/docs-beta/docs/integrations/gcp/index.md b/docs/docs-beta/docs/integrations/gcp/index.md new file mode 100644 index 0000000000000..a9b2ad3343cd3 --- /dev/null +++ b/docs/docs-beta/docs/integrations/gcp/index.md @@ -0,0 +1,7 @@ +--- +title: GCP +--- + +import DocCardList from '@theme/DocCardList'; + + diff --git a/docs/docs-beta/docs/integrations/github.md b/docs/docs-beta/docs/integrations/github.md new file mode 100644 index 0000000000000..19c278d44ea0a --- /dev/null +++ b/docs/docs-beta/docs/integrations/github.md @@ -0,0 +1,35 @@ +--- +layout: Integration +status: published +name: GitHub +title: Dagster & GitHub +sidebar_label: GitHub +excerpt: Integrate with GitHub Apps and automate operations within your GitHub repositories. +date: 2022-11-07 +apireflink: https://docs.dagster.io/_apidocs/libraries/dagster-github +docslink: +partnerlink: https://github.com/ +logo: /integrations/Github.svg +categories: + - Other +enabledBy: +enables: +--- + +### About this integration + +This library provides an integration with _[GitHub Apps](https://docs.github.com/en/developers/apps/getting-started-with-apps/about-apps)_ by providing a thin wrapper on the GitHub v4 GraphQL API. This allows you to automate operations within your GitHub repositories, with the tighter permission scopes that GitHub Apps allow compared to personal access tokens. + +### Installation + +```bash +pip install dagster-github +``` + +### Example + + + +### About GitHub + +**GitHub** provides a highly available git repo, access control, bug tracking, software feature requests, task management, continuous integration, and wikis for open source and commercial projects. diff --git a/docs/docs-beta/docs/integrations/hashicorp.md b/docs/docs-beta/docs/integrations/hashicorp.md new file mode 100644 index 0000000000000..5664934092637 --- /dev/null +++ b/docs/docs-beta/docs/integrations/hashicorp.md @@ -0,0 +1,36 @@ +--- +layout: Integration +status: published +name: HashiCorp Vault +title: Dagster & HashiCorp Vault +sidebar_label: HashiCorp Vault +excerpt: Centrally manage credentials and certificates, then use them in your pipelines. +date: 2022-11-07 +apireflink: +docslink: https://github.com/silentsokolov/dagster-hashicorp +partnerlink: https://www.vaultproject.io/ +communityIntegration: true +logo: /integrations/Hashicorp.svg +categories: + - Other +enabledBy: +enables: +--- + +### About this integration + +This package integrates HashiCorp Vault into Dagster so that you can securely manage tokens and passwords. + +### Installation + +```bash +pip install dagster-hashicorp +``` + +### Example + + + +### About HashiCorp Vault + +**HashiCorp** provides open source tools and commercial products that enable developers, operators, and security professionals to provision, secure, run, and connect cloud-computing infrastructure. **HashiCorp Vault** secures, stores, and tightly controls access to tokens, passwords, certificates, API keys, and other secrets in modern computing. diff --git a/docs/docs-beta/docs/integrations/hightouch.md b/docs/docs-beta/docs/integrations/hightouch.md new file mode 100644 index 0000000000000..11f81649565c9 --- /dev/null +++ b/docs/docs-beta/docs/integrations/hightouch.md @@ -0,0 +1,37 @@ +--- +layout: Integration +status: published +name: Hightouch +title: Dagster & Hightouch +sidebar_label: Hightouch +excerpt: Trigger syncs and monitor them until they complete.
+date: 2022-11-07 +docslink: https://github.com/hightouchio/dagster-hightouch +partnerlink: https://hightouch.com/ +communityIntegration: true +logo: /integrations/Hightouch.svg +categories: + - ETL +enabledBy: +enables: +--- + +### About this integration + +With this integration, you can trigger Hightouch syncs and monitor them from within Dagster. Fine-tune when Hightouch syncs kick off, visualize their dependencies, and monitor the steps in your data activation workflow. + +This native integration helps your team more effectively orchestrate the last mile of data analytics—bringing that data from the warehouse back into the SaaS tools your business teams live in. With the `dagster-hightouch` integration, Hightouch users have more granular and sophisticated control over when data gets activated. + +### Installation + +```bash +pip install dagster-hightouch +``` + +### Example + + + +### About Hightouch + +**Hightouch** syncs data from any data warehouse into popular SaaS tools that businesses run on. Hightouch uses the power of Reverse ETL to transform core business applications from isolated data islands into powerful integrated solutions. diff --git a/docs/docs-beta/docs/integrations/index.md b/docs/docs-beta/docs/integrations/index.md new file mode 100644 index 0000000000000..233bd07d0fdc0 --- /dev/null +++ b/docs/docs-beta/docs/integrations/index.md @@ -0,0 +1,8 @@ +--- +sidebar_class_name: hidden +title: Integrations +--- + +import DocCardList from '@theme/DocCardList'; + + diff --git a/docs/docs-beta/docs/integrations/jupyter.md b/docs/docs-beta/docs/integrations/jupyter.md new file mode 100644 index 0000000000000..f0ab1db8998cf --- /dev/null +++ b/docs/docs-beta/docs/integrations/jupyter.md @@ -0,0 +1,22 @@ +--- +layout: Integration +status: published +name: Jupyter Notebooks +title: Dagster & Jupyter Notebooks +sidebar_label: Jupyter Notebooks +excerpt: Dagstermill eliminates the tedious "productionization" of Jupyter notebooks. +date: 2022-11-07 +apireflink: +docslink: https://docs.dagster.io/integrations/dagstermill +partnerlink: +logo: /integrations/Jupyter.svg +enabledBy: + - dagster-dagstermill +categories: + - Compute +enables: +--- + +### About Jupyter + +Fast iteration, the literate combination of arbitrary code with markdown blocks, and inline plotting make notebooks an indispensable tool for data science. The **Dagstermill** package makes it easy to run notebooks using the Dagster tools and to integrate them into data jobs with heterogeneous ops: for instance, Spark jobs, SQL statements run against a data warehouse, or arbitrary Python code. diff --git a/docs/docs-beta/docs/integrations/kubernetes.md b/docs/docs-beta/docs/integrations/kubernetes.md new file mode 100644 index 0000000000000..dbe389b9b2536 --- /dev/null +++ b/docs/docs-beta/docs/integrations/kubernetes.md @@ -0,0 +1,40 @@ +--- +layout: Integration +status: published +name: Kubernetes +title: Dagster & Kubernetes +sidebar_label: Kubernetes +excerpt: Launch Kubernetes pods and execute external code directly from Dagster. +date: 2024-08-30 +apireflink: https://docs.dagster.io/_apidocs/libraries/dagster-k8s +docslink: https://docs.dagster.io/concepts/dagster-pipes/kubernetes +partnerlink: https://kubernetes.io/ +logo: /integrations/Kubernetes.svg +categories: + - Compute +enabledBy: +enables: +--- + +### About this integration + +The `dagster-k8s` integration library provides the `PipesK8sClient` resource, enabling you to launch Kubernetes pods and execute external code directly from Dagster assets and ops.
This integration allows you to pass parameters to Kubernetes pods while Dagster receives real-time events, such as logs, asset checks, and asset materializations, from the initiated jobs. With minimal code changes required on the job side, this integration is both efficient and easy to implement. + +### Installation + +```bash +pip install dagster-k8s +``` + +### Example + +
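+A minimal sketch of launching a pod via `PipesK8sClient`; the image and command are placeholders: + +```python +from dagster import AssetExecutionContext, Definitions, asset +from dagster_k8s import PipesK8sClient + +@asset +def k8s_pipes_asset(context: AssetExecutionContext, k8s_pipes_client: PipesK8sClient): +    # Creates the pod, relays logs and events back to Dagster, and waits for completion +    return k8s_pipes_client.run( +        context=context, +        image="my-registry/my-image:latest",  # hypothetical image +        command=["python", "-m", "my_module"],  # hypothetical entry point +    ).get_materialize_result() + +defs = Definitions( +    assets=[k8s_pipes_asset], +    resources={"k8s_pipes_client": PipesK8sClient()}, +) +```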
+ +### Deploying to Kubernetes? + +- Deploying to Dagster+: Used with a Dagster+ Hybrid deployment, the Kubernetes agent executes Dagster jobs on a Kubernetes cluster. Check out the [Dagster+ Kubernetes Agent](https://docs.dagster.io/dagster-plus/deployment/agents/kubernetes) guide for more information. +- Deploying to Open Source: Visit the [Deploying Dagster to Kubernetes](https://docs.dagster.io/deployment/guides/kubernetes) guide for more information. + +### About Kubernetes + +**Kubernetes** is an open source container orchestration system for automating software deployment, scaling, and management. Google originally designed Kubernetes, but the Cloud Native Computing Foundation now maintains the project. diff --git a/docs/docs-beta/docs/integrations/lakefs.md b/docs/docs-beta/docs/integrations/lakefs.md new file mode 100644 index 0000000000000..c4901e7bc28f6 --- /dev/null +++ b/docs/docs-beta/docs/integrations/lakefs.md @@ -0,0 +1,40 @@ +--- +layout: Integration +status: published +name: LakeFS +title: Dagster & LakeFS +sidebar_label: LakeFS +excerpt: lakeFS provides version control and complete lineage over the data lake. +date: 2023-06-27 +communityIntegration: true +apireflink: https://pydocs.lakefs.io/ +docslink: +partnerlink: https://lakefs.io/ +logo: /integrations/lakefs.svg +categories: + - Storage +enabledBy: +enables: +--- + +### About this integration + +By integrating with lakeFS, a big data scale version control system, you can leverage the versioning capabilities of lakeFS to track changes to your data. This integration allows you to have a complete lineage of your data, from the initial raw data to the transformed and processed data, making it easier to understand and reproduce data transformations. + +With the lakeFS and Dagster integration, you can ensure that data flowing through your Dagster jobs is easily reproducible. lakeFS provides a consistent view of your data across different versions, allowing you to troubleshoot pipeline runs and ensure consistent results. + +Furthermore, with lakeFS branching capabilities, Dagster jobs can run on separate branches without additional storage costs, creating isolation and allowing promotion of only high-quality data to production, leveraging a CI/CD pipeline for your data. + +### Installation + +```bash +pip install lakefs-client +``` + +### Example + + + +### About lakeFS + +**lakeFS** is on a mission to simplify the lives of data engineers, data scientists, and analysts by providing a data version control platform at scale. diff --git a/docs/docs-beta/docs/integrations/looker.md b/docs/docs-beta/docs/integrations/looker.md new file mode 100644 index 0000000000000..33b936b606125 --- /dev/null +++ b/docs/docs-beta/docs/integrations/looker.md @@ -0,0 +1,36 @@ +--- +layout: Integration +status: published +name: Looker +title: Dagster & Looker +sidebar_label: Looker +excerpt: The Looker integration allows you to monitor your Looker project as assets in Dagster, along with other data assets. +date: 2024-08-30 +apireflink: +docslink: https://docs.dagster.io/_apidocs/libraries/dagster-looker +partnerlink: https://www.looker.com/ +communityIntegration: true +logo: /integrations/looker.svg +categories: + - BI +enabledBy: +enables: +--- + +### About this integration + +Dagster allows you to represent your Looker project as assets, alongside your other technologies like dbt and Sling. This allows you to see how your Looker assets are connected to your other data assets, and how changes to other data assets might impact your Looker project. + +### Installation + +```bash +pip install dagster-looker +``` + +### Example + + + +### About Looker + +**Looker** is a modern platform for data analytics and visualization. It provides a unified interface for data exploration, modeling, and visualization, making it easier to understand and analyze data. Looker integrates with various data sources and can be used to create interactive reports, dashboards, and visualizations. diff --git a/docs/docs-beta/docs/integrations/meltano.md b/docs/docs-beta/docs/integrations/meltano.md new file mode 100644 index 0000000000000..3a36625b75b4c --- /dev/null +++ b/docs/docs-beta/docs/integrations/meltano.md @@ -0,0 +1,38 @@ +--- +layout: Integration +status: published +name: Meltano +title: Dagster & Meltano +sidebar_label: Meltano +excerpt: Tap into open source configurable ETL+ and the Singer integration library. +date: 2023-03-25 +apireflink: +docslink: https://github.com/quantile-development/dagster-meltano#readme +partnerlink: https://meltano.com/ +logo: /integrations/Meltano.svg +categories: + - ETL +communityIntegration: true +enabledBy: +enables: +--- + +### About this integration + +The `dagster-meltano` library allows you to run Meltano using Dagster. Design and configure ingestion jobs using the popular [Singer.io](https://singer.io) specification. + +**Note** that this integration can also be [managed from the Meltano platform](https://hub.meltano.com/utilities/dagster/) using `meltano add utility dagster` and configured using `meltano config dagster set --interactive`. + +### Installation + +```bash +pip install dagster-meltano +``` + +### Example + +
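+A short sketch of triggering a Meltano run from a Dagster job. This follows our reading of the `dagster-meltano` README, so treat the `meltano_resource`/`meltano_run_op` usage as an assumption, and the `tap-github target-postgres` pair as a hypothetical selection: + +```python +from dagster import job +from dagster_meltano import meltano_resource, meltano_run_op + +@job(resource_defs={"meltano": meltano_resource}) +def meltano_run_job(): +    # Executes `meltano run tap-github target-postgres` inside the Meltano project +    meltano_run_op("tap-github target-postgres")() +```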
+ +### About Meltano + +[Meltano](https://meltano.com/) provides data engineers with a set of tools for easily creating and managing pipelines as code by providing a wide array of composable connectors. Meltano's 'CLI for ELT+' lets you test your changes before they go live. diff --git a/docs/docs-beta/docs/integrations/microsoft-teams.md b/docs/docs-beta/docs/integrations/microsoft-teams.md new file mode 100644 index 0000000000000..bed3741fdcda1 --- /dev/null +++ b/docs/docs-beta/docs/integrations/microsoft-teams.md @@ -0,0 +1,35 @@ +--- +layout: Integration +status: published +name: Microsoft Teams +title: Dagster & Microsoft Teams +sidebar_label: Microsoft Teams +excerpt: Keep your team up to speed with Teams messages. +date: 2024-08-30 +apireflink: https://docs.dagster.io/_apidocs/libraries/dagster-msteams +docslink: +partnerlink: https://www.microsoft.com/en-us/microsoft-teams/group-chat-software +logo: /integrations/Microsoft Teams.svg +categories: + - Alerting +enabledBy: +enables: +--- + +### About this integration + +By configuring this resource, you can post messages to MS Teams from any Dagster op or asset. + +### Installation + +```bash +pip install dagster-msteams +``` + +### Example + + + +### About Microsoft Teams + +**Microsoft Teams** is a business communication platform. Teams offers workspace chat and videoconferencing, file storage, and application integration. diff --git a/docs/docs-beta/docs/integrations/open-metadata.md b/docs/docs-beta/docs/integrations/open-metadata.md new file mode 100644 index 0000000000000..476eaaa032b48 --- /dev/null +++ b/docs/docs-beta/docs/integrations/open-metadata.md @@ -0,0 +1,28 @@ +--- +layout: Integration +status: published +name: Open Metadata +title: Dagster & Open Metadata +sidebar_label: Open Metadata +excerpt: Configure and schedule Dagster metadata and profiler workflows from the OpenMetadata UI. +date: 2022-11-07 +apireflink: +docslink: https://docs.open-metadata.org/connectors/pipeline/dagster +partnerlink: https://open-metadata.org/ +communityIntegration: true +logo: /integrations/OpenMetadata.svg +categories: + - Metadata +enabledBy: +enables: +--- + +### About this integration + +With this integration, you can create an Open Metadata service to ingest metadata produced by the Dagster application, and view the Ingestion Pipeline running from the Open Metadata Service Page. + +### About Open Metadata + +Poorly organized metadata is preventing organizations from realizing the full potential of data. Most metadata is incorrect, inconsistent, stale, missing, and fragmented in silos across various disconnected tools, obscuring a holistic picture of data. + +**Open Metadata** is an all-in-one platform for data discovery, data lineage, data quality, observability, governance, and team collaboration. It's one of the fastest growing open source projects with a vibrant community and adoption by a diverse set of companies in a variety of industry verticals. Powered by a centralized metadata store based on Open Metadata Standards/APIs, supporting connectors to a wide range of data services, OpenMetadata enables end-to-end metadata management, giving you the freedom to unlock the value of your data assets. diff --git a/docs/docs-beta/docs/integrations/openai.md b/docs/docs-beta/docs/integrations/openai.md new file mode 100644 index 0000000000000..16d3f84270a96 --- /dev/null +++ b/docs/docs-beta/docs/integrations/openai.md @@ -0,0 +1,37 @@ +--- +layout: Integration +status: published +name: OpenAI +title: Dagster & OpenAI +sidebar_label: OpenAI +excerpt: Integrate OpenAI calls into your Dagster pipelines, without breaking the bank. +date: 2024-03-12 +apireflink: https://platform.openai.com/docs/introduction +docslink: https://docs.dagster.io/integrations/openai +partnerlink: +logo: /integrations/openai.svg +categories: + - Other +enabledBy: +enables: +--- + +### About this integration + +The `dagster-openai` library allows you to easily interact with the OpenAI REST API via the OpenAI Python client to build AI steps into your Dagster pipelines. You can also log OpenAI API usage metadata in Dagster Insights, giving you detailed observability on API call credit consumption. + +When paired with Dagster assets, the resource automatically logs OpenAI usage metadata in asset metadata. + +### Installation + +```bash +pip install dagster dagster-openai +``` + +### Example + +
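+A minimal sketch following the `dagster-openai` usage pattern; the model and prompt are placeholders: + +```python +from dagster import AssetExecutionContext, Definitions, EnvVar, asset +from dagster_openai import OpenAIResource + +@asset +def openai_summary(context: AssetExecutionContext, openai: OpenAIResource): +    # get_client(context) also records usage metadata on the asset +    with openai.get_client(context) as client: +        client.chat.completions.create( +            model="gpt-4o-mini",  # hypothetical model choice +            messages=[{"role": "user", "content": "Say hello, briefly."}], +        ) + +defs = Definitions( +    assets=[openai_summary], +    resources={"openai": OpenAIResource(api_key=EnvVar("OPENAI_API_KEY"))}, +) +```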
+ +### About OpenAI + +OpenAI is a U.S.-based artificial intelligence (AI) research organization with the goal of developing "safe and beneficial" artificial general intelligence, which it defines as "highly autonomous systems that outperform humans at most economically valuable work". diff --git a/docs/docs-beta/docs/integrations/pagerduty.md b/docs/docs-beta/docs/integrations/pagerduty.md new file mode 100644 index 0000000000000..b2ac9dbcb111c --- /dev/null +++ b/docs/docs-beta/docs/integrations/pagerduty.md @@ -0,0 +1,35 @@ +--- +layout: Integration +status: published +name: PagerDuty +title: Dagster & PagerDuty +sidebar_label: PagerDuty +excerpt: Centralize your monitoring with the dagster-pagerduty integration. +date: 2024-08-30 +apireflink: https://docs.dagster.io/_apidocs/libraries/dagster-pagerduty +docslink: +partnerlink: https://www.pagerduty.com/ +logo: /integrations/PagerDuty.svg +categories: + - Alerting +enabledBy: +enables: +--- + +### About this integration + +This library provides an integration between Dagster and PagerDuty to support creating alerts from your Dagster code. + +### Installation + +```bash +pip install dagster-pagerduty +``` + +### Example + +
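+A short sketch of raising a PagerDuty event from an asset. The summary, source, and routing key are placeholders, and `PagerDutyService` with `EventV2_create` reflects the library API as we understand it: + +```python +from dagster import Definitions, asset +from dagster_pagerduty import PagerDutyService + +@asset +def nightly_sync(pagerduty: PagerDutyService) -> None: +    try: +        ...  # the actual work goes here +    except Exception: +        # Sends an Events API v2 alert to the configured service +        pagerduty.EventV2_create( +            summary="nightly_sync failed",  # hypothetical alert text +            source="dagster", +            severity="error", +        ) +        raise + +defs = Definitions( +    assets=[nightly_sync], +    resources={"pagerduty": PagerDutyService(routing_key="0123456789abcdef0123456789abcdef")}, +) +```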
+ +### About PagerDuty + +**PagerDuty** is a popular SaaS incident response platform. It integrates machine data & human intelligence to improve visibility & agility for Real-Time Operations. diff --git a/docs/docs-beta/docs/integrations/pandas.md b/docs/docs-beta/docs/integrations/pandas.md new file mode 100644 index 0000000000000..b051eb7a98023 --- /dev/null +++ b/docs/docs-beta/docs/integrations/pandas.md @@ -0,0 +1,31 @@ +--- +layout: Integration +status: published +name: Pandas +title: Dagster & Pandas +sidebar_label: Pandas +excerpt: Implement validation on pandas DataFrames. +date: 2022-11-07 +apireflink: https://docs.dagster.io/_apidocs/libraries/dagster-pandas +docslink: https://docs.dagster.io/integrations/pandas +partnerlink: https://pandas.pydata.org/ +logo: /integrations/Pandas.svg +categories: + - Metadata +enabledBy: +enables: +--- + +### About this integration + +Perform data validation, emit summary statistics, and enable reliable DataFrame serialization/deserialization. The `dagster-pandas` library provides utilities for implementing validation on Pandas DataFrames. The Dagster type system generates documentation of your DataFrame constraints and makes it accessible in the Dagster UI. + +### Installation + +```bash +pip install dagster-pandas +``` + +### About Pandas + +**Pandas** is a popular Python package that provides data structures designed to make working with "relational" or "labeled" data both easy and intuitive. Pandas aims to be the fundamental high-level building block for doing practical, real-world data analysis in Python. diff --git a/docs/docs-beta/docs/integrations/pandera.md b/docs/docs-beta/docs/integrations/pandera.md new file mode 100644 index 0000000000000..6957b91cbe3f7 --- /dev/null +++ b/docs/docs-beta/docs/integrations/pandera.md @@ -0,0 +1,37 @@ +--- +layout: Integration +status: published +name: Pandera +title: Dagster & Pandera +sidebar_label: Pandera +excerpt: Generate Dagster Types from Pandera dataframe schemas. +date: 2022-11-07 +apireflink: https://docs.dagster.io/_apidocs/libraries/dagster-pandera +docslink: https://docs.dagster.io/integrations/pandera +partnerlink: https://pandera.readthedocs.io/en/stable/ +logo: /integrations/Pandera.svg +categories: + - Metadata +enabledBy: +enables: +--- + +### About this integration + +The `dagster-pandera` integration library provides an API for generating Dagster Types from [Pandera DataFrame schemas](https://pandera.readthedocs.io/en/stable/dataframe_schemas.html). + +Like all Dagster types, Dagster-Pandera-generated types can be used to annotate op inputs and outputs. This provides runtime type-checking with rich error reporting and allows the Dagster UI to display information about a DataFrame's structure. + +### Installation + +```bash +pip install dagster-pandera +``` + +### Example + +
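+A minimal sketch of generating a Dagster type from a Pandera schema; the schema itself is a made-up example: + +```python +import pandas as pd +import pandera as pa +from dagster import asset +from dagster_pandera import pandera_schema_to_dagster_type + +class TripSchema(pa.DataFrameModel): +    ride_id: str = pa.Field() +    fare: float = pa.Field(ge=0) + +@asset(dagster_type=pandera_schema_to_dagster_type(TripSchema)) +def trips() -> pd.DataFrame: +    # The returned frame is validated against TripSchema at runtime +    return pd.DataFrame({"ride_id": ["a1", "b2"], "fare": [12.5, 8.0]}) +```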
+ +### About Pandera + +**Pandera** is a statistical data testing toolkit and a data validation library for scientists, engineers, and analysts seeking correctness. diff --git a/docs/docs-beta/docs/integrations/prometheus.md b/docs/docs-beta/docs/integrations/prometheus.md new file mode 100644 index 0000000000000..a25da65aaaeaa --- /dev/null +++ b/docs/docs-beta/docs/integrations/prometheus.md @@ -0,0 +1,37 @@ +--- +layout: Integration +status: published +name: Prometheus +title: Dagster & Prometheus +sidebar_label: Prometheus +excerpt: Integrate with Prometheus via the prometheus_client library. +date: 2024-08-30 +apireflink: https://docs.dagster.io/_apidocs/libraries/dagster-prometheus +docslink: +partnerlink: https://prometheus.io/ +logo: /integrations/Prometheus.svg +categories: + - Monitoring +enabledBy: +enables: +--- + +### About this integration + +This integration allows you to push metrics to the Prometheus gateway from within a Dagster pipeline. + +### Installation + +```bash +pip install dagster-prometheus +``` + +### Example + + + +### About Prometheus + +**Prometheus** is an open source systems monitoring and alerting toolkit. Originally built at SoundCloud, Prometheus joined the Cloud Native Computing Foundation in 2016 as the second hosted project, after Kubernetes. + +Prometheus collects and stores metrics as time series data, recording each sample with a timestamp and optional key-value pairs called labels. diff --git a/docs/docs-beta/docs/integrations/sdf.md b/docs/docs-beta/docs/integrations/sdf.md new file mode 100644 index 0000000000000..ce80dd510ce77 --- /dev/null +++ b/docs/docs-beta/docs/integrations/sdf.md @@ -0,0 +1,38 @@ +--- +layout: Integration +status: published +name: SDF +title: Dagster & SDF +sidebar_label: SDF +excerpt: Put your SDF transformations to work, directly from within Dagster. +date: 2024-08-30 +apireflink: https://docs.dagster.io/_apidocs/libraries/dagster-sdf +docslink: https://docs.sdf.com/integrations/dagster/getting-started +partnerlink: https://www.sdf.com/ +communityIntegration: true +logo: /integrations/sdf.jpeg +categories: + - ETL +enabledBy: +enables: +--- + +### About this integration + +SDF can integrate seamlessly with your existing Dagster projects, providing the best-in-class transformation layer while enabling you to schedule, orchestrate, and monitor your DAGs in Dagster. + +When it comes time to materialize your Dagster assets, you can be confident that SDF has successfully compiled your workspace, making it safe to execute locally or against your cloud data warehouse. + +### Installation + +```bash +pip install dagster-sdf +``` + +### Example + + + +### About SDF + +[SDF](https://www.sdf.com/) is a multi-dialect SQL compiler, transformation framework, and analytical database engine. It natively compiles SQL dialects, like Snowflake, and connects to their corresponding data warehouses to materialize models. diff --git a/docs/docs-beta/docs/integrations/secoda.md b/docs/docs-beta/docs/integrations/secoda.md new file mode 100644 index 0000000000000..58c4c738eff32 --- /dev/null +++ b/docs/docs-beta/docs/integrations/secoda.md @@ -0,0 +1,28 @@ +--- +layout: Integration +status: published +name: Secoda +title: Dagster & Secoda +sidebar_label: Secoda +excerpt: Help your team understand metadata from Dagster by adding context in Secoda. +date: 2024-02-24 +apireflink: +docslink: https://www.secoda.co/automations/automated-documentation-for-new-integrations-in-dagster +partnerlink: https://www.secoda.co/integrations/dagster +communityIntegration: true +logo: /integrations/Secoda.svg +categories: + - Metadata +enabledBy: +enables: +--- + +### About this integration + +Connect Dagster to Secoda and see metadata related to your Dagster assets, asset groups, and jobs right in Secoda. Simplify your team's access, and remove the need to switch between tools. + +When you connect Dagster to Secoda, you can use Secoda's tools to add further context to your Dagster assets and jobs. Help your team understand metadata from Dagster by adding context in Secoda, like creating Documents, defining Metrics, and adding Tags. + +### About Secoda + +Secoda is an AI-powered data search, cataloging, lineage, and documentation platform that empowers data teams to manage data sprawl, scale infrastructure, and overcome common issues such as lack of observability, governance, and lengthy setup and integration periods. diff --git a/docs/docs-beta/docs/integrations/shell.md b/docs/docs-beta/docs/integrations/shell.md new file mode 100644 index 0000000000000..0c5653a4f47c6 --- /dev/null +++ b/docs/docs-beta/docs/integrations/shell.md @@ -0,0 +1,35 @@ +--- +layout: Integration +status: published +name: Bash / Shell +title: Dagster & Bash / Shell +sidebar_label: Bash / Shell +excerpt: Execute Bash/shell commands, directly or from a script file. +date: 2024-08-20 +apireflink: https://docs.dagster.io/_apidocs/libraries/dagster-shell +docslink: +partnerlink: +logo: /integrations/Shell.svg +categories: + - Compute +enabledBy: +enables: +--- + +### About this integration + +Dagster comes with a native `PipesSubprocessClient` resource that enables you to launch shell commands directly from Dagster assets and ops. This integration allows you to pass parameters to external shell scripts while Dagster receives real-time events, such as logs, asset checks, and asset materializations, from the initiated external execution. With minimal code changes required on the job side, this integration is both efficient and easy to implement. + +### Installation + +```bash +pip install dagster +``` + +### Example + +
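+A minimal sketch using the built-in `PipesSubprocessClient`; the script path is a placeholder: + +```python +from dagster import AssetExecutionContext, Definitions, PipesSubprocessClient, asset + +@asset +def shell_asset(context: AssetExecutionContext, pipes_subprocess_client: PipesSubprocessClient): +    # Runs the command in a subprocess and relays Pipes events back to Dagster +    return pipes_subprocess_client.run( +        context=context, +        command=["bash", "my_script.sh"],  # hypothetical script +    ).get_materialize_result() + +defs = Definitions( +    assets=[shell_asset], +    resources={"pipes_subprocess_client": PipesSubprocessClient()}, +) +```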
+ +### About shell + +A shell is a program that presents a command-line interface, letting you control your computer with typed commands instead of a graphical user interface driven by a mouse, keyboard, or touchscreen. diff --git a/docs/docs-beta/docs/integrations/slack.md b/docs/docs-beta/docs/integrations/slack.md new file mode 100644 index 0000000000000..ead87dc85b4b2 --- /dev/null +++ b/docs/docs-beta/docs/integrations/slack.md @@ -0,0 +1,35 @@ +--- +layout: Integration +status: published +name: Slack +title: Dagster & Slack +sidebar_label: Slack +excerpt: Up your notification game and keep stakeholders in the loop. +date: 2024-08-30 +apireflink: https://docs.dagster.io/_apidocs/libraries/dagster-slack +docslink: +partnerlink: https://slack.com/ +logo: /integrations/Slack.svg +categories: + - Alerting +enabledBy: +enables: +--- + +### About this integration + +This library provides an integration with Slack to support posting messages in your company's Slack workspace. + +### Installation + +```bash +pip install dagster-slack +``` + +### Example + + + +### About Slack + +The **Slack** messaging app provides chat, video, and voice communication tools and is used extensively across companies and communities. The Dagster Slack community can be found at [dagster.io/slack](https://dagster.io/slack). diff --git a/docs/docs-beta/docs/integrations/sling.md b/docs/docs-beta/docs/integrations/sling.md new file mode 100644 index 0000000000000..0c00aea6bb7e7 --- /dev/null +++ b/docs/docs-beta/docs/integrations/sling.md @@ -0,0 +1,35 @@ +--- +layout: Integration +status: published +name: Sling +title: Dagster & Sling +sidebar_label: Sling +excerpt: Extract and load data from popular data sources to destinations with Sling through Dagster. +date: 2024-08-30 +apireflink: https://docs.dagster.io/_apidocs/libraries/dagster-embedded-elt +docslink: https://docs.dagster.io/integrations/embedded-elt/sling +partnerlink: https://slingdata.io/ +logo: /integrations/sling.png +categories: + - ETL +enabledBy: +enables: +--- + +### About this integration + +This integration allows you to use [Sling](https://slingdata.io/) to extract and load data from popular data sources to destinations with high performance and ease. + +### Installation + +```bash +pip install dagster-embedded-elt +``` + +### Example + + + +### About Sling + +Sling provides an easy-to-use YAML configuration layer for loading data from files, replicating data between databases, exporting custom SQL queries to cloud storage, and much more. diff --git a/docs/docs-beta/docs/integrations/snowflake.md b/docs/docs-beta/docs/integrations/snowflake.md new file mode 100644 index 0000000000000..1f8adf390a4be --- /dev/null +++ b/docs/docs-beta/docs/integrations/snowflake.md @@ -0,0 +1,35 @@ +--- +layout: Integration +status: published +name: Snowflake +title: Dagster & Snowflake +sidebar_label: Snowflake +excerpt: An integration with the Snowflake data warehouse. Read and write natively to Snowflake from Software Defined Assets. +date: 2022-11-07 +apireflink: https://docs.dagster.io/_apidocs/libraries/dagster-snowflake +docslink: https://docs.dagster.io/integrations/snowflake +partnerlink: https://www.snowflake.com/en/ +logo: /integrations/Snowflake.svg +categories: + - Storage +enabledBy: +enables: +--- + +### About this integration + +This library provides an integration with the Snowflake data warehouse. Connect to Snowflake as a resource, then use the integration-provided functions to construct an op to establish connections and execute Snowflake queries. Read and write natively to Snowflake from Dagster assets. + +### Installation + +```bash +pip install dagster-snowflake +``` + +### Example + +
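+A minimal sketch running a query through `SnowflakeResource`; credentials come from environment variables and the query is illustrative: + +```python +from dagster import Definitions, EnvVar, asset +from dagster_snowflake import SnowflakeResource + +@asset +def snowflake_row_count(snowflake: SnowflakeResource) -> None: +    # get_connection() yields a snowflake.connector connection +    with snowflake.get_connection() as conn: +        conn.cursor().execute("SELECT COUNT(*) FROM my_schema.my_table") + +defs = Definitions( +    assets=[snowflake_row_count], +    resources={ +        "snowflake": SnowflakeResource( +            account=EnvVar("SNOWFLAKE_ACCOUNT"), +            user=EnvVar("SNOWFLAKE_USER"), +            password=EnvVar("SNOWFLAKE_PASSWORD"), +        ) +    }, +) +```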
+ +### About Snowflake + +A cloud-based data storage and analytics service, generally termed "data-as-a-service". **Snowflake**'s data warehouse is one of the most widely adopted cloud warehouses for analytics. diff --git a/docs/docs-beta/docs/integrations/spark.md b/docs/docs-beta/docs/integrations/spark.md new file mode 100644 index 0000000000000..a8e1b693e82b3 --- /dev/null +++ b/docs/docs-beta/docs/integrations/spark.md @@ -0,0 +1,28 @@ +--- +layout: Integration +status: published +name: Spark +title: Dagster & Spark +sidebar_label: Spark +excerpt: Configure and run Spark jobs. +date: 2022-11-07 +apireflink: https://docs.dagster.io/_apidocs/libraries/dagster-spark +docslink: https://docs.dagster.io/integrations/spark +partnerlink: +logo: /integrations/Spark.svg +categories: + - Compute +enabledBy: + - dagster-pyspark +enables: +--- + +### About this integration + +Spark jobs typically execute on infrastructure that's specialized for Spark. Spark applications are typically not containerized or executed on Kubernetes. + +Running Spark code often requires submitting code to a Databricks or EMR cluster. `dagster-pyspark` provides a Spark class with methods for configuration and constructing the `spark-submit` command for a Spark job. + +### About Apache Spark + +**Apache Spark** is an open source unified analytics engine for large-scale data processing. Spark provides an interface for programming clusters with implicit data parallelism and fault tolerance. It also provides libraries for graph computation, SQL for structured data processing, ML, and data science. diff --git a/docs/docs-beta/docs/integrations/ssh-sftp.md b/docs/docs-beta/docs/integrations/ssh-sftp.md new file mode 100644 index 0000000000000..bd8d15ed34626 --- /dev/null +++ b/docs/docs-beta/docs/integrations/ssh-sftp.md @@ -0,0 +1,35 @@ +--- +layout: Integration +status: published +name: SSH/SFTP +title: Dagster & SSH/SFTP +sidebar_label: SSH/SFTP +excerpt: Establish encrypted connections to networked resources. +date: 2022-11-07 +apireflink: https://docs.dagster.io/_apidocs/libraries/dagster-ssh +docslink: +partnerlink: https://www.ssh.com/academy/ssh/protocol +logo: /integrations/SSH.svg +categories: + - Other +enabledBy: +enables: +--- + +### About this integration + +This integration provides a resource for SSH remote execution using [Paramiko](https://github.com/paramiko/paramiko). It allows you to establish secure connections to networked resources and execute commands remotely. The integration also provides an SFTP client for secure file transfers between the local and remote systems. + +### Installation + +```bash +pip install dagster-ssh +``` + +### Example + +
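+A short sketch of fetching a file over SFTP with the library's SSH resource. The host, credentials, and paths are placeholders, and the `SSHResource` constructor and `sftp_get` method reflect the API as we understand it: + +```python +from dagster import Definitions, asset +from dagster_ssh import SSHResource + +@asset +def remote_export(ssh: SSHResource) -> None: +    # Copies a remote file to the local filesystem over SFTP +    ssh.sftp_get("/remote/data/export.csv", "/tmp/export.csv") + +defs = Definitions( +    assets=[remote_export], +    resources={ +        "ssh": SSHResource(remote_host="sftp.example.com", username="me", key_file="~/.ssh/id_rsa") +    }, +) +```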
+ +### About SSH/SFTP + +The **SSH protocol** allows for secure remote login with strong authentication to networked resources. It protects network connections with strong encryption. The Dagster library provides direct SSH and SFTP calls from within the execution of your pipelines. diff --git a/docs/docs-beta/docs/integrations/twilio.md b/docs/docs-beta/docs/integrations/twilio.md new file mode 100644 index 0000000000000..2b2cb4d56f3af --- /dev/null +++ b/docs/docs-beta/docs/integrations/twilio.md @@ -0,0 +1,35 @@ +--- +layout: Integration +status: published +name: Twilio +title: Dagster & Twilio +sidebar_label: Twilio +excerpt: Integrate Twilio tasks into your data pipeline runs. +date: 2024-08-30 +apireflink: https://docs.dagster.io/_apidocs/libraries/dagster-twilio +docslink: +partnerlink: https://www.twilio.com/ +logo: /integrations/Twilio.svg +categories: + - Alerting +enabledBy: +enables: +--- + +### About this integration + +Use your Twilio `Account SID` and `Auth Token` to build Twilio tasks right into your Dagster pipeline. + +### Installation + +```bash +pip install dagster-twilio +``` + +### Example + + + +### About Twilio + +**Twilio** provides communication APIs for phone calls, text messages, and other communication functions. diff --git a/docs/docs-beta/docs/integrations/wandb.md b/docs/docs-beta/docs/integrations/wandb.md new file mode 100644 index 0000000000000..57a4e0aaad2af --- /dev/null +++ b/docs/docs-beta/docs/integrations/wandb.md @@ -0,0 +1,46 @@ +--- +layout: Integration +status: published +name: Weights & Biases +title: Dagster & Weights & Biases +sidebar_label: Weights & Biases +excerpt: Orchestrate your MLOps pipelines and maintain ML assets. +date: 2023-02-07 +apireflink: https://docs.dagster.io/_apidocs/libraries/dagster-wandb +docslink: https://docs.wandb.ai/guides/integrations/dagster +partnerlink: https://wandb.ai/ +communityIntegration: true +logo: /integrations/WandB.svg +categories: + - Other +enabledBy: +enables: +--- + +### About this integration + +Use Dagster and Weights & Biases (W&B) to orchestrate your MLOps pipelines and maintain ML assets. The integration with W&B makes it easy within Dagster to: + +- use and create W&B Artifacts +- use and create Registered Models in W&B Model Registry +- run training jobs on dedicated compute using W&B Launch +- use the Weights & Biases client in ops and assets + +The W&B Dagster integration provides a W&B-specific Dagster resource and I/O Manager: + +- `wandb_resource`: a Dagster resource used to authenticate and communicate with the W&B API. +- `wandb_artifacts_io_manager`: a Dagster I/O Manager used to consume W&B Artifacts. + +### Installation + +To use this integration you will need a Weights and Biases account. Then you will need a W&B API key, a W&B entity (user or team), and a W&B project. Full installation details can be found on [the Weights and Biases website here](https://docs.wandb.ai/guides/integrations/other/dagster). + +**Note** that Weights & Biases does offer a free cloud account for personal (non-corporate) use. Check out their [pricing page](https://wandb.ai/site/pricing) for details. + +### Example + +A complete tutorial can be found on [the Weights and Biases website here](https://docs.wandb.ai/guides/integrations/other/dagster). + +### About Weights & Biases + +[Weights & Biases](https://wandb.ai/site) makes it easy to track your experiments, manage & version your data, and collaborate with your team so you can focus on building the best machine learning models. diff --git a/docs/docs-beta/docs/intro.md b/docs/docs-beta/docs/intro.md new file mode 100644 index 0000000000000..c981a60dc34ef --- /dev/null +++ b/docs/docs-beta/docs/intro.md @@ -0,0 +1,52 @@ +--- +title: Overview +description: Dagster's Documentation +slug: / +displayed_sidebar: 'docs' +hide_table_of_contents: true +--- + +import { Card, CardGroup } from '@site/src/components/Cards'; +import ThemedImage from '@theme/ThemedImage'; + +# Welcome to Dagster + +Dagster is a data orchestrator built for data engineers, with integrated lineage, observability, a declarative programming model, and best-in-class testability. + + + + +## Get started + + + + Build your first Dagster pipeline in our Quickstart tutorial. + + + New to Dagster? Learn about how thinking in assets can help you manage your data better. + + + Learn about Dagster Plus, our managed offering that includes a hosted Dagster instance and many more features.
+ + + +## Join the Dagster community + + + + Join our Slack community to talk with other Dagster users, use our AI-powered chatbot, and get help with Dagster. + + + Star our GitHub repository and follow our development through GitHub Discussions. + + + Watch our latest videos on YouTube. + + diff --git a/docs/docs-beta/docs/partials/_Experimental.md b/docs/docs-beta/docs/partials/_Experimental.md new file mode 100644 index 0000000000000..a4089ffa6a372 --- /dev/null +++ b/docs/docs-beta/docs/partials/_Experimental.md @@ -0,0 +1,3 @@ +:::warning +This feature is considered experimental and is under active development. +::: \ No newline at end of file diff --git a/docs/docs-beta/docs/partials/_InspirationList.md b/docs/docs-beta/docs/partials/_InspirationList.md new file mode 100644 index 0000000000000..a7e64434dad2b --- /dev/null +++ b/docs/docs-beta/docs/partials/_InspirationList.md @@ -0,0 +1,7 @@ +## Want more inspiration? + +If you're looking for additional inspiration, we recommend: + +- [**Dagster Open Platform**](https://github.com/dagster-io/dagster-open-platform), which is Dagster Labs' open source data platform. This full-sized project contains real assets and other Dagster features used by the Dagster Labs team. +- [**GitHub Discussions**](https://github.com/dagster-io/dagster/discussions), where you can ask questions and get inspired by the Dagster community +- [**The Awesome Dagster repository**](https://github.com/dagster-io/awesome-dagster), which is a collection of all awesome things related to Dagster, including other users' projects, talks, articles, and more diff --git a/docs/docs-beta/docs/partials/_TestSSO.md b/docs/docs-beta/docs/partials/_TestSSO.md new file mode 100644 index 0000000000000..319ac0a41d8a5 --- /dev/null +++ b/docs/docs-beta/docs/partials/_TestSSO.md @@ -0,0 +1,18 @@ +## Step 5: Test your SSO configuration \{#test-sso} + +Lastly, you'll test your SSO configuration: + +- [Service provider (SP)-initiated login](#test-service-login) +- [Identity provider (IdP)-initiated login](#test-identity-login) + +### Testing a service provider-initiated login \{#test-service-login} + +1. Navigate to your Dagster+ sign-in page at `https://.dagster.cloud` + +2. Click the **Sign in with SSO** button. + +3. Initiate the login flow and address issues that arise, if any. + +### Testing an identity provider-initiated login \{#test-identity-login} + +{/* Copy for this section is in individual guides */} \ No newline at end of file diff --git a/docs/docs-beta/docs/todo.md b/docs/docs-beta/docs/todo.md new file mode 100644 index 0000000000000..25867d5cbee78 --- /dev/null +++ b/docs/docs-beta/docs/todo.md @@ -0,0 +1,3 @@ +--- +unlisted: true +--- diff --git a/docs/docs-beta/docs/tutorial/create-new-project.md b/docs/docs-beta/docs/tutorial/create-new-project.md new file mode 100644 index 0000000000000..25867d5cbee78 --- /dev/null +++ b/docs/docs-beta/docs/tutorial/create-new-project.md @@ -0,0 +1,3 @@ +--- +unlisted: true +--- diff --git a/docs/docs-beta/docs/tutorial/introduction.md b/docs/docs-beta/docs/tutorial/introduction.md new file mode 100644 index 0000000000000..fa350b858b24b --- /dev/null +++ b/docs/docs-beta/docs/tutorial/introduction.md @@ -0,0 +1,7 @@ +--- +title: "Introduction" +description: "Welcome to the Dagster documentation! If this is your first time developing a Dagster pipeline, read through this Getting Started section to get familiar with the basics. Otherwise, feel free to explore our guides and API documentation!"
+slug: introduction +hide_title: false +unlisted: true +--- diff --git a/docs/docs-beta/docs/tutorial/tutorial-etl.md b/docs/docs-beta/docs/tutorial/tutorial-etl.md new file mode 100644 index 0000000000000..44e430d6c0b76 --- /dev/null +++ b/docs/docs-beta/docs/tutorial/tutorial-etl.md @@ -0,0 +1,62 @@ +--- +title: Build an ETL Pipeline +description: Learn how to build an ETL pipeline with Dagster +last_update: + date: 2024-08-10 + author: Pedram Navid +--- + +# Build your first ETL pipeline + +Welcome to this hands-on tutorial where you'll learn how to build an ETL pipeline with Dagster while exploring its key concepts. +If you haven't already, complete the [Quick Start](/getting-started/quickstart) tutorial to get familiar with Dagster. + +## What you'll learn + +- Setting up a Dagster project with the recommended project structure +- Creating Assets and using Resources to connect to external systems +- Adding metadata to your assets +- Building dependencies between assets +- Running a pipeline by materializing assets +- Adding schedules, sensors, and partitions to your assets + +## Step 1: Set up your Dagster environment + +First, set up a new Dagster project. + +1. Open your terminal and create a new directory for your project: + + ```bash title="Create a new directory" + mkdir dagster-etl-tutorial + cd dagster-etl-tutorial + ``` + +2. Create a virtual environment and activate it: + + ```bash title="Create a virtual environment" + python -m venv venv + source venv/bin/activate + # On Windows, use `venv\Scripts\activate` + ``` + +3. Install Dagster and the required dependencies: + + ```bash title="Install Dagster and dependencies" + pip install dagster dagster-webserver pandas + ```
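+ +4. To verify the environment, you can define a minimal asset and materialize it in the UI. This smoke test is illustrative and not part of the tutorial's final project structure (the file name and asset are hypothetical): + +   ```python title="etl_tutorial_smoke_test.py" +   import pandas as pd +   from dagster import MaterializeResult, asset + +   @asset +   def raw_data() -> MaterializeResult: +       # A stand-in for the extract step you'll build later in this tutorial +       df = pd.DataFrame({"id": [1, 2], "value": [10, 20]}) +       return MaterializeResult(metadata={"num_rows": len(df)}) +   ``` + +   Then run `dagster dev -f etl_tutorial_smoke_test.py` and materialize `raw_data` from the UI at http://localhost:3000.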
+ +## What you've learned + +Congratulations! You've just built and run your first ETL pipeline with Dagster. You've learned how to: + +- Set up a Dagster project +- Define Software-Defined Assets for each step of your ETL process +- Use Dagster's UI to run and monitor your pipeline + +## Next steps + +To expand on this tutorial, you could: + +- Add more complex transformations +- Implement error handling and retries +- Create a schedule to run your pipeline periodically diff --git a/docs/docs-beta/docusaurus.config.ts b/docs/docs-beta/docusaurus.config.ts new file mode 100644 index 0000000000000..b7fdb23213b65 --- /dev/null +++ b/docs/docs-beta/docusaurus.config.ts @@ -0,0 +1,188 @@ +import {themes as prismThemes} from 'prism-react-renderer'; +import type {Config} from '@docusaurus/types'; +import type * as Preset from '@docusaurus/preset-classic'; + +const config: Config = { + title: 'Dagster Docs - Beta', + tagline: 'Dagster is a Python framework for building production-grade data platforms.', + url: 'https://docs.dagster.io', + favicon: 'img/favicon.ico', + baseUrl: '/', + onBrokenLinks: 'throw', + onBrokenMarkdownLinks: 'throw', + onBrokenAnchors: 'throw', + organizationName: 'dagster', + projectName: 'dagster', + markdown: { + mermaid: true, + }, + themes: ['@docusaurus/theme-mermaid'], + i18n: {defaultLocale: 'en', locales: ['en']}, + plugins: [ + require.resolve('docusaurus-plugin-sass'), + require.resolve('docusaurus-plugin-image-zoom'), + ], + themeConfig: { + // Algolia environment variables are not required during development + algolia: + process.env.NODE_ENV === 'development' + ? { + appId: 'ABC123', + apiKey: 'ABC123', + indexName: 'ABC123', + contextualSearch: false, + } + : { + appId: process.env.ALGOLIA_APP_ID, + apiKey: process.env.ALGOLIA_API_KEY, + indexName: process.env.ALGOLIA_INDEX_NAME, + contextualSearch: false, + }, + announcementBar: { + id: 'announcementBar', + content: `This is the preview of the new documentation site. If you have any feedback, please let us know on GitHub. The current documentation can be found at docs.dagster.io.`, + }, + colorMode: { + defaultMode: 'light', + disableSwitch: false, + respectPrefersColorScheme: true, + }, + prism: { + theme: prismThemes.github, + darkTheme: prismThemes.dracula, + additionalLanguages: ['diff', 'json', 'bash', 'docker'], + }, + zoom: { + selector: '.markdown > img, .tabs-container img ', + config: { + // options you can specify via https://github.com/francoischalifour/medium-zoom#usage + background: { + light: 'rgb(255, 255, 255)', + dark: 'rgb(50, 50, 50)', + }, + }, + }, + tableOfContents: { + minHeadingLevel: 2, + maxHeadingLevel: 4, + }, + navbar: { + hideOnScroll: true, + logo: { + alt: 'Dagster Logo', + src: 'img/dagster-docs-logo.svg', + srcDark: 'img/dagster-docs-logo-dark.svg', + href: '/', + }, + items: [ + { + label: 'Docs', + type: 'doc', + docId: 'intro', + position: 'left', + }, + { + label: 'Integrations', + type: 'doc', + docId: 'integrations/index', + position: 'left', + }, + { + label: 'Dagster+', + type: 'doc', + docId: 'dagster-plus', + position: 'left', + }, + { + label: 'API Docs', + type: 'doc', + docId: 'api/index', + position: 'left', + }, + //{ + // label: 'Changelog', + // type: 'doc', + // docId: 'changelog', + // position: 'right', + //}, + { + label: 'Feedback', + href: 'https://github.com/dagster-io/dagster/discussions/24816', + position: 'right', + className: 'feedback-nav-link', + }, + ], + }, + image: 'img/docusaurus-social-card.jpg', + docs: { + sidebar: { + autoCollapseCategories: false, + hideable: false, + }, + }, + + footer: { + logo: { + alt: 'Dagster Logo', + src: 'img/dagster_labs-primary-horizontal.svg', + srcDark: 'img/dagster_labs-reversed-horizontal.svg', + href: '/', + }, + links: [ + { + html: ` + + + + `, + }, + ], + copyright: `Copyright © ${new Date().getFullYear()} Dagster Labs`, + }, + } satisfies Preset.ThemeConfig, + + presets: [ + [ + '@docusaurus/preset-classic', + { + docs: { + sidebarPath: './sidebars.ts', + routeBasePath: '/', + editUrl: 'https://github.com/dagster-io/dagster/tree/master/docs/docs-beta', + }, + blog: false, + theme: { + customCss: [ + require.resolve('./node_modules/modern-normalize/modern-normalize.css'), + require.resolve('./src/styles/custom.scss'), + ], + }, + // https://docusaurus.io/docs/api/plugins/@docusaurus/plugin-sitemap#ex-config + sitemap: { + //lastmod: 'date', + changefreq: 'weekly', + priority: 0.5, + ignorePatterns: ['/tags/**'], + filename: 'sitemap.xml', + createSitemapItems: async (params) => { + const {defaultCreateSitemapItems, ...rest} = params; + const items = await defaultCreateSitemapItems(rest); + //return items.filter((item) => !item.url.includes('/page/')); + return items; + }, + }, + } satisfies Preset.Options, + ], + ], +}; + +export default config; diff --git a/docs/docs-beta/package.json b/docs/docs-beta/package.json new file mode 100644 index 0000000000000..2934b527bed2e --- /dev/null +++ b/docs/docs-beta/package.json @@ -0,0 +1,83 @@ +{ + "name": "dagster-docs-beta", + "version": "0.0.0", + "private": true, + "scripts": { + "docusaurus": "docusaurus", + "start": "docusaurus start -p 3050",
+ "build": "docusaurus build", + "swizzle": "docusaurus swizzle", + "deploy": "docusaurus deploy", + "clear": "docusaurus clear", + "serve": "docusaurus serve", + "write-translations": "docusaurus write-translations", + "write-heading-ids": "docusaurus write-heading-ids", + "ts": "tsc -p . --noEmit", + "vale": "vale ./docs --ext=.md,.mdx", + "lint": "eslint . --ext=.tsx,.ts,.js,.md,.mdx --fix", + "lint-and-vale": "yarn run lint && yarn run vale", + "sync-api-docs": "/bin/sh scripts/vercel-sync-api-docs.sh" + }, + "dependencies": { + "@docusaurus/core": "3.5.2", + "@docusaurus/plugin-ideal-image": "^3.5.2", + "@docusaurus/preset-classic": "3.5.2", + "@docusaurus/theme-mermaid": "^3.5.2", + "@mdx-js/react": "^3.0.1", + "clsx": "^2.1.1", + "docusaurus-plugin-image-zoom": "^2.0.0", + "docusaurus-plugin-sass": "^0.2.5", + "modern-normalize": "^3.0.1", + "prism-react-renderer": "^2.4.0", + "raw-loader": "^4.0.2", + "react": "^18.3.1", + "react-dom": "^18.3.1" + }, + "devDependencies": { + "@babel/core": "^7.25.2", + "@babel/eslint-parser": "^7.25.1", + "@docusaurus/eslint-plugin": "^3.5.2", + "@docusaurus/module-type-aliases": "3.5.2", + "@docusaurus/theme-classic": "^3.5.2", + "@docusaurus/tsconfig": "3.5.2", + "@docusaurus/types": "3.5.2", + "@eslint/js": "^9.10.0", + "@types/babel__core": "^7.20.5", + "@types/node": "^22.5.4", + "@types/react": "^18.3.5", + "@typescript-eslint/parser": "^8.4.0", + "eslint": "^8.57.0", + "eslint-config-prettier": "^9.1.0", + "eslint-plugin-mdx": "^3.1.5", + "eslint-plugin-prettier": "^5.2.1", + "eslint-plugin-react": "^7.35.2", + "globals": "^15.9.0", + "prettier": "^3.3.3", + "prettier-eslint": "^16.3.0", + "remark-frontmatter": "^5.0.0", + "remark-mdx": "^3.0.1", + "sass": "^1.78.0", + "typescript": "~5.5.4", + "typescript-eslint": "^8.4.0", + "webpack": "^5.94.0" + }, + "browserslist": { + "production": [ + ">0.5%", + "not dead", + "not op_mini all" + ], + "development": [ + "last 3 chrome version", + "last 3 firefox version", + "last 5 safari version" + ] + }, + "resolutions": { + "path-to-regexp@2.2.1": "3.3.0" + }, + "engines": { + "node": ">=18.0" + }, + "packageManager": "yarn@4.4.0" +} diff --git a/docs/docs-beta/scripts/vercel-sync-api-docs.sh b/docs/docs-beta/scripts/vercel-sync-api-docs.sh new file mode 100644 index 0000000000000..fc4240432665d --- /dev/null +++ b/docs/docs-beta/scripts/vercel-sync-api-docs.sh @@ -0,0 +1,24 @@ +#!/bin/bash +# +# **NOTE: this script is intended to by used from Vercel only!** +# +# Description: Builds and synchronizes MDX API docs from Sphinx +# Usage: yarn sync-api-docs +# + +set -e + +cd .. 
+ +export LC_ALL=C.UTF-8 + +curl -LsSf https://astral.sh/uv/install.sh | sh +source ~/.cargo/env + +uv python install 3.11 +uv venv +source .venv/bin/activate + +uv pip install tox +uvx tox -e sphinx-mdx +make mdx_copy diff --git a/docs/docs-beta/sidebars.ts b/docs/docs-beta/sidebars.ts new file mode 100644 index 0000000000000..b30f57fc6b33e --- /dev/null +++ b/docs/docs-beta/sidebars.ts @@ -0,0 +1,556 @@ +import type {SidebarsConfig} from '@docusaurus/plugin-content-docs'; +const sidebars: SidebarsConfig = { + docs: [ + { + type: 'category', + label: 'Getting Started', + collapsed: false, + items: ['intro', 'getting-started/quickstart', 'getting-started/installation'], + }, + { + type: 'category', + label: 'Tutorial', + collapsed: false, + items: ['tutorial/tutorial-etl'], + }, + { + type: 'category', + label: 'Build', + collapsed: false, + items: [ + { + type: 'category', + label: 'Create a pipeline', + link: {type: 'doc', id: 'guides/pipelines'}, + items: [ + 'guides/data-assets', + 'guides/metadata', + 'guides/partitioning', + 'guides/partition-dependencies', + 'guides/external-assets', + ], + }, + { + type: 'category', + label: 'Configure', + items: [ + 'guides/configuring-assets', + 'guides/asset-factories', + //'guides/resources', + 'guides/databases', + 'guides/apis', + 'guides/io-managers', + ], + }, + { + type: 'category', + label: 'Integrate', + items: ['guides/ingesting-data', 'guides/transform-dbt', 'guides/non-python'], + }, + ], + }, + { + type: 'category', + label: 'Automate', + collapsed: false, + items: [ + 'guides/schedules', + 'guides/sensors', + 'guides/asset-sensors', + //'guides/declarative-automation', + ], + }, + { + type: 'category', + label: 'Test', + collapsed: false, + items: [ + 'guides/asset-checks', + 'guides/data-freshness-testing', + 'guides/unit-tests-assets-and-ops', + ], + }, + { + type: 'category', + label: 'Deploy', + collapsed: false, + items: [ + { + type: 'category', + label: 'Self-hosting Dagster', + items: ['guides/kubernetes', 'guides/docker'], + }, + { + type: 'category', + label: 'Dagster Plus', + items: [ + 'guides/dagster-plus', + //'guides/self-hosted-to-dagster-plus' + ], + }, + //'guides/secrets', + //'guides/code-locations', + ], + }, + //{ + // type: 'category', + // label: 'Understand', + // items: [ + // { + // type: 'category', + // label: 'Assets', + // items: [ + // { + // type: 'autogenerated', + // dirName: 'concepts/assets', + // }, + // ], + // }, + // { + // type: 'category', + // label: 'Automation and Scheduling', + // items: [ + // { + // type: 'autogenerated', + // dirName: 'concepts/automation', + // }, + // ], + // }, + // { + // type: 'doc', + // label: 'Partitions', + // id: 'concepts/partitions', + // }, + // { + // type: 'doc', + // label: 'Resources', + // id: 'concepts/resources', + // }, + // { + // type: 'doc', + // label: 'I/O managers', + // id: 'concepts/io-managers', + // }, + // { + // type: 'category', + // label: 'Ops and jobs', + // link: { + // type: 'doc', + // id: 'concepts/ops-jobs', + // }, + // items: [ + // { + // type: 'autogenerated', + // dirName: 'concepts/ops-jobs', + // }, + // ], + // }, + // { + // type: 'category', + // label: 'Execution', + // link: { + // type: 'doc', + // id: 'concepts/execution', + // }, + // items: [ + // { + // type: 'autogenerated', + // dirName: 'concepts/execution', + // }, + // ], + // }, + // ], + //}, + { + type: 'category', + label: 'About', + collapsed: false, + items: [ + { + type: 'autogenerated', + dirName: 'about', + }, + ], + }, + ], + integrations: [ + 
{ + type: 'category', + label: 'Categories', + collapsible: false, + items: [ + { + type: 'category', + label: 'ETL', + items: [ + 'integrations/airbyte', + 'integrations/sdf', + 'integrations/fivetran', + 'integrations/dlt', + 'integrations/census', + 'integrations/dbt', + 'integrations/dbt-cloud', + 'integrations/sling', + 'integrations/hightouch', + 'integrations/meltano', + ], + }, + { + type: 'category', + label: 'Storage', + items: [ + 'integrations/snowflake', + 'integrations/gcp/bigquery', + 'integrations/aws/athena', + 'integrations/aws/s3', + 'integrations/duckdb', + 'integrations/deltalake', + 'integrations/aws/redshift', + 'integrations/gcp/gcs', + 'integrations/azure-adls2', + 'integrations/lakefs', + ], + }, + { + type: 'category', + label: 'Compute', + items: [ + 'integrations/kubernetes', + 'integrations/spark', + 'integrations/aws/glue', + 'integrations/jupyter', + 'integrations/aws/emr', + 'integrations/databricks', + 'integrations/aws/lambda', + 'integrations/docker', + 'integrations/shell', + 'integrations/gcp/dataproc', + ], + }, + { + type: 'category', + label: 'BI', + items: ['integrations/looker'], + }, + { + type: 'category', + label: 'Monitoring', + items: ['integrations/prometheus', 'integrations/datadog', 'integrations/aws/cloudwatch'], + }, + { + type: 'category', + label: 'Alerting', + items: [ + 'integrations/slack', + 'integrations/twilio', + 'integrations/pagerduty', + 'integrations/microsoft-teams', + ], + }, + { + type: 'category', + label: 'Metadata', + items: [ + 'integrations/secoda', + 'integrations/pandera', + 'integrations/open-metadata', + 'integrations/pandas', + ], + }, + { + type: 'category', + label: 'Other', + items: [ + 'integrations/cube', + 'integrations/aws/secretsmanager', + 'integrations/openai', + 'integrations/ssh-sftp', + 'integrations/github', + 'integrations/aws/ssm', + 'integrations/aws/ecr', + 'integrations/wandb', + 'integrations/hashicorp', + ], + }, + ], + }, + { + type: 'category', + label: 'Community Supported', + items: [ + 'integrations/secoda', + 'integrations/cube', + 'integrations/sdf', + 'integrations/open-metadata', + 'integrations/census', + 'integrations/deltalake', + 'integrations/hightouch', + 'integrations/wandb', + 'integrations/meltano', + 'integrations/hashicorp', + 'integrations/lakefs', + ], + }, + { + type: 'category', + label: 'All Integrations', + collapsed: true, + // link: {type: 'doc', id: 'integrations'}, + items: [ + { + type: 'autogenerated', + dirName: 'integrations', + }, + ], + }, + ], + dagsterPlus: [ + { + type: 'category', + label: 'Getting started', + collapsible: false, + className: 'category-non-collapsible', + items: [ + { + type: 'doc', + id: 'dagster-plus/whats-dagster-plus', + }, + { + type: 'doc', + id: 'dagster-plus/getting-started', + }, + ], + }, + { + type: 'category', + label: 'Features', + collapsible: false, + items: [ + { + type: 'category', + label: 'Insights', + link: { + type: 'doc', + id: 'dagster-plus/insights', + }, + items: [ + { + type: 'autogenerated', + dirName: 'dagster-plus/insights', + }, + ], + }, + { + type: 'category', + label: 'Branch Deployments (CI)', + link: { + type: 'doc', + id: 'dagster-plus/deployment/branch-deployments', + }, + items: [ + { + type: 'autogenerated', + dirName: 'dagster-plus/deployment/branch-deployments', + }, + ], + }, + { + type: 'category', + label: 'Alerts', + link: { + type: 'doc', + id: 'dagster-plus/deployment/alerts', + }, + items: [ + { + type: 'doc', + label: 'Manage alerts in the UI', + id: 'dagster-plus/deployment/alerts/ui', 
+ }, + { + type: 'doc', + label: 'Manage alerts with the CLI', + id: 'dagster-plus/deployment/alerts/cli', + }, + { + type: 'doc', + label: 'Email', + id: 'dagster-plus/deployment/alerts/email', + }, + { + type: 'doc', + label: 'Microsoft Teams', + id: 'dagster-plus/deployment/alerts/microsoft-teams', + }, + { + type: 'doc', + label: 'PagerDuty', + id: 'dagster-plus/deployment/alerts/pagerduty', + }, + { + type: 'doc', + label: 'Slack', + id: 'dagster-plus/deployment/alerts/slack', + }, + ], + }, + { + type: 'category', + label: 'Authentication & access control', + items: [ + { + type: 'category', + label: 'Role-based Access Control', + link: { + type: 'doc', + id: 'dagster-plus/access/rbac', + }, + items: [ + { + type: 'autogenerated', + dirName: 'dagster-plus/access/rbac', + }, + ], + }, + { + type: 'category', + label: 'Single Sign-on (SSO)', + items: [ + 'dagster-plus/access/authentication/azure-ad-sso', + 'dagster-plus/access/authentication/google-workspace-sso', + 'dagster-plus/access/authentication/okta-sso', + 'dagster-plus/access/authentication/onelogin-sso', + 'dagster-plus/access/authentication/pingone-sso', + ], + }, + { + type: 'category', + label: 'SCIM provisioning', + items: [ + { + type: 'link', + label: 'Azure Active Directory', + href: 'https://learn.microsoft.com/en-us/azure/active-directory/saas-apps/dagster-cloud-provisioning-tutorial', + }, + { + type: 'doc', + label: 'Okta', + id: 'dagster-plus/access/authentication/okta-scim', + }, + ], + }, + ], + }, + { + type: 'doc', + id: 'dagster-plus/saved-views', + }, + ], + }, + { + type: 'category', + label: 'Deployment', + collapsible: false, + items: [ + { + type: 'category', + label: 'Serverless', + link: { + type: 'doc', + id: 'dagster-plus/deployment/serverless', + }, + items: [ + { + type: 'autogenerated', + dirName: 'dagster-plus/deployment/serverless', + }, + ], + }, + { + type: 'category', + label: 'Hybrid', + link: { + type: 'doc', + id: 'dagster-plus/deployment/hybrid', + }, + items: [ + { + type: 'doc', + label: 'Tokens', + id: 'dagster-plus/deployment/hybrid/tokens', + }, + { + type: 'category', + label: 'Agents', + items: [ + { + type: 'autogenerated', + dirName: 'dagster-plus/deployment/hybrid/agents', + }, + ], + }, + ], + }, + { + type: 'category', + label: 'CI/CD', + items: [ + { + type: 'autogenerated', + dirName: 'dagster-plus/deployment/branch-deployments', + }, + ], + }, + { + type: 'category', + label: 'Code locations', + link: { + type: 'doc', + id: 'dagster-plus/deployment/code-locations', + }, + items: [ + { + type: 'autogenerated', + dirName: 'dagster-plus/deployment/code-locations', + }, + ], + }, + { + type: 'category', + label: 'Environment variables', + link: { + type: 'doc', + id: 'dagster-plus/deployment/environment-variables', + }, + items: [ + { + type: 'autogenerated', + dirName: 'dagster-plus/deployment/environment-variables', + }, + ], + }, + { + type: 'doc', + label: 'Settings', + id: 'dagster-plus/settings', + }, + ], + }, + ], + api: [ + { + type: 'category', + label: 'API Docs', + link: {type: 'doc', id: 'api/index'}, + collapsible: false, + items: [ + { + type: 'autogenerated', + dirName: 'api', + }, + ], + }, + ], +}; + +export default sidebars; diff --git a/docs/docs-beta/src/components/Cards.tsx b/docs/docs-beta/src/components/Cards.tsx new file mode 100644 index 0000000000000..e2c855a893cfa --- /dev/null +++ b/docs/docs-beta/src/components/Cards.tsx @@ -0,0 +1,40 @@ +import React from 'react'; +import Link from '@docusaurus/Link'; +import Heading from '@theme/Heading'; 
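+
+// Usage sketch (assumed, not part of this diff; the exact props are defined below):
+//
+//   <CardGroup cols={2}>
+//     <Card title="Quickstart" href="/getting-started/quickstart">
+//       Run your first Dagster pipeline.
+//     </Card>
+//   </CardGroup>
+//
+// `cols` maps onto the `.card-group.cols-N` grid classes in custom.scss.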
+interface CardProps {
+  title: string;
+  imagePath?: string;
+  href: string;
+  children: React.ReactNode;
+}
+
+const Card: React.FC<CardProps> = ({title, imagePath, href, children}) => (
+  <Link to={href} className="card">
+    {/* Optional illustration rendered above the card title */}
+    {imagePath && (
+      <img src={imagePath} alt={title} />
+    )}
+    <Heading as="h3">{title}</Heading>
+    <div>
+      {children}
+    </div>
+  </Link>
+);
+
+interface CardGroupProps {
+  cols: number;
+  children: React.ReactNode;
+}
+
+// Lays the Cards out in an N-column grid (see `.card-group.cols-N` in custom.scss)
+const CardGroup: React.FC<CardGroupProps> = ({cols, children}) => (
+  <div className={`card-group cols-${cols}`}>
+    {children}
+  </div>
+);
+
+export {Card, CardGroup};
diff --git a/docs/docs-beta/src/components/CodeExample.tsx b/docs/docs-beta/src/components/CodeExample.tsx
new file mode 100644
index 0000000000000..05a2577d8fa30
--- /dev/null
+++ b/docs/docs-beta/src/components/CodeExample.tsx
@@ -0,0 +1,48 @@
+import React from 'react';
+import CodeBlock from '@theme/CodeBlock';
+
+interface CodeExampleProps {
+  filePath: string;
+  language?: string;
+  title?: string;
+}
+
+// Loads a Python snippet from the docs_beta_snippets example package at build
+// time (via raw-loader) and renders it in a Docusaurus CodeBlock.
+const CodeExample: React.FC<CodeExampleProps> = ({filePath, language, title}) => {
+  const [content, setContent] = React.useState<string>('');
+  const [error, setError] = React.useState<string | null>(null);
+
+  language = language || 'python';
+
+  React.useEffect(() => {
+    // Adjust the import path to start from the docs directory
+    import(`!!raw-loader!/../../examples/docs_beta_snippets/docs_beta_snippets/${filePath}`)
+      .then((module) => {
+        // Strip lint suppressions (`# noqa`) from the displayed snippet
+        const lines = module.default.split('\n').map((line) => {
+          return line.replaceAll(/#.*?noqa.*?$/g, '');
+        });
+        // Drop everything from `if __name__ == ...` onward
+        const mainIndex = lines.findIndex((line) => line.trim().startsWith('if __name__ == '));
+        const strippedContent =
+          mainIndex !== -1 ? lines.slice(0, mainIndex).join('\n') : lines.join('\n');
+        setContent(strippedContent);
+        setError(null);
+      })
+      .catch((error) => {
+        console.error(`Error loading file: ${filePath}`, error);
+        setError(
+          `Failed to load file: ${filePath}. Please check if the file exists and the path is correct.`,
+        );
+      });
+  }, [filePath]);
+
+  if (error) {
+    return <div>{error}</div>;
+  }
+
+  return (
+    <CodeBlock language={language} title={title}>
+      {content || 'Loading...'}
+    </CodeBlock>
+  );
+};
+
+export default CodeExample;
diff --git a/docs/docs-beta/src/components/PyObject.tsx b/docs/docs-beta/src/components/PyObject.tsx
new file mode 100644
index 0000000000000..14ea55f7e69e3
--- /dev/null
+++ b/docs/docs-beta/src/components/PyObject.tsx
@@ -0,0 +1,40 @@
+import Link from '@docusaurus/Link';
+import React from 'react';
+
+export const SearchIndexContext = React.createContext(null);
+
+// Inline reference to a Python API object; linking is stubbed out until the
+// API docs search index is wired up.
+export const PyObject: React.FunctionComponent<{
+  module: string;
+  object: string;
+  method?: string;
+  displayText?: string;
+  pluralize?: boolean;
+  decorator?: boolean;
+}> = ({object, method, displayText, pluralize = false, decorator = false}) => {
+  let textValue = displayText || object;
+  if (pluralize) {
+    textValue += 's';
+  }
+  if (decorator) {
+    textValue = '@' + textValue;
+  }
+  if (method) {
+    textValue += '.' + method;
+  }
+
+  const handleClick = (e: React.MouseEvent) => {
+    e.preventDefault();
+    alert('PyObject not implemented yet');
+  };
+
+  return (
+    <Link href="#" className="pyobject" onClick={handleClick}>
+      <code>{textValue}</code>
+    </Link>
+  );
+};
diff --git a/docs/docs-beta/src/pages/index.module.css b/docs/docs-beta/src/pages/index.module.css
new file mode 100644
index 0000000000000..9f71a5da775bd
--- /dev/null
+++ b/docs/docs-beta/src/pages/index.module.css
@@ -0,0 +1,23 @@
+/**
+ * CSS files with the .module.css suffix will be treated as CSS modules
+ * and scoped locally.
+ */
+
+.heroBanner {
+  padding: 4rem 0;
+  text-align: center;
+  position: relative;
+  overflow: hidden;
+}
+
+@media screen and (max-width: 996px) {
+  .heroBanner {
+    padding: 2rem;
+  }
+}
+
+.buttons {
+  display: flex;
+  align-items: center;
+  justify-content: center;
+}
diff --git a/docs/docs-beta/src/styles/colors.scss b/docs/docs-beta/src/styles/colors.scss
new file mode 100644
index 0000000000000..98ac1f3c495ce
--- /dev/null
+++ b/docs/docs-beta/src/styles/colors.scss
@@ -0,0 +1,125 @@
+:root {
+  /* Dagster Color Palette */
+  --dagster-gray-990: rgba(3, 6, 21, 1);
+  --dagster-gray-950: rgba(13, 17, 33, 1);
+  --dagster-gray-900: rgba(23, 28, 44, 1);
+  --dagster-gray-850: rgba(33, 39, 56, 1);
+  --dagster-gray-800: rgba(43, 50, 68, 1);
+  --dagster-gray-750: rgba(53, 61, 80, 1);
+  --dagster-gray-700: rgba(63, 72, 91, 1);
+  --dagster-gray-650: rgba(73, 83, 103, 1);
+  --dagster-gray-600: rgba(83, 94, 115, 1);
+  --dagster-gray-550: rgba(93, 105, 126, 1);
+  --dagster-gray-500: rgba(103, 116, 138, 1);
+  --dagster-gray-450: rgba(118, 130, 150, 1);
+  --dagster-gray-400: rgba(133, 144, 161, 1);
+  --dagster-gray-350: rgba(149, 158, 173, 1);
+  --dagster-gray-300: rgba(164, 172, 185, 1);
+  --dagster-gray-250: rgba(179, 185, 197, 1);
+  --dagster-gray-200: rgba(194, 199, 208, 1);
+  --dagster-gray-150: rgba(209, 213, 220, 1);
+  --dagster-gray-100: rgba(225, 227, 232, 1);
+  --dagster-gray-50: rgba(240, 241, 243, 1);
+  --dagster-gray-10: rgba(247, 248, 249, 1);
+  --dagster-gray-translucent-12: rgba(103, 116, 138, 0.12);
+  --dagster-gray-translucent-15: rgba(103, 116, 138, 0.15);
+  --dagster-gray-translucent-20: rgba(103, 116, 138, 0.2);
+  --dagster-gray-translucent-25: rgba(103, 116, 138, 0.25);
+  --dagster-gray-translucent-30: rgba(103, 116, 138, 0.3);
+  --dagster-white: rgba(255, 255, 255, 1);
+
+  --dagster-blue-990: rgba(14, 15, 52, 1);
+  --dagster-blue-950: rgba(17, 18, 62, 1);
+  --dagster-blue-900: rgba(25, 24, 82, 1);
+  --dagster-blue-800: rgba(33, 30, 102, 1);
+  --dagster-blue-700: rgba(48, 43, 141, 1);
+  --dagster-blue-600: rgba(64, 55, 181, 1);
+  --dagster-blue-500: rgba(79, 67, 221, 1);
+  --dagster-blue-400:
rgba(114, 105, 228, 1); + --dagster-blue-300: rgba(149, 142, 235, 1); + --dagster-blue-200: rgba(185, 180, 241, 1); + --dagster-blue-100: rgba(220, 217, 248, 1); + --dagster-blue-50: rgba(237, 236, 252, 1); + --dagster-blue-10: rgba(246, 246, 253, 1); + --dagster-blue-translucent-12: rgba(79, 67, 221, 0.12); + --dagster-blue-translucent-15: rgba(79, 67, 221, 0.15); + --dagster-blue-translucent-20: rgba(79, 67, 221, 0.2); + --dagster-blue-translucent-25: rgba(79, 67, 221, 0.25); + --dagster-blue-translucent-30: rgba(79, 67, 221, 0.3); + + --dagster-yellow-990: rgba(36, 29, 26, 1); + --dagster-yellow-950: rgba(47, 36, 27, 1); + --dagster-yellow-900: rgba(70, 51, 30, 1); + --dagster-yellow-800: rgba(93, 66, 32, 1); + --dagster-yellow-700: rgba(138, 97, 37, 1); + --dagster-yellow-600: rgba(184, 127, 42, 1); + --dagster-yellow-500: rgba(229, 157, 47, 1); + --dagster-yellow-400: rgba(234, 177, 89, 1); + --dagster-yellow-300: rgba(239, 196, 130, 1); + --dagster-yellow-200: rgba(245, 216, 172, 1); + --dagster-yellow-100: rgba(250, 235, 213, 1); + --dagster-yellow-50: rgba(252, 245, 234, 1); + --dagster-yellow-10: rgba(252, 245, 234, 1); + --dagster-yellow-translucent-12: rgba(229, 157, 47, 0.12); + --dagster-yellow-translucent-15: rgba(229, 157, 47, 0.15); + --dagster-yellow-translucent-20: rgba(229, 157, 47, 0.2); + --dagster-yellow-translucent-25: rgba(229, 157, 47, 0.25); + --dagster-yellow-translucent-30: rgba(229, 157, 47, 0.3); + + --dagster-red-990: rgba(33, 15, 27, 1); + --dagster-red-950: rgba(44, 18, 28, 1); + --dagster-red-900: rgba(64, 24, 31, 1); + --dagster-red-800: rgba(85, 30, 34, 1); + --dagster-red-700: rgba(127, 42, 41, 1); + --dagster-red-600: rgba(168, 54, 47, 1); + --dagster-red-500: rgba(210, 66, 53, 1); + --dagster-red-400: rgba(219, 104, 93, 1); + --dagster-red-300: rgba(228, 142, 134, 1); + --dagster-red-200: rgba(237, 179, 174, 1); + --dagster-red-100: rgba(246, 217, 215, 1); + --dagster-red-50: rgba(250, 236, 235, 1); + --dagster-red-10: rgba(253, 246, 245, 1); + --dagster-red-translucent-12: rgba(210, 66, 53, 0.12); + --dagster-red-translucent-15: rgba(210, 66, 53, 0.15); + --dagster-red-translucent-20: rgba(210, 66, 53, 0.2); + --dagster-red-translucent-25: rgba(210, 66, 53, 0.25); + --dagster-red-translucent-30: rgba(210, 66, 53, 0.3); + + --dagster-green-990: rgba(9, 36, 38, 1); + --dagster-green-950: rgba(11, 46, 43, 1); + --dagster-green-900: rgba(15, 65, 54, 1); + --dagster-green-800: rgba(20, 85, 65, 1); + --dagster-green-700: rgba(28, 125, 86, 1); + --dagster-green-600: rgba(130, 224, 179, 1); + --dagster-green-500: rgba(46, 204, 129, 1); + --dagster-green-400: rgba(88, 214, 154, 1); + --dagster-green-300: rgba(130, 224, 179, 1); + --dagster-green-200: rgba(171, 235, 205, 1); + --dagster-green-100: rgba(213, 245, 230, 1); + --dagster-green-50: rgba(234, 250, 242, 1); + --dagster-green-10: rgba(245, 252, 249, 1); + --dagster-green-translucent-12: rgba(46, 204, 129, 0.12); + --dagster-green-translucent-15: rgba(46, 204, 129, 0.15); + --dagster-green-translucent-20: rgba(46, 204, 129, 0.2); + --dagster-green-translucent-25: rgba(46, 204, 129, 0.25); + --dagster-green-translucent-30: rgba(46, 204, 129, 0.3); + + --dagster-cyan-990: rgba(13, 32, 50, 1); + --dagster-cyan-950: rgba(17, 41, 60, 1); + --dagster-cyan-900: rgba(24, 58, 78, 1); + --dagster-cyan-800: rgba(31, 76, 97, 1); + --dagster-cyan-700: rgba(46, 110, 135, 1); + --dagster-cyan-600: rgba(60, 145, 172, 1); + --dagster-cyan-500: rgba(75, 180, 210, 1); + --dagster-cyan-400: rgba(111, 195, 219, 1); + 
--dagster-cyan-300: rgba(147, 210, 228, 1);
+  --dagster-cyan-200: rgba(183, 225, 237, 1);
+  --dagster-cyan-100: rgba(219, 240, 246, 1);
+  --dagster-cyan-50: rgba(237, 247, 250, 1);
+  --dagster-cyan-10: rgba(246, 251, 253, 1);
+  --dagster-cyan-translucent-12: rgba(75, 180, 210, 0.12);
+  --dagster-cyan-translucent-15: rgba(75, 180, 210, 0.15);
+  --dagster-cyan-translucent-20: rgba(75, 180, 210, 0.2);
+  --dagster-cyan-translucent-25: rgba(75, 180, 210, 0.25);
+  --dagster-cyan-translucent-30: rgba(75, 180, 210, 0.3);
+}
diff --git a/docs/docs-beta/src/styles/components/_card.scss b/docs/docs-beta/src/styles/components/_card.scss
new file mode 100644
index 0000000000000..1b1079065bba6
--- /dev/null
+++ b/docs/docs-beta/src/styles/components/_card.scss
@@ -0,0 +1,3 @@
+.card {
+  border-radius: 0px;
+}
diff --git a/docs/docs-beta/src/styles/custom.scss b/docs/docs-beta/src/styles/custom.scss
new file mode 100644
index 0000000000000..6f0e8ffe76f19
--- /dev/null
+++ b/docs/docs-beta/src/styles/custom.scss
@@ -0,0 +1,672 @@
+/**
+ * Any CSS included here will be global. The classic template
+ * bundles Infima by default. Infima is a CSS framework designed to
+ * work well for content-centric websites.
+ */
+@use 'colors'; // Import the theme colors module
+@use 'theme-globals'; //Global styles used across both themes
+@use 'theme-light'; // Import the light theme module
+@use 'theme-dark'; // Import the dark theme module
+
+/* You can override the default Infima variables here. */
+/* Some styles heavily influenced from: https://github.com/electron/website/blob/main/src/css/custom.scss */
+
+html {
+  font-family: var(--ifm-font-family-base);
+  background: var(--theme-color-background-default);
+}
+.container {
+  max-width: none;
+}
+
+article {
+  max-width: 890px;
+  margin: 0 auto;
+}
+
+hr {
+  height: 1px;
+}
+
+.navbar {
+  box-shadow: none;
+  border-bottom: 1px solid var(--theme-color-keyline);
+
+  .navbar__items {
+    max-height: 100%;
+  }
+}
+.navbar__inner {
+  max-width: 100% !important;
+}
+
+.navbar__brand {
+  margin-right: 120px;
+}
+
+.navbar__item {
+  margin-right: 16px;
+  padding-left: 0;
+  padding-right: 0;
+}
+
+.navbar__link {
+  font-weight: 500 !important;
+}
+
+.breadcrumbs {
+  display: flex;
+  flex-wrap: wrap;
+  align-items: center;
+  color: var(--theme-color-text-light);
+
+  .breadcrumbs__item:first-child {
+    display: none;
+  }
+  .breadcrumbs__item {
+    color: var(--theme-color-text-light);
+
+    a.breadcrumbs__link {
+      padding: 0;
+      background: none;
+      &:hover {
+        text-decoration: underline;
+      }
+    }
+    span.breadcrumbs__link {
+      padding: 0;
+      background: none;
+    }
+    &--active {
+      color: var(--theme-color-text-default);
+      font-weight: var(--ifm-font-weight-semibold);
+    }
+  }
+}
+
+/* Main content */
+.theme-doc-markdown {
+  margin: 0 auto;
+  margin-top: 1rem;
+
+  img {
+    border: 1px solid var(--theme-color-keyline);
+    border-radius: 8px;
+    overflow: hidden;
+  }
+}
+
+/* Custom code for PyObject */
+a.pyobject {
+  color: var(--theme-color-text-default);
+}
+
+.navbar {
+  &--dark {
+    --ifm-navbar-link-color: var(--theme-color-text-default);
+  }
+
+  &__inner {
+    max-width: 1400px;
+    margin: 0 0;
+  }
+
+  &__item {
+    border: 1px;
+  }
+
+  &__link {
+    font-weight: var(--ifm-font-weight-normal);
+    border-bottom: 2px solid transparent;
+
+    &--active {
+      color: var(--theme-color-accent-blue);
+      border-bottom-color: var(--theme-color-accent-blue);
+    }
+
+    &:hover {
+      color: var(--theme-color-accent-blue);
+    }
+  }
+}
+
+.DocSearch-Button {
+  border-radius: 8px !important;
+  background: var(--theme-color-background-default) !important;
+  border: 1px solid var(--theme-color-border-default) !important;
+  margin-left: 12px !important;
+
+  .DocSearch-Search-Icon {
+    width: 18px !important;
+  }
+
+  &:hover,
+  &:focus {
+    background: var(--theme-color-background-light) !important;
+    box-shadow: none !important;
+    border: 1px solid var(--theme-color-text-lighter) !important;
+  }
+}
+.DocSearch-Hit-source {
+  color: var(--theme-color-text-light) !important;
+}
+
+.DocSearch-MagnifierLabel {
+  color: var(--theme-color-text-default) !important;
+  width: 20px;
+}
+
+.DocSearch-Hits mark {
+  color: var(--theme-color-accent-blue) !important;
+}
+
+.DocSearch-Cancel {
+  color: var(--theme-color-text-light) !important;
+}
+
+.footer {
+  --ifm-footer-background-color: var(--theme-color-background-light);
+
+  .footer__logo {
+    max-width: 80px;
+  }
+}
+
+.markdown {
+  line-height: 150%;
+
+  code {
+    font-size: var(--ifm-code-font-size);
+    line-height: 135%;
+  }
+
+  a {
+    font-weight: var(--ifm-font-weight-semibold);
+    code {
+      font-weight: var(--ifm-font-weight-bold);
+    }
+  }
+
+  h1,
+  h1:first-child,
+  h2,
+  h3,
+  h4,
+  h5,
+  h6 {
+    --ifm-h1-font-size: 2rem;
+    --ifm-h2-font-size: 1.5rem;
+    --ifm-h3-font-size: 1.3rem;
+    --ifm-h4-font-size: 1rem;
+    --ifm-h5-font-size: 0.8rem;
+  }
+
+  // Emulate horizontal rule above h2 headers
+  h2 {
+    border-top: 1px;
+    border-top-style: solid;
+    border-top-color: var(--theme-color-keyline);
+    margin-top: 0px;
+    padding-top: calc(var(--ifm-heading-vertical-rhythm-bottom) * var(--ifm-leading));
+  }
+
+  // We want this to only apply to inline code
+  :not(pre):not(h2):not(h3):not(h4):not(h5):not(h6) > code {
+    background-color: var(--theme-color-background-blue);
+    border: 0.5px solid var(--theme-color-keyline);
+    padding: 0.1rem;
+  }
+
+  // don't apply --dagster-inline-code colors to admonitions
+  // as well
+  .admonition {
+    code {
+      color: inherit;
+    }
+  }
+}
+
+.button {
+  &--outline {
+    border: 1px solid;
+    color: var(--ifm-color-primary);
+    &:hover {
+      color: var(--ifm-color-primary-dark);
+    }
+  }
+}
+
+// sidebar
+.menu {
+  padding-right: 0.5rem !important;
+  &__link {
+    font-size: 14px;
+    font-weight: var(--ifm-font-weight-normal);
+    color: var(--theme-color-text-light);
+    &:hover {
+      color: var(--theme-color-text-default);
+      background-color: var(--theme-color-background-gray);
+    }
+    &--active {
+      font-weight: var(--ifm-font-weight-normal);
+      color: var(--theme-color-text-default);
+      background-color: var(--theme-color-background-gray);
+      &:hover {
+        background-color: var(--theme-color-background-blue);
+        color: var(--theme-color-text-default);
+      }
+    }
+  }
+}
+
+.menu__link--sublist-caret {
+  &:after {
+    background: var(--ifm-menu-link-sublist-icon) 50% / 1.25rem 1.25rem;
+    content: '';
+    display: inline-block;
+    height: 1.25rem;
+    width: 1.25rem;
+    transition: transform var(--ifm-transition-fast) linear;
+  }
+}
+
+.menu__list-item-collapsible {
+  .menu__caret:before {
+    background: var(--ifm-menu-link-sublist-icon) 50% / 1.25rem 1.25rem;
+  }
+
+  &[aria-expanded='true'] {
+    .menu__link--sublist-caret:after {
+      transform: rotate(180deg);
+    }
+  }
+}
+
+.menu__list-item:not(:first-child) {
+  margin-top: 0;
+}
+
+.theme-doc-sidebar-item-category-level-1 {
+  padding-top: 4px;
+  .menu__list {
+    border-left: 1px solid var(--theme-color-keyline);
+    margin-left: 12px;
+    padding-left: 4px;
+    margin-top: 0;
+  }
+  .menu__list-item-collapsible a {
+    font-size: 16px;
+  }
+}
+
+.theme-doc-sidebar-item-category-level-2 {
.menu__list-item-collapsible a {
+    font-size: 14px;
+    font-weight: 400;
+    color: var(--theme-color-text-light);
+  }
+}
+
+.markdown .table-of-contents {
+  li {
+    list-style: none;
+    padding-top: 4px;
+    line-height: 1;
+  }
+  li a {
+    font-weight: var(--ifm-font-weight-normal);
+  }
+}
+.table-of-contents {
+  &__link {
+    position: relative;
+  }
+  &__link:hover {
+    color: var(--theme-color-text-default);
+  }
+  &__link--active {
+    font-weight: 500;
+    padding-left: 12px;
+    transition: padding-left 0.2s;
+  }
+  &__link--active:before {
+    content: '';
+    position: absolute;
+    top: 0px;
+    left: 0px;
+    width: 2px;
+    height: 100%;
+    background: var(--theme-color-accent-blue);
+    display: block;
+  }
+}
+.pagination-nav {
+  max-width: 890px;
+  margin: 0 auto;
+
+  &__link {
+    border: 0;
+    border-radius: 8px;
+    border-top: 1px solid var(--dagster-gray-300);
+    background-color: var(--theme-color-background-default);
+    font-weight: var(--ifm-font-weight-normal);
+    transition: background-color 0.3s;
+    border: 1px solid var(--theme-color-keyline);
+  }
+  &__link:hover {
+    color: var(--theme-color-background-default);
+    background-color: var(--theme-color-background-light);
+    border: 1px solid var(--theme-color-border-hover);
+  }
+  .pagination-nav__label {
+    font-weight: var(--ifm-font-weight-normal);
+  }
+}
+
+table {
+  thead {
+    tr {
+      th {
+        font-size: 13px;
+        text-align: left;
+        line-height: 1.1;
+        font-weight: 700;
+        border: 1px solid var(--theme-color-keyline);
+        border-bottom: 2px solid var(--theme-color-keyline);
+      }
+    }
+  }
+  tbody {
+    tr {
+      td {
+        vertical-align: top;
+        font-size: 13px;
+        line-height: 1.2;
+        border: 1px solid var(--theme-color-keyline);
+      }
+      code {
+        background-color: var(--ifm-code-background) !important;
+        border: 0.1rem solid rgba(0, 0, 0, 0.1) !important;
+        padding: 0.1rem;
+      }
+    }
+  }
+}
+
+.tabs {
+  border-bottom: 1px solid var(--theme-color-keyline);
+}
+
+// NotFound Swizzle
+.not-found-title {
+  font-weight: 600;
+  font-size: 12rem;
+  color: var(--theme-color-text-blue);
+  text-shadow: 0.5rem 0.5rem var(--theme-color-accent-blue);
+}
+
+.hover-wiggle {
+  &:hover {
+    animation: wiggle 2s linear infinite;
+  }
+}
+
+.not-found-links {
+  display: flex;
+  flex-direction: row;
+  justify-content: space-between;
+  border: 1px solid var(--theme-color-keyline);
+  padding: 1rem;
+
+  a {
+    text-align: center;
+    font-weight: var(--ifm-font-weight-normal);
+
+    &:hover {
+      color: var(--theme-color-accent-blue);
+    }
+  }
+}
+
+.footer__title {
+  display: none;
+}
+.footer__links {
+  margin-bottom: 0;
+}
+.footer__bottom {
+  text-align: left;
+}
+.footer__link-item {
+  display: flex;
+  justify-content: space-between;
+
+  .footer__items {
+    display: flex;
+    flex-direction: row;
+    gap: 12px;
+    font-size: 13px;
+    a {
+      color: var(--theme-color-text-light);
+      transition: color 0.2s;
+      &:hover {
+        color: var(--theme-color-text-default);
+      }
+    }
+  }
+  .footer__items--right {
+    a {
+      opacity: 0.5;
+      transition: opacity 0.2s;
+
+      &:hover {
+        opacity: 0.7;
+      }
+    }
+  }
+}
+
+.footer .footer__logo {
+  max-width: 140px;
+}
+
+.footer__copyright {
+  font-size: 11px;
+  color: var(--theme-color-text-light);
+}
+
+@media (max-width: 950px) {
+  .navbar__items .navbar__brand {
+    margin: 0 auto;
+  }
+
+  .footer__link-item {
+    display: flex;
+    justify-content: space-between;
+    flex-direction: column;
+    align-items: center;
+  }
+  .footer__bottom {
+    text-align: center;
+  }
+}
+
+@keyframes wiggle {
+  0% {
+    transform: rotate(0deg);
+  }
+  25% {
transform: rotate(3deg); + } + 75% { + transform: rotate(-3deg); + } + 100% { + transform: rotate(0deg); + } +} + +/* API Docs */ +dl { + padding: 4px 0px 0px 4px; + border: 1px solid var(--theme-color-keyline); + font-weight: 200; + background-color: var(--theme-color-background-blue); + line-height: 1.2; + font-size: 13px; + border-radius: 4px; +} + +dt { + box-shadow: 0px 1px 0px var(--theme-color-keyline); + font-weight: 600; + font-size: 15px; + padding-bottom: 0px; +} + +dd { + background-color: var(--theme-color-background-light); + font-weight: 400; + padding: 4px; + margin-left: -2px; + line-height: 1.4; +} + +dd p { + margin: 0; +} + +dd code { + background-color: var(--theme-color-background-default); + border: 1px solid var(--theme-color-keyline); + border-radius: 4px; + padding: 0.1rem; +} + +/* Card and CardGroup styles */ +.card-group { + display: grid; + gap: 1rem; + margin-bottom: 2rem; + + &.cols-2 { + grid-template-columns: repeat(2, 1fr); + } + &.cols-3 { + grid-template-columns: repeat(3, 1fr); + } + &.cols-4 { + grid-template-columns: repeat(4, 1fr); + } + + @media (max-width: 768px) { + grid-template-columns: 1fr; + } +} + +.card { + display: block; + background-color: var(--theme-color-background-light); + border: 1px solid var(--theme-color-keyline); + border-radius: 8px; + padding: 1.5rem; + transition: all 0.3s ease; + text-decoration: none; + color: inherit; + + &:hover { + box-shadow: 0 4px 12px rgba(0, 0, 0, 0.1); + transform: translateY(-2px); + text-decoration: none; + } + + h3 { + font-size: 1.2rem; + margin-top: 0; + margin-bottom: 0.5rem; + font-weight: 600; + } + + i { + font-size: 1.5rem; + margin-bottom: 1rem; + color: var(--theme-color-accent-blue); + } + + p { + font-size: 0.9rem; + color: var(--theme-color-text-light); + margin-bottom: 1rem; + font-weight: 500; + } + + a { + display: inline-block; + color: var(--theme-color-accent-blue); + text-decoration: none; + font-weight: 500; + font-size: 0.9rem; + + &:hover { + text-decoration: underline; + } + } +} + +/** + * api docs: inline flag + * + * available options: + * - info + * - danger + * - warning + */ + +span { + &.flag { + font-weight: var(--ifm-font-weight-semibold); + border-radius: 8px; + padding-left: 4px; + padding-right: 4px; + } + + &.flag-info { + background-color: var(--theme-color-background-cyan); + color: var(--theme-color-text-cyan); + } + + &.flag-danger { + background-color: var(--theme-color-background-red); + color: var(--theme-color-text-red); + } + + &.flag-warning { + background-color: var(--theme-color-background-yellow); + color: var(--theme-color-text-yellow); + } +} + +/* Ability to hide sidebar items (e.g. 
index.md) */
+/* https://docusaurus.canny.io/feature-requests/p/hiding-parts-of-docs-in-autogenerated-sidebar */
+.hidden {
+  display: none !important;
+}
+
+/* Announcement bar */
+div[class^='announcementBar_'] {
+  color: var(--theme-color-text-default);
+  padding-top: 0.25rem;
+  padding-bottom: 0.25rem;
+  background-color: var(--theme-color-background-yellow);
+}
+
+/* Feedback navigation item */
+.feedback-nav-link {
+  color: var(--theme-color-text-yellow);
+  text-decoration: underline;
+}
diff --git a/docs/docs-beta/src/styles/theme-dark.scss b/docs/docs-beta/src/styles/theme-dark.scss
new file mode 100644
index 0000000000000..19a14df9d300d
--- /dev/null
+++ b/docs/docs-beta/src/styles/theme-dark.scss
@@ -0,0 +1,73 @@
+:root[data-theme='dark'] {
+  //Base Theme Colors
+  --theme-color-background-default: var(--dagster-gray-990);
+  --theme-color-background-light: var(--dagster-gray-950);
+  --theme-color-background-lighter: var(--dagster-gray-850);
+  --theme-color-background-red: var(--dagster-red-translucent-20);
+  --theme-color-background-green: var(--dagster-green-translucent-20);
+  --theme-color-background-blue: var(--dagster-blue-translucent-20);
+  --theme-color-background-yellow: var(--dagster-yellow-translucent-20);
+  --theme-color-background-cyan: var(--dagster-cyan-translucent-20);
+  --theme-color-background-gray: var(--dagster-gray-translucent-20);
+
+  --theme-color-link-default: var(--dagster-blue-200);
+  --theme-color-link-hover: var(--dagster-blue-400);
+
+  --theme-color-text-default: var(--dagster-white);
+  --theme-color-text-light: var(--dagster-gray-300);
+  --theme-color-text-lighter: var(--dagster-gray-500);
+  --theme-color-text-disabled: var(--dagster-gray-600);
+  --theme-color-text-red: var(--dagster-red-200);
+  --theme-color-text-green: var(--dagster-green-200);
+  --theme-color-text-blue: var(--dagster-blue-200);
+  --theme-color-text-yellow: var(--dagster-yellow-200);
+  --theme-color-text-cyan: var(--dagster-cyan-200);
+
+  --theme-color-keyline: var(--dagster-gray-translucent-30);
+  --theme-color-border-default: var(--dagster-gray-800);
+  --theme-color-border-hover: var(--dagster-gray-700);
+
+  --theme-color-accent-primary: var(--dagster-gray-150);
+  --theme-color-accent-primary-hover: var(--dagster-gray-50);
+  --theme-color-accent-reversed: var(--dagster-gray-950);
+  --theme-color-accent-reversed-hover: var(--dagster-gray-800);
+  --theme-color-accent-red: var(--dagster-red-500);
+  --theme-color-accent-red-hover: var(--dagster-red-600);
+  --theme-color-accent-green: var(--dagster-green-500);
+  --theme-color-accent-green-hover: var(--dagster-green-600);
+  --theme-color-accent-blue: var(--dagster-blue-300);
+  --theme-color-accent-blue-hover: var(--dagster-blue-600);
+  --theme-color-accent-yellow: var(--dagster-yellow-500);
+  --theme-color-accent-yellow-hover: var(--dagster-yellow-600);
+  --theme-color-accent-cyan: var(--dagster-cyan-500);
+  --theme-color-accent-cyan-hover: var(--dagster-cyan-600);
+  --theme-color-accent-gray: var(--dagster-gray-500);
+  --theme-color-accent-gray-hover: var(--dagster-gray-600);
+  --theme-color-accent-lavendar: var(--dagster-blue-300);
+  --theme-color-accent-lavendar-hover: var(--dagster-blue-200);
+
+  //App
+  --ifm-background-color: var(--theme-color-background-default);
+  --ifm-link-color: var(--theme-color-link-default);
+  --ifm-toc-border-color: var(--theme-color-keyline);
+  --docusaurus-highlighted-code-line-bg: var(--theme-color-background-gray);
+
+  //Top Navbar
+  --ifm-navbar-height: 60px;
+  --ifm-navbar-background-color:
var(--theme-color-background-default); + + //Left Menu + --ifm-menu-color: var(--theme-color-text-light); + --ifm-hover-overlay: var(--theme-color-background-blue); + --ifm-menu-color-active: var(--theme-color-background-blue); + + //hr + --ifm-hr-background-color: var(--theme-color-background-lighter); + + //invert footer icon colors + .footer__items--right { + img { + filter: invert(1); + } + } +} diff --git a/docs/docs-beta/src/styles/theme-globals.scss b/docs/docs-beta/src/styles/theme-globals.scss new file mode 100644 index 0000000000000..d9a029add958b --- /dev/null +++ b/docs/docs-beta/src/styles/theme-globals.scss @@ -0,0 +1,45 @@ +@font-face { + font-family: 'Geist'; + src: url('/fonts/GeistVF.woff') format('woff'); +} + +@font-face { + font-family: 'Geist-Mono'; + src: url('/fonts/GeistMonoVF.woff') format('woff'); +} + +//Global Styles + +:root { + --theme-font-base: 'Geist', 'Inter', 'Arial', sans-serif; + --theme-font-mono: 'Geist-Mono', monospace; + --theme-font-size-base: 15px; + --theme-font-size-sm: 13px; + --theme-font-size-lg: 18px; + --ifm-font-weight-semibold: 600; + --ifm-font-family-base: var(--theme-font-base); + --ifm-heading-font-family: var(--theme-font-base); + --ifm-font-family-monospace: var(--theme-font-mono); + --ifm-code-font-size: 86%; + --ifm-background-color: var(--theme-accent-red); + --ifm-navbar-item-padding-horizontal: 0; + --ifm-tabs-color-active: var(--theme-color-accent-blue); + --ifm-tabs-color-active-border: var(--theme-color-accent-blue); + --ifm-tabs-color: var(--theme-color-text-default); + --ifm-tabs-color-hover: var(--theme-color-background-light); + --ifm-table-head-color: var(--theme-color-text-default); + --ifm-table-head-background: var(--theme-color-background-light); + --ifm-table-stripe-background: var(--theme-color-background-light); + --ifm-footer-padding-vertical: 24px; + --ifm-footer-padding-horizontal: 0px; + --ifm-footer-link-color: var(--theme-color-text-lighter); + --ifm-global-spacing: 24px; + --ifm-navbar-padding-vertical: 0px; + --ifm-navbar-item-padding-horizontal: 0px; + --ifm-navbar-item-padding-vertical: 20px; + --docsearch-highlight-color: var(--theme-color-background-gray) !important; + --docsearch-hit-active-color: var(--theme-color-text-default) !important; + --ifm-card-background-color: var(--theme-color-background-light); + --ifm-card-border-color: var(--theme-color-keyline); + --ifm-alert-padding-vertical: 16px; +} diff --git a/docs/docs-beta/src/styles/theme-light.scss b/docs/docs-beta/src/styles/theme-light.scss new file mode 100644 index 0000000000000..a58ce212172bc --- /dev/null +++ b/docs/docs-beta/src/styles/theme-light.scss @@ -0,0 +1,107 @@ +:root { + //***LIGHT THEME*** + --theme-color-background-default: var(--dagster-white); + --theme-color-background-light: var(--dagster-gray-10); + --theme-color-background-lighter: var(--dagster-gray-100); + --theme-color-background-red: var(--dagster-red-translucent-12); + --theme-color-background-green: var(--dagster-green-translucent-12); + --theme-color-background-blue: var(--dagster-blue-translucent-12); + --theme-color-background-yellow: var(--dagster-yellow-translucent-12); + --theme-color-background-cyan: var(--dagster-cyan-translucent-12); + --theme-color-background-gray: var(--dagster-gray-translucent-12); + + --theme-color-link-default: var(--dagster-blue-700); + --theme-color-link-hover: var(--dagster-blue-500); + + --theme-color-text-default: var(--dagster-gray-990); + --theme-color-text-light: var(--dagster-gray-700); + --theme-color-text-lighter: 
var(--dagster-gray-550); + --theme-color-text-disabled: var(--dagster-gray-400); + --theme-color-text-red: var(--dagster-red-700); + --theme-color-text-green: var(--dagster-green-700); + --theme-color-text-blue: var(--dagster-blue-700); + --theme-color-text-yellow: var(--dagster-yellow-700); + --theme-color-text-cyan: var(--dagster-cyan-700); + + --theme-color-keyline: var(--dagster-gray-translucent-12); + --theme-color-border-default: var(--dagster-gray-200); + --theme-color-border-hover: var(--dagster-gray-300); + + --theme-color-accent-primary: var(--dagster-gray-950); + --theme-color-accent-primary-hover: var(--dagster-gray-800); + --theme-color-accent-reversed: var(--dagster-gray-10); + --theme-color-accent-reversed-hover: var(--dagster-white); + --theme-color-accent-red: var(--dagster-red-500); + --theme-color-accent-red-hover: var(--dagster-red-400); + --theme-color-accent-green: var(--dagster-green-500); + --theme-color-accent-green-hover: var(--dagster-green-400); + --theme-color-accent-blue: var(--dagster-blue-500); + --theme-color-accent-blue-hover: var(--dagster-blue-400); + --theme-color-accent-yellow: var(--dagster-yellow-500); + --theme-color-accent-yellow-hover: var(--dagster-yellow-400); + --theme-color-accent-cyan: var(--dagster-cyan-500); + --theme-color-accent-cyan-hover: var(--dagster-cyan-400); + --theme-color-accent-gray: var(--dagster-gray-500); + --theme-color-accent-gray-hover: var(--dagster-gray-400); + --theme-color-accent-lavendar: var(--dagster-blue-200); + --theme-color-accent-lavendar-hover: var(--dagster-blue-100); + + // modified base colors + --ifm-color-primary: var(--theme-color-accent-primary); + --ifm-color-primary-dark: var(--dagster-gray-850); + --ifm-color-primary-darker: var(--dagster-gray-900); + --ifm-color-primary-darkest: var(--dagster-gray-990); + --ifm-color-primary-light: var(--dagster-gray-100); + --ifm-color-primary-lighter: var(--dagster-gray-50); + --ifm-color-primary-lightest: var(--dagster-white); + + //App + --ifm-background-color: var(--theme-color-background-default); + --ifm-link-color: var(--theme-color-link-default); + --ifm-toc-link-color: var(--theme-color-text-light); + --ifm-toc-border-color: var(--theme-color-keyline); + --ifm-navbar-padding-horizontal: 20px; + --ifm-navbar-link-color: var(--theme-color-text-light); + --ifm-navbar-link-hover-color: var(--theme-color-text-default); + + //Top Navbar + --ifm-navbar-height: 60px; + --ifm-navbar-background-color: var(--theme-color-background-default); + + //Left Menu + --ifm-menu-color: var(--theme-color-text-light); + --ifm-hover-overlay: var(--theme-color-background-blue); + --ifm-menu-color-active: var(--theme-color-background-blue); + + //hr + --ifm-hr-background-color: var(--theme-color-background-light); + + // docusaurus + --docusaurus-highlighted-code-line-bg: var(--theme-color-background-gray); + + // infima shadow levels + // generated from https://www.joshwcomeau.com/shadow-palette/ + --shadow-color: 0deg 0% 63%; + --shadow-elevation-low: 0px 1px 1px hsl(var(--shadow-color) / 0.07), + 0px 1.4px 1.4px -1.8px hsl(var(--shadow-color) / 0.06), + -0.1px 3px 2.9px -3.5px hsl(var(--shadow-color) / 0.04); + --shadow-elevation-medium: 0px 1px 1px hsl(var(--shadow-color) / 0.06), + 0px 1.8px 1.7px -0.9px hsl(var(--shadow-color) / 0.05), + -0.1px 3.5px 3.4px -1.8px hsl(var(--shadow-color) / 0.05), + -0.1px 7.5px 7.3px -2.7px hsl(var(--shadow-color) / 0.04), + -0.3px 15px 14.5px -3.5px hsl(var(--shadow-color) / 0.03); + --shadow-elevation-high: 0px 1px 1px hsl(var(--shadow-color) 
/ 0.06), + 0px 2.1px 2px -0.4px hsl(var(--shadow-color) / 0.05), + -0.1px 3.4px 3.3px -0.8px hsl(var(--shadow-color) / 0.05), + -0.1px 5.2px 5px -1.2px hsl(var(--shadow-color) / 0.05), + -0.1px 7.9px 7.6px -1.6px hsl(var(--shadow-color) / 0.04), + -0.2px 12px 11.6px -2px hsl(var(--shadow-color) / 0.04), + -0.3px 17.9px 17.3px -2.4px hsl(var(--shadow-color) / 0.04), + -0.5px 25.9px 25.1px -2.8px hsl(var(--shadow-color) / 0.03), + -0.7px 36.5px 35.3px -3.2px hsl(var(--shadow-color) / 0.03), + -0.9px 50px 48.4px -3.5px hsl(var(--shadow-color) / 0.03); + + --ifm-global-shadow-lw: var(--shadow-elevation-low); + --ifm-global-shadow-md: var(--shadow-elevation-medium); + --ifm-global-shadow-tl: var(--shadow-elevation-high); +} diff --git a/docs/docs-beta/src/theme/MDXComponents.tsx b/docs/docs-beta/src/theme/MDXComponents.tsx new file mode 100644 index 0000000000000..d26d4fe3c81ca --- /dev/null +++ b/docs/docs-beta/src/theme/MDXComponents.tsx @@ -0,0 +1,19 @@ +// Import the original mapper +import MDXComponents from '@theme-original/MDXComponents'; +import { PyObject } from '../components/PyObject'; +import CodeExample from '../components/CodeExample'; +import Tabs from '@theme/Tabs'; +import TabItem from '@theme/TabItem'; +import TOCInline from '@theme/TOCInline'; +import Link from '@docusaurus/Link'; + +export default { + // Re-use the default mapping + ...MDXComponents, + PyObject, + Tabs, + TabItem, + CodeExample, + TOCInline, + Link, +}; diff --git a/docs/docs-beta/src/theme/NotFound/Content/index.tsx b/docs/docs-beta/src/theme/NotFound/Content/index.tsx new file mode 100644 index 0000000000000..b83cdc0b20424 --- /dev/null +++ b/docs/docs-beta/src/theme/NotFound/Content/index.tsx @@ -0,0 +1,34 @@ +import React from 'react'; +import clsx from 'clsx'; +import Link from '@docusaurus/Link'; +import type {Props} from '@theme/NotFound/Content'; +import Heading from '@theme/Heading'; + +export default function NotFoundContent({className}: Props): JSX.Element { + return ( +
+    <main className={clsx('container margin-vert--xl', className)}>
+      <div className="row">
+        <div className="col col--6 col--offset-3">
+          <Heading as="h1" className="not-found-title hover-wiggle">
+            404
+          </Heading>
+          <p>
+            We can't seem to find what you're looking for.
+          </p>
+          <p>
+            {/* assumed link target: the dagster-io/dagster issue tracker */}
+            If you believe that this is an error—we would greatly appreciate it if you{' '}
+            <Link to="https://github.com/dagster-io/dagster/issues">opened a GitHub issue</Link>
+            .
+          </p>
+          <div className="not-found-links">
+            {/* destinations assumed from the doc IDs registered in sidebars.ts */}
+            <Link to="/">Welcome to Dagster</Link>
+            <Link to="/getting-started/quickstart">Build your first Dagster project</Link>
+            <Link to="/tutorial/tutorial-etl">Build your first ETL pipeline</Link>
+          </div>
+        </div>
+      </div>
+    </main>
+  );
+}
diff --git a/docs/docs-beta/src/theme/NotFound/index.tsx b/docs/docs-beta/src/theme/NotFound/index.tsx
new file mode 100644
index 0000000000000..0a43859106300
--- /dev/null
+++ b/docs/docs-beta/src/theme/NotFound/index.tsx
@@ -0,0 +1,20 @@
+import React from 'react';
+import {translate} from '@docusaurus/Translate';
+import {PageMetadata} from '@docusaurus/theme-common';
+import Layout from '@theme/Layout';
+import NotFoundContent from '@theme/NotFound/Content';
+
+export default function Index(): JSX.Element {
+  const title = translate({
+    id: 'theme.NotFound.title',
+    message: 'Page Not Found',
+  });
+  return (
+    <>
+      <PageMetadata title={title} />
+      <Layout>
+        <NotFoundContent />
+      </Layout>
+    </>
+  );
+}
diff --git a/examples/docs_snippets/docs_snippets_tests/tutorial_tests/saving/__init__.py b/docs/docs-beta/static/.nojekyll
similarity index 100%
rename from examples/docs_snippets/docs_snippets_tests/tutorial_tests/saving/__init__.py
rename to docs/docs-beta/static/.nojekyll
diff --git a/docs/docs-beta/static/fonts/GeistMonoVF.woff b/docs/docs-beta/static/fonts/GeistMonoVF.woff
new file mode 100644
index 0000000000000..f2ae185cbfd16
Binary files /dev/null and b/docs/docs-beta/static/fonts/GeistMonoVF.woff differ
diff --git a/docs/docs-beta/static/fonts/GeistVF.woff b/docs/docs-beta/static/fonts/GeistVF.woff
new file mode 100644
index 0000000000000..1b62daacff96d
Binary files /dev/null and b/docs/docs-beta/static/fonts/GeistVF.woff differ
diff --git a/docs/docs-beta/static/icons/arrow_drop_down.svg b/docs/docs-beta/static/icons/arrow_drop_down.svg
new file mode 100644
index 0000000000000..44b925d6058a4
--- /dev/null
+++ b/docs/docs-beta/static/icons/arrow_drop_down.svg
@@ -0,0 +1,3 @@
+
+
+
diff --git a/docs/docs-beta/static/icons/chevron.svg b/docs/docs-beta/static/icons/chevron.svg
new file mode 100644
index 0000000000000..9f64564e1f0e1
--- /dev/null
+++ b/docs/docs-beta/static/icons/chevron.svg
@@ -0,0 +1,3 @@
+
+
+
diff --git a/docs/docs-beta/static/icons/github.svg b/docs/docs-beta/static/icons/github.svg
new file mode 100644
index 0000000000000..085c99b105b5d
--- /dev/null
+++ b/docs/docs-beta/static/icons/github.svg
@@ -0,0 +1,6 @@
+
+
+
+
+
+
diff --git a/docs/docs-beta/static/icons/slack.svg b/docs/docs-beta/static/icons/slack.svg
new file mode 100644
index 0000000000000..4db5a07ba6c75
--- /dev/null
+++ b/docs/docs-beta/static/icons/slack.svg
@@ -0,0 +1,6 @@
+
+
+
+
+
+
diff --git a/docs/docs-beta/static/icons/twitter.svg b/docs/docs-beta/static/icons/twitter.svg
new file mode 100644
index 0000000000000..71441d75d83f4
--- /dev/null
+++ b/docs/docs-beta/static/icons/twitter.svg
@@ -0,0 +1,3 @@
+
+
+
diff --git a/docs/docs-beta/static/icons/youtube.svg b/docs/docs-beta/static/icons/youtube.svg
new file mode 100644
index 0000000000000..30839d65b2d3d
--- /dev/null
+++ b/docs/docs-beta/static/icons/youtube.svg
@@ -0,0 +1,3 @@
+
+
+
diff --git a/docs/docs-beta/static/images/getting-started/quickstart/dagster-ui-start.png b/docs/docs-beta/static/images/getting-started/quickstart/dagster-ui-start.png
new file mode 100644
index 0000000000000..ed8c8c927b74f
Binary files /dev/null and b/docs/docs-beta/static/images/getting-started/quickstart/dagster-ui-start.png differ
diff --git a/docs/docs-beta/static/img/dagster-cowboy.png b/docs/docs-beta/static/img/dagster-cowboy.png
new file mode 100644
index 0000000000000..6811b05d24df1
Binary files /dev/null and b/docs/docs-beta/static/img/dagster-cowboy.png differ
diff --git a/docs/docs-beta/static/img/dagster-docs-logo-dark.svg b/docs/docs-beta/static/img/dagster-docs-logo-dark.svg
new file mode 100644
index 0000000000000..10062079d9c3b --- /dev/null +++ b/docs/docs-beta/static/img/dagster-docs-logo-dark.svg @@ -0,0 +1,29 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/docs/docs-beta/static/img/dagster-docs-logo.svg b/docs/docs-beta/static/img/dagster-docs-logo.svg new file mode 100644 index 0000000000000..79baa887b5391 --- /dev/null +++ b/docs/docs-beta/static/img/dagster-docs-logo.svg @@ -0,0 +1,29 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/docs/docs-beta/static/img/dagster_labs-primary-horizontal.svg b/docs/docs-beta/static/img/dagster_labs-primary-horizontal.svg new file mode 100644 index 0000000000000..8bba1584587c5 --- /dev/null +++ b/docs/docs-beta/static/img/dagster_labs-primary-horizontal.svg @@ -0,0 +1,31 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/docs/docs-beta/static/img/dagster_labs-reversed-horizontal.svg b/docs/docs-beta/static/img/dagster_labs-reversed-horizontal.svg new file mode 100644 index 0000000000000..3b726278795c3 --- /dev/null +++ b/docs/docs-beta/static/img/dagster_labs-reversed-horizontal.svg @@ -0,0 +1,31 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/docs/docs-beta/static/img/favicon.ico b/docs/docs-beta/static/img/favicon.ico new file mode 100644 index 0000000000000..7721e27046004 Binary files /dev/null and b/docs/docs-beta/static/img/favicon.ico differ diff --git a/docs/docs-beta/static/img/getting-started/icon-assets.svg b/docs/docs-beta/static/img/getting-started/icon-assets.svg new file mode 100644 index 0000000000000..70d93cbf6da54 --- /dev/null +++ b/docs/docs-beta/static/img/getting-started/icon-assets.svg @@ -0,0 +1,3 @@ + + + diff --git a/docs/docs-beta/static/img/getting-started/icon-github.svg b/docs/docs-beta/static/img/getting-started/icon-github.svg new file mode 100644 index 0000000000000..4fe13676e4f25 --- /dev/null +++ b/docs/docs-beta/static/img/getting-started/icon-github.svg @@ -0,0 +1,6 @@ + + + + + + diff --git a/docs/docs-beta/static/img/getting-started/icon-plus.svg b/docs/docs-beta/static/img/getting-started/icon-plus.svg new file mode 100644 index 0000000000000..9caf5adb44bf0 --- /dev/null +++ b/docs/docs-beta/static/img/getting-started/icon-plus.svg @@ -0,0 +1,3 @@ + + + diff --git a/docs/docs-beta/static/img/getting-started/icon-slack.svg b/docs/docs-beta/static/img/getting-started/icon-slack.svg new file mode 100644 index 0000000000000..0e77f6567c543 --- /dev/null +++ b/docs/docs-beta/static/img/getting-started/icon-slack.svg @@ -0,0 +1,6 @@ + + + + + + diff --git a/docs/docs-beta/static/img/getting-started/icon-start.svg b/docs/docs-beta/static/img/getting-started/icon-start.svg new file mode 100644 index 0000000000000..6496929a6a918 --- /dev/null +++ b/docs/docs-beta/static/img/getting-started/icon-start.svg @@ -0,0 +1,3 @@ + + + diff --git a/docs/docs-beta/static/img/getting-started/icon-youtube.svg b/docs/docs-beta/static/img/getting-started/icon-youtube.svg new file mode 100644 index 0000000000000..3d4b2ef0593ca --- /dev/null +++ b/docs/docs-beta/static/img/getting-started/icon-youtube.svg @@ -0,0 +1,3 @@ + + + diff --git a/docs/docs-beta/static/img/getting-started/lineage-dark.jpg b/docs/docs-beta/static/img/getting-started/lineage-dark.jpg new file mode 100644 index 0000000000000..733bc97ff0a8e Binary files /dev/null and b/docs/docs-beta/static/img/getting-started/lineage-dark.jpg differ diff --git a/docs/docs-beta/static/img/getting-started/lineage-light.jpg 
b/docs/docs-beta/static/img/getting-started/lineage-light.jpg new file mode 100644 index 0000000000000..fa6beff5eade7 Binary files /dev/null and b/docs/docs-beta/static/img/getting-started/lineage-light.jpg differ diff --git a/docs/docs-beta/static/img/logo.svg b/docs/docs-beta/static/img/logo.svg new file mode 100644 index 0000000000000..d2ae628553a7d --- /dev/null +++ b/docs/docs-beta/static/img/logo.svg @@ -0,0 +1,11 @@ + + + + + + + + + + + diff --git a/docs/docs-beta/static/img/placeholder.svg b/docs/docs-beta/static/img/placeholder.svg new file mode 100644 index 0000000000000..6feb0acc979eb --- /dev/null +++ b/docs/docs-beta/static/img/placeholder.svg @@ -0,0 +1 @@ + \ No newline at end of file diff --git a/docs/docs-beta/tsconfig.json b/docs/docs-beta/tsconfig.json new file mode 100644 index 0000000000000..ad9c629417c97 --- /dev/null +++ b/docs/docs-beta/tsconfig.json @@ -0,0 +1,11 @@ +{ + "extends": "@docusaurus/tsconfig", + "compilerOptions": { + "baseUrl": ".", + "paths": { + "@/*": ["src/*"] + }, + "types": ["node"] + }, + "exclude": ["**/node_modules/*", "blog", "docs", "build", "i18n"] +} diff --git a/docs/docs-beta/yarn.lock b/docs/docs-beta/yarn.lock new file mode 100644 index 0000000000000..e1639c30c79f6 --- /dev/null +++ b/docs/docs-beta/yarn.lock @@ -0,0 +1,15727 @@ +# This file is generated by running "yarn install" inside your project. +# Manual changes might be lost - proceed with caution! + +__metadata: + version: 8 + cacheKey: 10c0 + +"@algolia/autocomplete-core@npm:1.9.3": + version: 1.9.3 + resolution: "@algolia/autocomplete-core@npm:1.9.3" + dependencies: + "@algolia/autocomplete-plugin-algolia-insights": "npm:1.9.3" + "@algolia/autocomplete-shared": "npm:1.9.3" + checksum: 10c0/a751b20f15c9a30b8b2d5a4f1f62fb4dbd012fb7ffec1b12308d6e7388b5a4dc83af52176634f17facb57a7727204843c5aa2f6e80efafaaf244275f44af11d9 + languageName: node + linkType: hard + +"@algolia/autocomplete-plugin-algolia-insights@npm:1.9.3": + version: 1.9.3 + resolution: "@algolia/autocomplete-plugin-algolia-insights@npm:1.9.3" + dependencies: + "@algolia/autocomplete-shared": "npm:1.9.3" + peerDependencies: + search-insights: ">= 1 < 3" + checksum: 10c0/574196f66fe828be1029439032376685020524d6c729dea99caef336cc7be244d2539fa91b3fe80db80efe3420c2c05063cab3534514be6c637bf1914b17a6f6 + languageName: node + linkType: hard + +"@algolia/autocomplete-preset-algolia@npm:1.9.3": + version: 1.9.3 + resolution: "@algolia/autocomplete-preset-algolia@npm:1.9.3" + dependencies: + "@algolia/autocomplete-shared": "npm:1.9.3" + peerDependencies: + "@algolia/client-search": ">= 4.9.1 < 6" + algoliasearch: ">= 4.9.1 < 6" + checksum: 10c0/38c1872db4dae69b4eec622db940c7a992d8530e33fbac7df593473ef404312076d9933b4a7ea25c2d401ea5b62ebd64b56aa25b5cdd8e8ba3fd309a39d9d816 + languageName: node + linkType: hard + +"@algolia/autocomplete-shared@npm:1.9.3": + version: 1.9.3 + resolution: "@algolia/autocomplete-shared@npm:1.9.3" + peerDependencies: + "@algolia/client-search": ">= 4.9.1 < 6" + algoliasearch: ">= 4.9.1 < 6" + checksum: 10c0/1aa926532c32be6bb5384c8c0ae51a312c9d79ed7486371218dfcb61c8ea1ed46171bdc9f9b596a266aece104a0ef76d6aac2f9a378a5a6eb4460e638d59f6ae + languageName: node + linkType: hard + +"@algolia/cache-browser-local-storage@npm:4.24.0": + version: 4.24.0 + resolution: "@algolia/cache-browser-local-storage@npm:4.24.0" + dependencies: + "@algolia/cache-common": "npm:4.24.0" + checksum: 
10c0/68823c3b1c07dab093de98e678e2ff7fcf7a40915a157715f6f51d073e3865086be98cbbe554b7bf9e0514db5dd9e726033e27e566d9e5db059cb5059c3436cc + languageName: node + linkType: hard + +"@algolia/cache-common@npm:4.24.0": + version: 4.24.0 + resolution: "@algolia/cache-common@npm:4.24.0" + checksum: 10c0/ad481ad50d7ea92d0cce525757627f4a647b5373dc6d3cbed6405d05cb83f21a110919e7133e5233d5b13c2c8f59ed9e927efdbc82e70571707709075b07d2c6 + languageName: node + linkType: hard + +"@algolia/cache-in-memory@npm:4.24.0": + version: 4.24.0 + resolution: "@algolia/cache-in-memory@npm:4.24.0" + dependencies: + "@algolia/cache-common": "npm:4.24.0" + checksum: 10c0/2956600b2722f113373dbb71449f546afb5a0fb1a3d1558a1a3e957b7a630d1f25045c29646c8dbb44cdffe6ff4c9d1219bf63fc9fd8e4d5467381c7150e09f9 + languageName: node + linkType: hard + +"@algolia/client-account@npm:4.24.0": + version: 4.24.0 + resolution: "@algolia/client-account@npm:4.24.0" + dependencies: + "@algolia/client-common": "npm:4.24.0" + "@algolia/client-search": "npm:4.24.0" + "@algolia/transporter": "npm:4.24.0" + checksum: 10c0/3dd52dd692a2194eb45844280e6261192d5a4ef99aec729a09a01da5cf071fd77b37c6d164bf8877823efc1484d576068d76ada764a4f0624238a3475bc199b2 + languageName: node + linkType: hard + +"@algolia/client-analytics@npm:4.24.0": + version: 4.24.0 + resolution: "@algolia/client-analytics@npm:4.24.0" + dependencies: + "@algolia/client-common": "npm:4.24.0" + "@algolia/client-search": "npm:4.24.0" + "@algolia/requester-common": "npm:4.24.0" + "@algolia/transporter": "npm:4.24.0" + checksum: 10c0/8d02e6d0eb0dcde099832c62fa7d7e9910b2757b4d37e07e1eefb65a12fef7e7ce3d73fda23e8ee02d53953a91efc15086016b1af5e9fea9227dfc0fc61c9f63 + languageName: node + linkType: hard + +"@algolia/client-common@npm:4.24.0": + version: 4.24.0 + resolution: "@algolia/client-common@npm:4.24.0" + dependencies: + "@algolia/requester-common": "npm:4.24.0" + "@algolia/transporter": "npm:4.24.0" + checksum: 10c0/9e75d0bb51bb04f099e823e4397d1bac6659e1ecb7c7a73a5eaf9153632d544bd6c62a4961b606490220b236361eb8b7b77a5e4c47f12aefdd2952b14ce2fd18 + languageName: node + linkType: hard + +"@algolia/client-personalization@npm:4.24.0": + version: 4.24.0 + resolution: "@algolia/client-personalization@npm:4.24.0" + dependencies: + "@algolia/client-common": "npm:4.24.0" + "@algolia/requester-common": "npm:4.24.0" + "@algolia/transporter": "npm:4.24.0" + checksum: 10c0/9193e032841ae991ce6dd8c8988608d0d83a6785681abf26055812506aaf070db8d8f44403d0270384ff39530677603d103c330a869a397181d594bebe46b4b0 + languageName: node + linkType: hard + +"@algolia/client-search@npm:4.24.0": + version: 4.24.0 + resolution: "@algolia/client-search@npm:4.24.0" + dependencies: + "@algolia/client-common": "npm:4.24.0" + "@algolia/requester-common": "npm:4.24.0" + "@algolia/transporter": "npm:4.24.0" + checksum: 10c0/d161235014fa73acc0ff04d737c695b7357c060d31db6d602464b27ba846208c6aeb35b179e76d4c33b51329b77de0c460f6cb21b66d364c18a5534874c7b987 + languageName: node + linkType: hard + +"@algolia/events@npm:^4.0.1": + version: 4.0.1 + resolution: "@algolia/events@npm:4.0.1" + checksum: 10c0/f398d815c6ed21ac08f6caadf1e9155add74ac05d99430191c3b1f1335fd91deaf468c6b304e6225c9885d3d44c06037c53def101e33d9c22daff175b2a65ca9 + languageName: node + linkType: hard + +"@algolia/logger-common@npm:4.24.0": + version: 4.24.0 + resolution: "@algolia/logger-common@npm:4.24.0" + checksum: 10c0/1ebe93901a2b3ce41696b535d028337c1c6a98a4262868117c16dd603cc8bb106b840e45cf53c08d098cf518e07bedc64a59cc86bef18795dc49031c2c208d31 + languageName: node + 
linkType: hard + +"@algolia/logger-console@npm:4.24.0": + version: 4.24.0 + resolution: "@algolia/logger-console@npm:4.24.0" + dependencies: + "@algolia/logger-common": "npm:4.24.0" + checksum: 10c0/fdfa3983e6c38cc7b69d66e1085ac702e009d693bd49d64b27cad9ba4197788a8784529a8ed9c25e6ccd51cc4ad3a2427241ecc322c22ca2c8ce6a8d4d94fe69 + languageName: node + linkType: hard + +"@algolia/recommend@npm:4.24.0": + version: 4.24.0 + resolution: "@algolia/recommend@npm:4.24.0" + dependencies: + "@algolia/cache-browser-local-storage": "npm:4.24.0" + "@algolia/cache-common": "npm:4.24.0" + "@algolia/cache-in-memory": "npm:4.24.0" + "@algolia/client-common": "npm:4.24.0" + "@algolia/client-search": "npm:4.24.0" + "@algolia/logger-common": "npm:4.24.0" + "@algolia/logger-console": "npm:4.24.0" + "@algolia/requester-browser-xhr": "npm:4.24.0" + "@algolia/requester-common": "npm:4.24.0" + "@algolia/requester-node-http": "npm:4.24.0" + "@algolia/transporter": "npm:4.24.0" + checksum: 10c0/685fb5c1d85d7b9fd39d9246b49da5be4199fecc144bb350ed92fc191b66e4e1101ee6df9ca857ac5096f587638fa3366e01ddca0258f11000aa092ed68daea3 + languageName: node + linkType: hard + +"@algolia/requester-browser-xhr@npm:4.24.0": + version: 4.24.0 + resolution: "@algolia/requester-browser-xhr@npm:4.24.0" + dependencies: + "@algolia/requester-common": "npm:4.24.0" + checksum: 10c0/2d277b291bcc0a388f114116879c15a96c057f698b026c32e719b354c2e2e03e05b3c304f45d2354eb4dd8dfa519d481af51ce8ef19b6fb4fd6d384cf41373de + languageName: node + linkType: hard + +"@algolia/requester-common@npm:4.24.0": + version: 4.24.0 + resolution: "@algolia/requester-common@npm:4.24.0" + checksum: 10c0/cf88ca1f04f4243515bbfa05d7cf51afe6a57904390d9e1ccab799bae20f6fa77e954d9eee9d5c718086582aeb478e271ccf1d5a6a5ab943494250dce820268e + languageName: node + linkType: hard + +"@algolia/requester-node-http@npm:4.24.0": + version: 4.24.0 + resolution: "@algolia/requester-node-http@npm:4.24.0" + dependencies: + "@algolia/requester-common": "npm:4.24.0" + checksum: 10c0/e9cef1463f29035a44f12941ddeb343a213ff512c61ade46a07db19b2023f49a5ac12024a3f56d8b9c0c5b2bd32466030c5e27b26a6a6e17773b810388ddb3b7 + languageName: node + linkType: hard + +"@algolia/transporter@npm:4.24.0": + version: 4.24.0 + resolution: "@algolia/transporter@npm:4.24.0" + dependencies: + "@algolia/cache-common": "npm:4.24.0" + "@algolia/logger-common": "npm:4.24.0" + "@algolia/requester-common": "npm:4.24.0" + checksum: 10c0/9eee8e6613c8d2a5562e4df284dc7b0804a7bf80586fd8512ad769dc4829f947a334480378d94efd3cc57ca4d400886eb677786a3c5664f85881093f9e27cab7 + languageName: node + linkType: hard + +"@ampproject/remapping@npm:^2.2.0": + version: 2.3.0 + resolution: "@ampproject/remapping@npm:2.3.0" + dependencies: + "@jridgewell/gen-mapping": "npm:^0.3.5" + "@jridgewell/trace-mapping": "npm:^0.3.24" + checksum: 10c0/81d63cca5443e0f0c72ae18b544cc28c7c0ec2cea46e7cb888bb0e0f411a1191d0d6b7af798d54e30777d8d1488b2ec0732aac2be342d3d7d3ffd271c6f489ed + languageName: node + linkType: hard + +"@babel/code-frame@npm:^7.0.0, @babel/code-frame@npm:^7.16.0, @babel/code-frame@npm:^7.21.4, @babel/code-frame@npm:^7.24.7, @babel/code-frame@npm:^7.8.3": + version: 7.24.7 + resolution: "@babel/code-frame@npm:7.24.7" + dependencies: + "@babel/highlight": "npm:^7.24.7" + picocolors: "npm:^1.0.0" + checksum: 10c0/ab0af539473a9f5aeaac7047e377cb4f4edd255a81d84a76058595f8540784cc3fbe8acf73f1e073981104562490aabfb23008cd66dc677a456a4ed5390fdde6 + languageName: node + linkType: hard + +"@babel/compat-data@npm:^7.22.6, @babel/compat-data@npm:^7.25.2": + 
version: 7.25.2 + resolution: "@babel/compat-data@npm:7.25.2" + checksum: 10c0/5bf1f14d6e5f0d37c19543e99209ff4a94bb97915e1ce01e5334a144aa08cd56b6e62ece8135dac77e126723d63d4d4b96fc603a12c43b88c28f4b5e070270c5 + languageName: node + linkType: hard + +"@babel/core@npm:^7.21.3, @babel/core@npm:^7.23.3, @babel/core@npm:^7.25.2": + version: 7.25.2 + resolution: "@babel/core@npm:7.25.2" + dependencies: + "@ampproject/remapping": "npm:^2.2.0" + "@babel/code-frame": "npm:^7.24.7" + "@babel/generator": "npm:^7.25.0" + "@babel/helper-compilation-targets": "npm:^7.25.2" + "@babel/helper-module-transforms": "npm:^7.25.2" + "@babel/helpers": "npm:^7.25.0" + "@babel/parser": "npm:^7.25.0" + "@babel/template": "npm:^7.25.0" + "@babel/traverse": "npm:^7.25.2" + "@babel/types": "npm:^7.25.2" + convert-source-map: "npm:^2.0.0" + debug: "npm:^4.1.0" + gensync: "npm:^1.0.0-beta.2" + json5: "npm:^2.2.3" + semver: "npm:^6.3.1" + checksum: 10c0/a425fa40e73cb72b6464063a57c478bc2de9dbcc19c280f1b55a3d88b35d572e87e8594e7d7b4880331addb6faef641bbeb701b91b41b8806cd4deae5d74f401 + languageName: node + linkType: hard + +"@babel/eslint-parser@npm:^7.25.1": + version: 7.25.1 + resolution: "@babel/eslint-parser@npm:7.25.1" + dependencies: + "@nicolo-ribaudo/eslint-scope-5-internals": "npm:5.1.1-v1" + eslint-visitor-keys: "npm:^2.1.0" + semver: "npm:^6.3.1" + peerDependencies: + "@babel/core": ^7.11.0 + eslint: ^7.5.0 || ^8.0.0 || ^9.0.0 + checksum: 10c0/9f98351b32edfced9e6308a80ad69af1210d9c9780f19339cb286d0c9be0a9afac80d1df3b3793112e720675ce5b927920b19454d0f48ddf8370d08ab62d0dc2 + languageName: node + linkType: hard + +"@babel/generator@npm:^7.23.3, @babel/generator@npm:^7.25.0": + version: 7.25.0 + resolution: "@babel/generator@npm:7.25.0" + dependencies: + "@babel/types": "npm:^7.25.0" + "@jridgewell/gen-mapping": "npm:^0.3.5" + "@jridgewell/trace-mapping": "npm:^0.3.25" + jsesc: "npm:^2.5.1" + checksum: 10c0/d0e2dfcdc8bdbb5dded34b705ceebf2e0bc1b06795a1530e64fb6a3ccf313c189db7f60c1616effae48114e1a25adc75855bc4496f3779a396b3377bae718ce7 + languageName: node + linkType: hard + +"@babel/helper-annotate-as-pure@npm:^7.24.7": + version: 7.24.7 + resolution: "@babel/helper-annotate-as-pure@npm:7.24.7" + dependencies: + "@babel/types": "npm:^7.24.7" + checksum: 10c0/4679f7df4dffd5b3e26083ae65228116c3da34c3fff2c11ae11b259a61baec440f51e30fd236f7a0435b9d471acd93d0bc5a95df8213cbf02b1e083503d81b9a + languageName: node + linkType: hard + +"@babel/helper-builder-binary-assignment-operator-visitor@npm:^7.24.7": + version: 7.24.7 + resolution: "@babel/helper-builder-binary-assignment-operator-visitor@npm:7.24.7" + dependencies: + "@babel/traverse": "npm:^7.24.7" + "@babel/types": "npm:^7.24.7" + checksum: 10c0/0ed84abf848c79fb1cd4c1ddac12c771d32c1904d87fc3087f33cfdeb0c2e0db4e7892b74b407d9d8d0c000044f3645a7391a781f788da8410c290bb123a1f13 + languageName: node + linkType: hard + +"@babel/helper-compilation-targets@npm:^7.22.6, @babel/helper-compilation-targets@npm:^7.24.7, @babel/helper-compilation-targets@npm:^7.24.8, @babel/helper-compilation-targets@npm:^7.25.2": + version: 7.25.2 + resolution: "@babel/helper-compilation-targets@npm:7.25.2" + dependencies: + "@babel/compat-data": "npm:^7.25.2" + "@babel/helper-validator-option": "npm:^7.24.8" + browserslist: "npm:^4.23.1" + lru-cache: "npm:^5.1.1" + semver: "npm:^6.3.1" + checksum: 10c0/de10e986b5322c9f807350467dc845ec59df9e596a5926a3b5edbb4710d8e3b8009d4396690e70b88c3844fe8ec4042d61436dd4b92d1f5f75655cf43ab07e99 + languageName: node + linkType: hard + 
+"@babel/helper-create-class-features-plugin@npm:^7.24.7, @babel/helper-create-class-features-plugin@npm:^7.25.0": + version: 7.25.0 + resolution: "@babel/helper-create-class-features-plugin@npm:7.25.0" + dependencies: + "@babel/helper-annotate-as-pure": "npm:^7.24.7" + "@babel/helper-member-expression-to-functions": "npm:^7.24.8" + "@babel/helper-optimise-call-expression": "npm:^7.24.7" + "@babel/helper-replace-supers": "npm:^7.25.0" + "@babel/helper-skip-transparent-expression-wrappers": "npm:^7.24.7" + "@babel/traverse": "npm:^7.25.0" + semver: "npm:^6.3.1" + peerDependencies: + "@babel/core": ^7.0.0 + checksum: 10c0/2f8ac36cfeb45d462432acea64c78312cc9180dda7aa9337b77017961e373c323065362d2452f3d6f8bffeb254ff3f7346ac1b25c8ad7b81db813a95924f4053 + languageName: node + linkType: hard + +"@babel/helper-create-regexp-features-plugin@npm:^7.18.6, @babel/helper-create-regexp-features-plugin@npm:^7.24.7, @babel/helper-create-regexp-features-plugin@npm:^7.25.0": + version: 7.25.2 + resolution: "@babel/helper-create-regexp-features-plugin@npm:7.25.2" + dependencies: + "@babel/helper-annotate-as-pure": "npm:^7.24.7" + regexpu-core: "npm:^5.3.1" + semver: "npm:^6.3.1" + peerDependencies: + "@babel/core": ^7.0.0 + checksum: 10c0/85a7e3639c118856fb1113f54fb7e3bf7698171ddfd0cd6fccccd5426b3727bc1434fe7f69090441dcde327feef9de917e00d35e47ab820047057518dd675317 + languageName: node + linkType: hard + +"@babel/helper-define-polyfill-provider@npm:^0.6.2": + version: 0.6.2 + resolution: "@babel/helper-define-polyfill-provider@npm:0.6.2" + dependencies: + "@babel/helper-compilation-targets": "npm:^7.22.6" + "@babel/helper-plugin-utils": "npm:^7.22.5" + debug: "npm:^4.1.1" + lodash.debounce: "npm:^4.0.8" + resolve: "npm:^1.14.2" + peerDependencies: + "@babel/core": ^7.4.0 || ^8.0.0-0 <8.0.0 + checksum: 10c0/f777fe0ee1e467fdaaac059c39ed203bdc94ef2465fb873316e9e1acfc511a276263724b061e3b0af2f6d7ad3ff174f2bb368fde236a860e0f650fda43d7e022 + languageName: node + linkType: hard + +"@babel/helper-member-expression-to-functions@npm:^7.24.8": + version: 7.24.8 + resolution: "@babel/helper-member-expression-to-functions@npm:7.24.8" + dependencies: + "@babel/traverse": "npm:^7.24.8" + "@babel/types": "npm:^7.24.8" + checksum: 10c0/7e14a5acc91f6cd26305a4441b82eb6f616bd70b096a4d2099a968f16b26d50207eec0b9ebfc466fefd62bd91587ac3be878117cdfec819b7151911183cb0e5a + languageName: node + linkType: hard + +"@babel/helper-module-imports@npm:^7.24.7": + version: 7.24.7 + resolution: "@babel/helper-module-imports@npm:7.24.7" + dependencies: + "@babel/traverse": "npm:^7.24.7" + "@babel/types": "npm:^7.24.7" + checksum: 10c0/97c57db6c3eeaea31564286e328a9fb52b0313c5cfcc7eee4bc226aebcf0418ea5b6fe78673c0e4a774512ec6c86e309d0f326e99d2b37bfc16a25a032498af0 + languageName: node + linkType: hard + +"@babel/helper-module-transforms@npm:^7.24.7, @babel/helper-module-transforms@npm:^7.24.8, @babel/helper-module-transforms@npm:^7.25.0, @babel/helper-module-transforms@npm:^7.25.2": + version: 7.25.2 + resolution: "@babel/helper-module-transforms@npm:7.25.2" + dependencies: + "@babel/helper-module-imports": "npm:^7.24.7" + "@babel/helper-simple-access": "npm:^7.24.7" + "@babel/helper-validator-identifier": "npm:^7.24.7" + "@babel/traverse": "npm:^7.25.2" + peerDependencies: + "@babel/core": ^7.0.0 + checksum: 10c0/adaa15970ace0aee5934b5a633789b5795b6229c6a9cf3e09a7e80aa33e478675eee807006a862aa9aa517935d81f88a6db8a9f5936e3a2a40ec75f8062bc329 + languageName: node + linkType: hard + +"@babel/helper-optimise-call-expression@npm:^7.24.7": + version: 
7.24.7 + resolution: "@babel/helper-optimise-call-expression@npm:7.24.7" + dependencies: + "@babel/types": "npm:^7.24.7" + checksum: 10c0/ca6a9884705dea5c95a8b3ce132d1e3f2ae951ff74987d400d1d9c215dae9c0f9e29924d8f8e131e116533d182675bc261927be72f6a9a2968eaeeaa51eb1d0f + languageName: node + linkType: hard + +"@babel/helper-plugin-utils@npm:^7.0.0, @babel/helper-plugin-utils@npm:^7.10.4, @babel/helper-plugin-utils@npm:^7.12.13, @babel/helper-plugin-utils@npm:^7.14.5, @babel/helper-plugin-utils@npm:^7.18.6, @babel/helper-plugin-utils@npm:^7.22.5, @babel/helper-plugin-utils@npm:^7.24.7, @babel/helper-plugin-utils@npm:^7.24.8, @babel/helper-plugin-utils@npm:^7.8.0, @babel/helper-plugin-utils@npm:^7.8.3": + version: 7.24.8 + resolution: "@babel/helper-plugin-utils@npm:7.24.8" + checksum: 10c0/0376037f94a3bfe6b820a39f81220ac04f243eaee7193774b983e956c1750883ff236b30785795abbcda43fac3ece74750566830c2daa4d6e3870bb0dff34c2d + languageName: node + linkType: hard + +"@babel/helper-remap-async-to-generator@npm:^7.24.7, @babel/helper-remap-async-to-generator@npm:^7.25.0": + version: 7.25.0 + resolution: "@babel/helper-remap-async-to-generator@npm:7.25.0" + dependencies: + "@babel/helper-annotate-as-pure": "npm:^7.24.7" + "@babel/helper-wrap-function": "npm:^7.25.0" + "@babel/traverse": "npm:^7.25.0" + peerDependencies: + "@babel/core": ^7.0.0 + checksum: 10c0/0d17b5f7bb6a607edc9cc62fff8056dd9f341bf2f919884f97b99170d143022a5e7ae57922c4891e4fc360ad291e708d2f8cd8989f1d3cd7a17600159984f5a6 + languageName: node + linkType: hard + +"@babel/helper-replace-supers@npm:^7.24.7, @babel/helper-replace-supers@npm:^7.25.0": + version: 7.25.0 + resolution: "@babel/helper-replace-supers@npm:7.25.0" + dependencies: + "@babel/helper-member-expression-to-functions": "npm:^7.24.8" + "@babel/helper-optimise-call-expression": "npm:^7.24.7" + "@babel/traverse": "npm:^7.25.0" + peerDependencies: + "@babel/core": ^7.0.0 + checksum: 10c0/b4b6650ab3d56c39a259367cd97f8df2f21c9cebb3716fea7bca40a150f8847bfb82f481e98927c7c6579b48a977b5a8f77318a1c6aeb497f41ecd6dbc3fdfef + languageName: node + linkType: hard + +"@babel/helper-simple-access@npm:^7.24.7": + version: 7.24.7 + resolution: "@babel/helper-simple-access@npm:7.24.7" + dependencies: + "@babel/traverse": "npm:^7.24.7" + "@babel/types": "npm:^7.24.7" + checksum: 10c0/7230e419d59a85f93153415100a5faff23c133d7442c19e0cd070da1784d13cd29096ee6c5a5761065c44e8164f9f80e3a518c41a0256df39e38f7ad6744fed7 + languageName: node + linkType: hard + +"@babel/helper-skip-transparent-expression-wrappers@npm:^7.24.7": + version: 7.24.7 + resolution: "@babel/helper-skip-transparent-expression-wrappers@npm:7.24.7" + dependencies: + "@babel/traverse": "npm:^7.24.7" + "@babel/types": "npm:^7.24.7" + checksum: 10c0/e3a9b8ac9c262ac976a1bcb5fe59694db5e6f0b4f9e7bdba5c7693b8b5e28113c23bdaa60fe8d3ec32a337091b67720b2053bcb3d5655f5406536c3d0584242b + languageName: node + linkType: hard + +"@babel/helper-string-parser@npm:^7.24.8": + version: 7.24.8 + resolution: "@babel/helper-string-parser@npm:7.24.8" + checksum: 10c0/6361f72076c17fabf305e252bf6d580106429014b3ab3c1f5c4eb3e6d465536ea6b670cc0e9a637a77a9ad40454d3e41361a2909e70e305116a23d68ce094c08 + languageName: node + linkType: hard + +"@babel/helper-validator-identifier@npm:^7.24.7": + version: 7.24.7 + resolution: "@babel/helper-validator-identifier@npm:7.24.7" + checksum: 10c0/87ad608694c9477814093ed5b5c080c2e06d44cb1924ae8320474a74415241223cc2a725eea2640dd783ff1e3390e5f95eede978bc540e870053152e58f1d651 + languageName: node + linkType: hard + 
+"@babel/helper-validator-option@npm:^7.24.7, @babel/helper-validator-option@npm:^7.24.8": + version: 7.24.8 + resolution: "@babel/helper-validator-option@npm:7.24.8" + checksum: 10c0/73db93a34ae89201351288bee7623eed81a54000779462a986105b54ffe82069e764afd15171a428b82e7c7a9b5fec10b5d5603b216317a414062edf5c67a21f + languageName: node + linkType: hard + +"@babel/helper-wrap-function@npm:^7.25.0": + version: 7.25.0 + resolution: "@babel/helper-wrap-function@npm:7.25.0" + dependencies: + "@babel/template": "npm:^7.25.0" + "@babel/traverse": "npm:^7.25.0" + "@babel/types": "npm:^7.25.0" + checksum: 10c0/d54601a98384c191cbc1ff07b03a19e288ef8d5c6bfafe270b2a303d96e7304eb296002921ed464cc1b105a547d1db146eb86b0be617924dee1ba1b379cdc216 + languageName: node + linkType: hard + +"@babel/helpers@npm:^7.25.0": + version: 7.25.0 + resolution: "@babel/helpers@npm:7.25.0" + dependencies: + "@babel/template": "npm:^7.25.0" + "@babel/types": "npm:^7.25.0" + checksum: 10c0/b7fe007fc4194268abf70aa3810365085e290e6528dcb9fbbf7a765d43c74b6369ce0f99c5ccd2d44c413853099daa449c9a0123f0b212ac8d18643f2e8174b8 + languageName: node + linkType: hard + +"@babel/highlight@npm:^7.24.7": + version: 7.24.7 + resolution: "@babel/highlight@npm:7.24.7" + dependencies: + "@babel/helper-validator-identifier": "npm:^7.24.7" + chalk: "npm:^2.4.2" + js-tokens: "npm:^4.0.0" + picocolors: "npm:^1.0.0" + checksum: 10c0/674334c571d2bb9d1c89bdd87566383f59231e16bcdcf5bb7835babdf03c9ae585ca0887a7b25bdf78f303984af028df52831c7989fecebb5101cc132da9393a + languageName: node + linkType: hard + +"@babel/parser@npm:^7.1.0, @babel/parser@npm:^7.20.7, @babel/parser@npm:^7.25.0, @babel/parser@npm:^7.25.3": + version: 7.25.3 + resolution: "@babel/parser@npm:7.25.3" + dependencies: + "@babel/types": "npm:^7.25.2" + bin: + parser: ./bin/babel-parser.js + checksum: 10c0/874b01349aedb805d6694f867a752fdc7469778fad76aca4548d2cc6ce96087c3ba5fb917a6f8d05d2d1a74aae309b5f50f1a4dba035f5a2c9fcfe6e106d2c4e + languageName: node + linkType: hard + +"@babel/plugin-bugfix-firefox-class-in-computed-class-key@npm:^7.25.3": + version: 7.25.3 + resolution: "@babel/plugin-bugfix-firefox-class-in-computed-class-key@npm:7.25.3" + dependencies: + "@babel/helper-plugin-utils": "npm:^7.24.8" + "@babel/traverse": "npm:^7.25.3" + peerDependencies: + "@babel/core": ^7.0.0 + checksum: 10c0/814b4d3f102e7556a5053d1acf57ef601cfcff39a2c81b8cdc6a5c842e3cb9838f5925d1466a5f1e6416e74c9c83586a3c07fbd7fb8610a396c2becdf9ae5790 + languageName: node + linkType: hard + +"@babel/plugin-bugfix-safari-class-field-initializer-scope@npm:^7.25.0": + version: 7.25.0 + resolution: "@babel/plugin-bugfix-safari-class-field-initializer-scope@npm:7.25.0" + dependencies: + "@babel/helper-plugin-utils": "npm:^7.24.8" + peerDependencies: + "@babel/core": ^7.0.0 + checksum: 10c0/9645a1f47b3750acadb1353c02e71cc712d072aafe5ce115ed3a886bc14c5d9200cfb0b5b5e60e813baa549b800cf798f8714019fd246c699053cf68c428e426 + languageName: node + linkType: hard + +"@babel/plugin-bugfix-safari-id-destructuring-collision-in-function-expression@npm:^7.25.0": + version: 7.25.0 + resolution: "@babel/plugin-bugfix-safari-id-destructuring-collision-in-function-expression@npm:7.25.0" + dependencies: + "@babel/helper-plugin-utils": "npm:^7.24.8" + peerDependencies: + "@babel/core": ^7.0.0 + checksum: 10c0/ed1ce1c90cac46c01825339fd0f2a96fa071b016fb819d8dfaf8e96300eae30e74870cb47e4dc80d4ce2fb287869f102878b4f3b35bc927fec8b1d0d76bcf612 + languageName: node + linkType: hard + 
+"@babel/plugin-bugfix-v8-spread-parameters-in-optional-chaining@npm:^7.24.7": + version: 7.24.7 + resolution: "@babel/plugin-bugfix-v8-spread-parameters-in-optional-chaining@npm:7.24.7" + dependencies: + "@babel/helper-plugin-utils": "npm:^7.24.7" + "@babel/helper-skip-transparent-expression-wrappers": "npm:^7.24.7" + "@babel/plugin-transform-optional-chaining": "npm:^7.24.7" + peerDependencies: + "@babel/core": ^7.13.0 + checksum: 10c0/aeb6e7aa363a47f815cf956ea1053c5dd8b786a17799f065c9688ba4b0051fe7565d258bbe9400bfcbfb3114cb9fda66983e10afe4d750bc70ff75403e15dd36 + languageName: node + linkType: hard + +"@babel/plugin-bugfix-v8-static-class-fields-redefine-readonly@npm:^7.25.0": + version: 7.25.0 + resolution: "@babel/plugin-bugfix-v8-static-class-fields-redefine-readonly@npm:7.25.0" + dependencies: + "@babel/helper-plugin-utils": "npm:^7.24.8" + "@babel/traverse": "npm:^7.25.0" + peerDependencies: + "@babel/core": ^7.0.0 + checksum: 10c0/45988025537a9d4a27b610fd696a18fd9ba9336621a69b4fb40560eeb10c79657f85c92a37f30c7c8fb29c22970eea0b373315795a891f1a05549a6cfe5a6bfe + languageName: node + linkType: hard + +"@babel/plugin-proposal-private-property-in-object@npm:7.21.0-placeholder-for-preset-env.2": + version: 7.21.0-placeholder-for-preset-env.2 + resolution: "@babel/plugin-proposal-private-property-in-object@npm:7.21.0-placeholder-for-preset-env.2" + peerDependencies: + "@babel/core": ^7.0.0-0 + checksum: 10c0/e605e0070da087f6c35579499e65801179a521b6842c15181a1e305c04fded2393f11c1efd09b087be7f8b083d1b75e8f3efcbc1292b4f60d3369e14812cff63 + languageName: node + linkType: hard + +"@babel/plugin-syntax-async-generators@npm:^7.8.4": + version: 7.8.4 + resolution: "@babel/plugin-syntax-async-generators@npm:7.8.4" + dependencies: + "@babel/helper-plugin-utils": "npm:^7.8.0" + peerDependencies: + "@babel/core": ^7.0.0-0 + checksum: 10c0/d13efb282838481348c71073b6be6245b35d4f2f964a8f71e4174f235009f929ef7613df25f8d2338e2d3e44bc4265a9f8638c6aaa136d7a61fe95985f9725c8 + languageName: node + linkType: hard + +"@babel/plugin-syntax-class-properties@npm:^7.12.13": + version: 7.12.13 + resolution: "@babel/plugin-syntax-class-properties@npm:7.12.13" + dependencies: + "@babel/helper-plugin-utils": "npm:^7.12.13" + peerDependencies: + "@babel/core": ^7.0.0-0 + checksum: 10c0/95168fa186416195280b1264fb18afcdcdcea780b3515537b766cb90de6ce042d42dd6a204a39002f794ae5845b02afb0fd4861a3308a861204a55e68310a120 + languageName: node + linkType: hard + +"@babel/plugin-syntax-class-static-block@npm:^7.14.5": + version: 7.14.5 + resolution: "@babel/plugin-syntax-class-static-block@npm:7.14.5" + dependencies: + "@babel/helper-plugin-utils": "npm:^7.14.5" + peerDependencies: + "@babel/core": ^7.0.0-0 + checksum: 10c0/4464bf9115f4a2d02ce1454411baf9cfb665af1da53709c5c56953e5e2913745b0fcce82982a00463d6facbdd93445c691024e310b91431a1e2f024b158f6371 + languageName: node + linkType: hard + +"@babel/plugin-syntax-dynamic-import@npm:^7.8.3": + version: 7.8.3 + resolution: "@babel/plugin-syntax-dynamic-import@npm:7.8.3" + dependencies: + "@babel/helper-plugin-utils": "npm:^7.8.0" + peerDependencies: + "@babel/core": ^7.0.0-0 + checksum: 10c0/9c50927bf71adf63f60c75370e2335879402648f468d0172bc912e303c6a3876927d8eb35807331b57f415392732ed05ab9b42c68ac30a936813ab549e0246c5 + languageName: node + linkType: hard + +"@babel/plugin-syntax-export-namespace-from@npm:^7.8.3": + version: 7.8.3 + resolution: "@babel/plugin-syntax-export-namespace-from@npm:7.8.3" + dependencies: + "@babel/helper-plugin-utils": "npm:^7.8.3" + peerDependencies: + 
"@babel/core": ^7.0.0-0 + checksum: 10c0/5100d658ba563829700cd8d001ddc09f4c0187b1a13de300d729c5b3e87503f75a6d6c99c1794182f7f1a9f546ee009df4f15a0ce36376e206ed0012fa7cdc24 + languageName: node + linkType: hard + +"@babel/plugin-syntax-import-assertions@npm:^7.24.7": + version: 7.24.7 + resolution: "@babel/plugin-syntax-import-assertions@npm:7.24.7" + dependencies: + "@babel/helper-plugin-utils": "npm:^7.24.7" + peerDependencies: + "@babel/core": ^7.0.0-0 + checksum: 10c0/b82c53e095274ee71c248551352d73441cf65b3b3fc0107258ba4e9aef7090772a425442b3ed1c396fa207d0efafde8929c87a17d3c885b3ca2021316e87e246 + languageName: node + linkType: hard + +"@babel/plugin-syntax-import-attributes@npm:^7.24.7": + version: 7.24.7 + resolution: "@babel/plugin-syntax-import-attributes@npm:7.24.7" + dependencies: + "@babel/helper-plugin-utils": "npm:^7.24.7" + peerDependencies: + "@babel/core": ^7.0.0-0 + checksum: 10c0/eccc54d0f03c96d0eec7a6e2fa124dadbc7298345b62ffc4238f173308c4325b5598f139695ff05a95cf78412ef6903599e4b814496612bf39aad4715a16375b + languageName: node + linkType: hard + +"@babel/plugin-syntax-import-meta@npm:^7.10.4": + version: 7.10.4 + resolution: "@babel/plugin-syntax-import-meta@npm:7.10.4" + dependencies: + "@babel/helper-plugin-utils": "npm:^7.10.4" + peerDependencies: + "@babel/core": ^7.0.0-0 + checksum: 10c0/0b08b5e4c3128523d8e346f8cfc86824f0da2697b1be12d71af50a31aff7a56ceb873ed28779121051475010c28d6146a6bfea8518b150b71eeb4e46190172ee + languageName: node + linkType: hard + +"@babel/plugin-syntax-json-strings@npm:^7.8.3": + version: 7.8.3 + resolution: "@babel/plugin-syntax-json-strings@npm:7.8.3" + dependencies: + "@babel/helper-plugin-utils": "npm:^7.8.0" + peerDependencies: + "@babel/core": ^7.0.0-0 + checksum: 10c0/e98f31b2ec406c57757d115aac81d0336e8434101c224edd9a5c93cefa53faf63eacc69f3138960c8b25401315af03df37f68d316c151c4b933136716ed6906e + languageName: node + linkType: hard + +"@babel/plugin-syntax-jsx@npm:^7.24.7": + version: 7.24.7 + resolution: "@babel/plugin-syntax-jsx@npm:7.24.7" + dependencies: + "@babel/helper-plugin-utils": "npm:^7.24.7" + peerDependencies: + "@babel/core": ^7.0.0-0 + checksum: 10c0/f44d927a9ae8d5ef016ff5b450e1671e56629ddc12e56b938e41fd46e141170d9dfc9a53d6cb2b9a20a7dd266a938885e6a3981c60c052a2e1daed602ac80e51 + languageName: node + linkType: hard + +"@babel/plugin-syntax-logical-assignment-operators@npm:^7.10.4": + version: 7.10.4 + resolution: "@babel/plugin-syntax-logical-assignment-operators@npm:7.10.4" + dependencies: + "@babel/helper-plugin-utils": "npm:^7.10.4" + peerDependencies: + "@babel/core": ^7.0.0-0 + checksum: 10c0/2594cfbe29411ad5bc2ad4058de7b2f6a8c5b86eda525a993959438615479e59c012c14aec979e538d60a584a1a799b60d1b8942c3b18468cb9d99b8fd34cd0b + languageName: node + linkType: hard + +"@babel/plugin-syntax-nullish-coalescing-operator@npm:^7.8.3": + version: 7.8.3 + resolution: "@babel/plugin-syntax-nullish-coalescing-operator@npm:7.8.3" + dependencies: + "@babel/helper-plugin-utils": "npm:^7.8.0" + peerDependencies: + "@babel/core": ^7.0.0-0 + checksum: 10c0/2024fbb1162899094cfc81152449b12bd0cc7053c6d4bda8ac2852545c87d0a851b1b72ed9560673cbf3ef6248257262c3c04aabf73117215c1b9cc7dd2542ce + languageName: node + linkType: hard + +"@babel/plugin-syntax-numeric-separator@npm:^7.10.4": + version: 7.10.4 + resolution: "@babel/plugin-syntax-numeric-separator@npm:7.10.4" + dependencies: + "@babel/helper-plugin-utils": "npm:^7.10.4" + peerDependencies: + "@babel/core": ^7.0.0-0 + checksum: 
10c0/c55a82b3113480942c6aa2fcbe976ff9caa74b7b1109ff4369641dfbc88d1da348aceb3c31b6ed311c84d1e7c479440b961906c735d0ab494f688bf2fd5b9bb9 + languageName: node + linkType: hard + +"@babel/plugin-syntax-object-rest-spread@npm:^7.8.3": + version: 7.8.3 + resolution: "@babel/plugin-syntax-object-rest-spread@npm:7.8.3" + dependencies: + "@babel/helper-plugin-utils": "npm:^7.8.0" + peerDependencies: + "@babel/core": ^7.0.0-0 + checksum: 10c0/ee1eab52ea6437e3101a0a7018b0da698545230015fc8ab129d292980ec6dff94d265e9e90070e8ae5fed42f08f1622c14c94552c77bcac784b37f503a82ff26 + languageName: node + linkType: hard + +"@babel/plugin-syntax-optional-catch-binding@npm:^7.8.3": + version: 7.8.3 + resolution: "@babel/plugin-syntax-optional-catch-binding@npm:7.8.3" + dependencies: + "@babel/helper-plugin-utils": "npm:^7.8.0" + peerDependencies: + "@babel/core": ^7.0.0-0 + checksum: 10c0/27e2493ab67a8ea6d693af1287f7e9acec206d1213ff107a928e85e173741e1d594196f99fec50e9dde404b09164f39dec5864c767212154ffe1caa6af0bc5af + languageName: node + linkType: hard + +"@babel/plugin-syntax-optional-chaining@npm:^7.8.3": + version: 7.8.3 + resolution: "@babel/plugin-syntax-optional-chaining@npm:7.8.3" + dependencies: + "@babel/helper-plugin-utils": "npm:^7.8.0" + peerDependencies: + "@babel/core": ^7.0.0-0 + checksum: 10c0/46edddf2faa6ebf94147b8e8540dfc60a5ab718e2de4d01b2c0bdf250a4d642c2bd47cbcbb739febcb2bf75514dbcefad3c52208787994b8d0f8822490f55e81 + languageName: node + linkType: hard + +"@babel/plugin-syntax-private-property-in-object@npm:^7.14.5": + version: 7.14.5 + resolution: "@babel/plugin-syntax-private-property-in-object@npm:7.14.5" + dependencies: + "@babel/helper-plugin-utils": "npm:^7.14.5" + peerDependencies: + "@babel/core": ^7.0.0-0 + checksum: 10c0/69822772561706c87f0a65bc92d0772cea74d6bc0911537904a676d5ff496a6d3ac4e05a166d8125fce4a16605bace141afc3611074e170a994e66e5397787f3 + languageName: node + linkType: hard + +"@babel/plugin-syntax-top-level-await@npm:^7.14.5": + version: 7.14.5 + resolution: "@babel/plugin-syntax-top-level-await@npm:7.14.5" + dependencies: + "@babel/helper-plugin-utils": "npm:^7.14.5" + peerDependencies: + "@babel/core": ^7.0.0-0 + checksum: 10c0/14bf6e65d5bc1231ffa9def5f0ef30b19b51c218fcecaa78cd1bdf7939dfdf23f90336080b7f5196916368e399934ce5d581492d8292b46a2fb569d8b2da106f + languageName: node + linkType: hard + +"@babel/plugin-syntax-typescript@npm:^7.24.7": + version: 7.24.7 + resolution: "@babel/plugin-syntax-typescript@npm:7.24.7" + dependencies: + "@babel/helper-plugin-utils": "npm:^7.24.7" + peerDependencies: + "@babel/core": ^7.0.0-0 + checksum: 10c0/cdabd2e8010fb0ad15b49c2c270efc97c4bfe109ead36c7bbcf22da7a74bc3e49702fc4f22f12d2d6049e8e22a5769258df1fd05f0420ae45e11bdd5bc07805a + languageName: node + linkType: hard + +"@babel/plugin-syntax-unicode-sets-regex@npm:^7.18.6": + version: 7.18.6 + resolution: "@babel/plugin-syntax-unicode-sets-regex@npm:7.18.6" + dependencies: + "@babel/helper-create-regexp-features-plugin": "npm:^7.18.6" + "@babel/helper-plugin-utils": "npm:^7.18.6" + peerDependencies: + "@babel/core": ^7.0.0 + checksum: 10c0/9144e5b02a211a4fb9a0ce91063f94fbe1004e80bde3485a0910c9f14897cf83fabd8c21267907cff25db8e224858178df0517f14333cfcf3380ad9a4139cb50 + languageName: node + linkType: hard + +"@babel/plugin-transform-arrow-functions@npm:^7.24.7": + version: 7.24.7 + resolution: "@babel/plugin-transform-arrow-functions@npm:7.24.7" + dependencies: + "@babel/helper-plugin-utils": "npm:^7.24.7" + peerDependencies: + "@babel/core": ^7.0.0-0 + checksum: 
10c0/6ac05a54e5582f34ac6d5dc26499e227227ec1c7fa6fc8de1f3d40c275f140d3907f79bbbd49304da2d7008a5ecafb219d0b71d78ee3290ca22020d878041245 + languageName: node + linkType: hard + +"@babel/plugin-transform-async-generator-functions@npm:^7.25.0": + version: 7.25.0 + resolution: "@babel/plugin-transform-async-generator-functions@npm:7.25.0" + dependencies: + "@babel/helper-plugin-utils": "npm:^7.24.8" + "@babel/helper-remap-async-to-generator": "npm:^7.25.0" + "@babel/plugin-syntax-async-generators": "npm:^7.8.4" + "@babel/traverse": "npm:^7.25.0" + peerDependencies: + "@babel/core": ^7.0.0-0 + checksum: 10c0/5348c3a33d16e0d62f13482c6fa432185ba096d58880b08d42450f7db662d6b03e6149d495c8620897dcd3da35061068cbd6c09da7d0ec95743e55a788809e4e + languageName: node + linkType: hard + +"@babel/plugin-transform-async-to-generator@npm:^7.24.7": + version: 7.24.7 + resolution: "@babel/plugin-transform-async-to-generator@npm:7.24.7" + dependencies: + "@babel/helper-module-imports": "npm:^7.24.7" + "@babel/helper-plugin-utils": "npm:^7.24.7" + "@babel/helper-remap-async-to-generator": "npm:^7.24.7" + peerDependencies: + "@babel/core": ^7.0.0-0 + checksum: 10c0/83c82e243898875af8457972a26ab29baf8a2078768ee9f35141eb3edff0f84b165582a2ff73e90a9e08f5922bf813dbf15a85c1213654385198f4591c0dc45d + languageName: node + linkType: hard + +"@babel/plugin-transform-block-scoped-functions@npm:^7.24.7": + version: 7.24.7 + resolution: "@babel/plugin-transform-block-scoped-functions@npm:7.24.7" + dependencies: + "@babel/helper-plugin-utils": "npm:^7.24.7" + peerDependencies: + "@babel/core": ^7.0.0-0 + checksum: 10c0/113e86de4612ae91773ff5cb6b980f01e1da7e26ae6f6012127415d7ae144e74987bc23feb97f63ba4bc699331490ddea36eac004d76a20d5369e4cc6a7f61cd + languageName: node + linkType: hard + +"@babel/plugin-transform-block-scoping@npm:^7.25.0": + version: 7.25.0 + resolution: "@babel/plugin-transform-block-scoping@npm:7.25.0" + dependencies: + "@babel/helper-plugin-utils": "npm:^7.24.8" + peerDependencies: + "@babel/core": ^7.0.0-0 + checksum: 10c0/382931c75a5d0ea560387e76cb57b03461300527e4784efcb2fb62f36c1eb0ab331327b6034def256baa0cad9050925a61f9c0d56261b6afd6a29c3065fb0bd4 + languageName: node + linkType: hard + +"@babel/plugin-transform-class-properties@npm:^7.24.7": + version: 7.24.7 + resolution: "@babel/plugin-transform-class-properties@npm:7.24.7" + dependencies: + "@babel/helper-create-class-features-plugin": "npm:^7.24.7" + "@babel/helper-plugin-utils": "npm:^7.24.7" + peerDependencies: + "@babel/core": ^7.0.0-0 + checksum: 10c0/75018a466c7ede3d2397e158891c224ba7fca72864506ce067ddbc02fc65191d44da4d6379c996d0c7f09019e26b5c3f5f1d3a639cd98366519723886f0689d0 + languageName: node + linkType: hard + +"@babel/plugin-transform-class-static-block@npm:^7.24.7": + version: 7.24.7 + resolution: "@babel/plugin-transform-class-static-block@npm:7.24.7" + dependencies: + "@babel/helper-create-class-features-plugin": "npm:^7.24.7" + "@babel/helper-plugin-utils": "npm:^7.24.7" + "@babel/plugin-syntax-class-static-block": "npm:^7.14.5" + peerDependencies: + "@babel/core": ^7.12.0 + checksum: 10c0/b0ade39a3d09dce886f79dbd5907c3d99b48167eddb6b9bbde24a0598129654d7017e611c20494cdbea48b07ac14397cd97ea34e3754bbb2abae4e698128eccb + languageName: node + linkType: hard + +"@babel/plugin-transform-classes@npm:^7.25.0": + version: 7.25.0 + resolution: "@babel/plugin-transform-classes@npm:7.25.0" + dependencies: + "@babel/helper-annotate-as-pure": "npm:^7.24.7" + "@babel/helper-compilation-targets": "npm:^7.24.8" + "@babel/helper-plugin-utils": "npm:^7.24.8" 
+ "@babel/helper-replace-supers": "npm:^7.25.0" + "@babel/traverse": "npm:^7.25.0" + globals: "npm:^11.1.0" + peerDependencies: + "@babel/core": ^7.0.0-0 + checksum: 10c0/4451dccf8a7979427ae042afe381233f30764a8072faf0de1337a4fc297c6d7cb40df9e28931ac096e5b56392d0cd97d3ce10aee68288150a8701624d362a791 + languageName: node + linkType: hard + +"@babel/plugin-transform-computed-properties@npm:^7.24.7": + version: 7.24.7 + resolution: "@babel/plugin-transform-computed-properties@npm:7.24.7" + dependencies: + "@babel/helper-plugin-utils": "npm:^7.24.7" + "@babel/template": "npm:^7.24.7" + peerDependencies: + "@babel/core": ^7.0.0-0 + checksum: 10c0/25636dbc1f605c0b8bc60aa58628a916b689473d11551c9864a855142e36742fe62d4a70400ba3b74902338e77fb3d940376c0a0ba154b6b7ec5367175233b49 + languageName: node + linkType: hard + +"@babel/plugin-transform-destructuring@npm:^7.24.8": + version: 7.24.8 + resolution: "@babel/plugin-transform-destructuring@npm:7.24.8" + dependencies: + "@babel/helper-plugin-utils": "npm:^7.24.8" + peerDependencies: + "@babel/core": ^7.0.0-0 + checksum: 10c0/804968c1d5f5072c717505296c1e5d5ec33e90550423de66de82bbcb78157156e8470bbe77a04ab8c710a88a06360a30103cf223ac7eff4829adedd6150de5ce + languageName: node + linkType: hard + +"@babel/plugin-transform-dotall-regex@npm:^7.24.7": + version: 7.24.7 + resolution: "@babel/plugin-transform-dotall-regex@npm:7.24.7" + dependencies: + "@babel/helper-create-regexp-features-plugin": "npm:^7.24.7" + "@babel/helper-plugin-utils": "npm:^7.24.7" + peerDependencies: + "@babel/core": ^7.0.0-0 + checksum: 10c0/793f14c9494972d294b7e7b97b747f47874b6d57d7804d3443c701becf5db192c9311be6a1835c07664486df1f5c60d33196c36fb7e11a53015e476b4c145b33 + languageName: node + linkType: hard + +"@babel/plugin-transform-duplicate-keys@npm:^7.24.7": + version: 7.24.7 + resolution: "@babel/plugin-transform-duplicate-keys@npm:7.24.7" + dependencies: + "@babel/helper-plugin-utils": "npm:^7.24.7" + peerDependencies: + "@babel/core": ^7.0.0-0 + checksum: 10c0/75ff7ec1117ac500e77bf20a144411d39c0fdd038f108eec061724123ce6d1bb8d5bd27968e466573ee70014f8be0043361cdb0ef388f8a182d1d97ad67e51b9 + languageName: node + linkType: hard + +"@babel/plugin-transform-duplicate-named-capturing-groups-regex@npm:^7.25.0": + version: 7.25.0 + resolution: "@babel/plugin-transform-duplicate-named-capturing-groups-regex@npm:7.25.0" + dependencies: + "@babel/helper-create-regexp-features-plugin": "npm:^7.25.0" + "@babel/helper-plugin-utils": "npm:^7.24.8" + peerDependencies: + "@babel/core": ^7.0.0 + checksum: 10c0/1c9b57ddd9b33696e88911d0e7975e1573ebc46219c4b30eb1dc746cbb71aedfac6f6dab7fdfdec54dd58f31468bf6ab56b157661ea4ffe58f906d71f89544c8 + languageName: node + linkType: hard + +"@babel/plugin-transform-dynamic-import@npm:^7.24.7": + version: 7.24.7 + resolution: "@babel/plugin-transform-dynamic-import@npm:7.24.7" + dependencies: + "@babel/helper-plugin-utils": "npm:^7.24.7" + "@babel/plugin-syntax-dynamic-import": "npm:^7.8.3" + peerDependencies: + "@babel/core": ^7.0.0-0 + checksum: 10c0/eeda48372efd0a5103cb22dadb13563c975bce18ae85daafbb47d57bb9665d187da9d4fe8d07ac0a6e1288afcfcb73e4e5618bf75ff63fddf9736bfbf225203b + languageName: node + linkType: hard + +"@babel/plugin-transform-exponentiation-operator@npm:^7.24.7": + version: 7.24.7 + resolution: "@babel/plugin-transform-exponentiation-operator@npm:7.24.7" + dependencies: + "@babel/helper-builder-binary-assignment-operator-visitor": "npm:^7.24.7" + "@babel/helper-plugin-utils": "npm:^7.24.7" + peerDependencies: + "@babel/core": ^7.0.0-0 + 
checksum: 10c0/ace3e11c94041b88848552ba8feb39ae4d6cad3696d439ff51445bd2882d8b8775d85a26c2c0edb9b5e38c9e6013cc11b0dea89ec8f93c7d9d7ee95e3645078c + languageName: node + linkType: hard + +"@babel/plugin-transform-export-namespace-from@npm:^7.24.7": + version: 7.24.7 + resolution: "@babel/plugin-transform-export-namespace-from@npm:7.24.7" + dependencies: + "@babel/helper-plugin-utils": "npm:^7.24.7" + "@babel/plugin-syntax-export-namespace-from": "npm:^7.8.3" + peerDependencies: + "@babel/core": ^7.0.0-0 + checksum: 10c0/4e144d7f1c57bc63b4899dbbbdfed0880f2daa75ea9c7251c7997f106e4b390dc362175ab7830f11358cb21f6b972ca10a43a2e56cd789065f7606b082674c0c + languageName: node + linkType: hard + +"@babel/plugin-transform-for-of@npm:^7.24.7": + version: 7.24.7 + resolution: "@babel/plugin-transform-for-of@npm:7.24.7" + dependencies: + "@babel/helper-plugin-utils": "npm:^7.24.7" + "@babel/helper-skip-transparent-expression-wrappers": "npm:^7.24.7" + peerDependencies: + "@babel/core": ^7.0.0-0 + checksum: 10c0/77629b1173e55d07416f05ba7353caa09d2c2149da2ca26721ab812209b63689d1be45116b68eadc011c49ced59daf5320835b15245eb7ae93ae0c5e8277cfc0 + languageName: node + linkType: hard + +"@babel/plugin-transform-function-name@npm:^7.25.1": + version: 7.25.1 + resolution: "@babel/plugin-transform-function-name@npm:7.25.1" + dependencies: + "@babel/helper-compilation-targets": "npm:^7.24.8" + "@babel/helper-plugin-utils": "npm:^7.24.8" + "@babel/traverse": "npm:^7.25.1" + peerDependencies: + "@babel/core": ^7.0.0-0 + checksum: 10c0/e74912174d5e33d1418b840443c2e226a7b76cc017c1ed20ee30a566e4f1794d4a123be03180da046241576e8b692731807ba1f52608922acf1cb2cb6957593f + languageName: node + linkType: hard + +"@babel/plugin-transform-json-strings@npm:^7.24.7": + version: 7.24.7 + resolution: "@babel/plugin-transform-json-strings@npm:7.24.7" + dependencies: + "@babel/helper-plugin-utils": "npm:^7.24.7" + "@babel/plugin-syntax-json-strings": "npm:^7.8.3" + peerDependencies: + "@babel/core": ^7.0.0-0 + checksum: 10c0/17c72cd5bf3e90e722aabd333559275f3309e3fa0b9cea8c2944ab83ae01502c71a2be05da5101edc02b3fc8df15a8dbb9b861cbfcc8a52bf5e797cf01d3a40a + languageName: node + linkType: hard + +"@babel/plugin-transform-literals@npm:^7.25.2": + version: 7.25.2 + resolution: "@babel/plugin-transform-literals@npm:7.25.2" + dependencies: + "@babel/helper-plugin-utils": "npm:^7.24.8" + peerDependencies: + "@babel/core": ^7.0.0-0 + checksum: 10c0/0796883217b0885d37e7f6d350773be349e469a812b6bf11ccf862a6edf65103d3e7c849529d65381b441685c12e756751d8c2489a0fd3f8139bb5ef93185f58 + languageName: node + linkType: hard + +"@babel/plugin-transform-logical-assignment-operators@npm:^7.24.7": + version: 7.24.7 + resolution: "@babel/plugin-transform-logical-assignment-operators@npm:7.24.7" + dependencies: + "@babel/helper-plugin-utils": "npm:^7.24.7" + "@babel/plugin-syntax-logical-assignment-operators": "npm:^7.10.4" + peerDependencies: + "@babel/core": ^7.0.0-0 + checksum: 10c0/dbe882eb9053931f2ab332c50fc7c2a10ef507d6421bd9831adbb4cb7c9f8e1e5fbac4fbd2e007f6a1bf1df1843547559434012f118084dc0bf42cda3b106272 + languageName: node + linkType: hard + +"@babel/plugin-transform-member-expression-literals@npm:^7.24.7": + version: 7.24.7 + resolution: "@babel/plugin-transform-member-expression-literals@npm:7.24.7" + dependencies: + "@babel/helper-plugin-utils": "npm:^7.24.7" + peerDependencies: + "@babel/core": ^7.0.0-0 + checksum: 10c0/e789ae359bdf2d20e90bedef18dfdbd965c9ebae1cee398474a0c349590fda7c8b874e1a2ceee62e47e5e6ec1730e76b0f24e502164357571854271fc12cc684 + 
languageName: node + linkType: hard + +"@babel/plugin-transform-modules-amd@npm:^7.24.7": + version: 7.24.7 + resolution: "@babel/plugin-transform-modules-amd@npm:7.24.7" + dependencies: + "@babel/helper-module-transforms": "npm:^7.24.7" + "@babel/helper-plugin-utils": "npm:^7.24.7" + peerDependencies: + "@babel/core": ^7.0.0-0 + checksum: 10c0/6df7de7fce34117ca4b2fa07949b12274c03668cbfe21481c4037b6300796d50ae40f4f170527b61b70a67f26db906747797e30dbd0d9809a441b6e220b5728f + languageName: node + linkType: hard + +"@babel/plugin-transform-modules-commonjs@npm:^7.24.7, @babel/plugin-transform-modules-commonjs@npm:^7.24.8": + version: 7.24.8 + resolution: "@babel/plugin-transform-modules-commonjs@npm:7.24.8" + dependencies: + "@babel/helper-module-transforms": "npm:^7.24.8" + "@babel/helper-plugin-utils": "npm:^7.24.8" + "@babel/helper-simple-access": "npm:^7.24.7" + peerDependencies: + "@babel/core": ^7.0.0-0 + checksum: 10c0/f1cf552307ebfced20d3907c1dd8be941b277f0364aa655e2b5fee828c84c54065745183104dae86f1f93ea0406db970a463ef7ceaaed897623748e99640e5a7 + languageName: node + linkType: hard + +"@babel/plugin-transform-modules-systemjs@npm:^7.25.0": + version: 7.25.0 + resolution: "@babel/plugin-transform-modules-systemjs@npm:7.25.0" + dependencies: + "@babel/helper-module-transforms": "npm:^7.25.0" + "@babel/helper-plugin-utils": "npm:^7.24.8" + "@babel/helper-validator-identifier": "npm:^7.24.7" + "@babel/traverse": "npm:^7.25.0" + peerDependencies: + "@babel/core": ^7.0.0-0 + checksum: 10c0/fca6198da71237e4bb1274b3b67a0c81d56013c9535361242b6bfa87d70a9597854aadb45d4d8203369be4a655e158be2a5d20af0040b1f8d1bfc47db3ad7b68 + languageName: node + linkType: hard + +"@babel/plugin-transform-modules-umd@npm:^7.24.7": + version: 7.24.7 + resolution: "@babel/plugin-transform-modules-umd@npm:7.24.7" + dependencies: + "@babel/helper-module-transforms": "npm:^7.24.7" + "@babel/helper-plugin-utils": "npm:^7.24.7" + peerDependencies: + "@babel/core": ^7.0.0-0 + checksum: 10c0/7791d290121db210e4338b94b4a069a1a79e4c7a8d7638d8159a97b281851bbed3048dac87a4ae718ad963005e6c14a5d28e6db2eeb2b04e031cee92fb312f85 + languageName: node + linkType: hard + +"@babel/plugin-transform-named-capturing-groups-regex@npm:^7.24.7": + version: 7.24.7 + resolution: "@babel/plugin-transform-named-capturing-groups-regex@npm:7.24.7" + dependencies: + "@babel/helper-create-regexp-features-plugin": "npm:^7.24.7" + "@babel/helper-plugin-utils": "npm:^7.24.7" + peerDependencies: + "@babel/core": ^7.0.0 + checksum: 10c0/41a0b0f2d0886318237440aa3b489f6d0305361d8671121777d9ff89f9f6de9d0c02ce93625049061426c8994064ef64deae8b819d1b14c00374a6a2336fb5d9 + languageName: node + linkType: hard + +"@babel/plugin-transform-new-target@npm:^7.24.7": + version: 7.24.7 + resolution: "@babel/plugin-transform-new-target@npm:7.24.7" + dependencies: + "@babel/helper-plugin-utils": "npm:^7.24.7" + peerDependencies: + "@babel/core": ^7.0.0-0 + checksum: 10c0/2540808a35e1a978e537334c43dab439cf24c93e7beb213a2e71902f6710e60e0184316643790c0a6644e7a8021e52f7ab8165e6b3e2d6651be07bdf517b67df + languageName: node + linkType: hard + +"@babel/plugin-transform-nullish-coalescing-operator@npm:^7.24.7": + version: 7.24.7 + resolution: "@babel/plugin-transform-nullish-coalescing-operator@npm:7.24.7" + dependencies: + "@babel/helper-plugin-utils": "npm:^7.24.7" + "@babel/plugin-syntax-nullish-coalescing-operator": "npm:^7.8.3" + peerDependencies: + "@babel/core": ^7.0.0-0 + checksum: 
10c0/7243c8ff734ed5ef759dd8768773c4b443c12e792727e759a1aec2c7fa2bfdd24f1ecb42e292a7b3d8bd3d7f7b861cf256a8eb4ba144fc9cc463892c303083d9 + languageName: node + linkType: hard + +"@babel/plugin-transform-numeric-separator@npm:^7.24.7": + version: 7.24.7 + resolution: "@babel/plugin-transform-numeric-separator@npm:7.24.7" + dependencies: + "@babel/helper-plugin-utils": "npm:^7.24.7" + "@babel/plugin-syntax-numeric-separator": "npm:^7.10.4" + peerDependencies: + "@babel/core": ^7.0.0-0 + checksum: 10c0/e18e09ca5a6342645d00ede477731aa6e8714ff357efc9d7cda5934f1703b3b6fb7d3298dce3ce3ba53e9ff1158eab8f1aadc68874cc21a6099d33a1ca457789 + languageName: node + linkType: hard + +"@babel/plugin-transform-object-rest-spread@npm:^7.24.7": + version: 7.24.7 + resolution: "@babel/plugin-transform-object-rest-spread@npm:7.24.7" + dependencies: + "@babel/helper-compilation-targets": "npm:^7.24.7" + "@babel/helper-plugin-utils": "npm:^7.24.7" + "@babel/plugin-syntax-object-rest-spread": "npm:^7.8.3" + "@babel/plugin-transform-parameters": "npm:^7.24.7" + peerDependencies: + "@babel/core": ^7.0.0-0 + checksum: 10c0/9ad64bc003f583030f9da50614b485852f8edac93f8faf5d1cd855201a4852f37c5255ae4daf70dd4375bdd4874e16e39b91f680d4668ec219ba05441ce286eb + languageName: node + linkType: hard + +"@babel/plugin-transform-object-super@npm:^7.24.7": + version: 7.24.7 + resolution: "@babel/plugin-transform-object-super@npm:7.24.7" + dependencies: + "@babel/helper-plugin-utils": "npm:^7.24.7" + "@babel/helper-replace-supers": "npm:^7.24.7" + peerDependencies: + "@babel/core": ^7.0.0-0 + checksum: 10c0/770cebb4b4e1872c216b17069db9a13b87dfee747d359dc56d9fcdd66e7544f92dc6ab1861a4e7e0528196aaff2444e4f17dc84efd8eaf162d542b4ba0943869 + languageName: node + linkType: hard + +"@babel/plugin-transform-optional-catch-binding@npm:^7.24.7": + version: 7.24.7 + resolution: "@babel/plugin-transform-optional-catch-binding@npm:7.24.7" + dependencies: + "@babel/helper-plugin-utils": "npm:^7.24.7" + "@babel/plugin-syntax-optional-catch-binding": "npm:^7.8.3" + peerDependencies: + "@babel/core": ^7.0.0-0 + checksum: 10c0/1e2f10a018f7d03b3bde6c0b70d063df8d5dd5209861d4467726cf834f5e3d354e2276079dc226aa8e6ece35f5c9b264d64b8229a8bb232829c01e561bcfb07a + languageName: node + linkType: hard + +"@babel/plugin-transform-optional-chaining@npm:^7.24.7, @babel/plugin-transform-optional-chaining@npm:^7.24.8": + version: 7.24.8 + resolution: "@babel/plugin-transform-optional-chaining@npm:7.24.8" + dependencies: + "@babel/helper-plugin-utils": "npm:^7.24.8" + "@babel/helper-skip-transparent-expression-wrappers": "npm:^7.24.7" + "@babel/plugin-syntax-optional-chaining": "npm:^7.8.3" + peerDependencies: + "@babel/core": ^7.0.0-0 + checksum: 10c0/4ffbe1aad7dec7c9aa2bf6ceb4b2f91f96815b2784f2879bde80e46934f59d64a12cb2c6262e40897c4754d77d2c35d8a5cfed63044fdebf94978b1ed3d14b17 + languageName: node + linkType: hard + +"@babel/plugin-transform-parameters@npm:^7.24.7": + version: 7.24.7 + resolution: "@babel/plugin-transform-parameters@npm:7.24.7" + dependencies: + "@babel/helper-plugin-utils": "npm:^7.24.7" + peerDependencies: + "@babel/core": ^7.0.0-0 + checksum: 10c0/53bf190d6926771545d5184f1f5f3f5144d0f04f170799ad46a43f683a01fab8d5fe4d2196cf246774530990c31fe1f2b9f0def39f0a5ddbb2340b924f5edf01 + languageName: node + linkType: hard + +"@babel/plugin-transform-private-methods@npm:^7.24.7": + version: 7.24.7 + resolution: "@babel/plugin-transform-private-methods@npm:7.24.7" + dependencies: + "@babel/helper-create-class-features-plugin": "npm:^7.24.7" + 
"@babel/helper-plugin-utils": "npm:^7.24.7" + peerDependencies: + "@babel/core": ^7.0.0-0 + checksum: 10c0/5b7bf923b738fbe3ad6c33b260e0a7451be288edfe4ef516303fa787a1870cd87533bfbf61abb779c22ed003c2fc484dec2436fe75a48756f686c0241173d364 + languageName: node + linkType: hard + +"@babel/plugin-transform-private-property-in-object@npm:^7.24.7": + version: 7.24.7 + resolution: "@babel/plugin-transform-private-property-in-object@npm:7.24.7" + dependencies: + "@babel/helper-annotate-as-pure": "npm:^7.24.7" + "@babel/helper-create-class-features-plugin": "npm:^7.24.7" + "@babel/helper-plugin-utils": "npm:^7.24.7" + "@babel/plugin-syntax-private-property-in-object": "npm:^7.14.5" + peerDependencies: + "@babel/core": ^7.0.0-0 + checksum: 10c0/c6fa7defb90b1b0ed46f24ff94ff2e77f44c1f478d1090e81712f33cf992dda5ba347016f030082a2f770138bac6f4a9c2c1565e9f767a125901c77dd9c239ba + languageName: node + linkType: hard + +"@babel/plugin-transform-property-literals@npm:^7.24.7": + version: 7.24.7 + resolution: "@babel/plugin-transform-property-literals@npm:7.24.7" + dependencies: + "@babel/helper-plugin-utils": "npm:^7.24.7" + peerDependencies: + "@babel/core": ^7.0.0-0 + checksum: 10c0/52564b58f3d111dc02d241d5892a4b01512e98dfdf6ef11b0ed62f8b11b0acacccef0fc229b44114fe8d1a57a8b70780b11bdd18b807d3754a781a07d8f57433 + languageName: node + linkType: hard + +"@babel/plugin-transform-react-constant-elements@npm:^7.21.3": + version: 7.25.1 + resolution: "@babel/plugin-transform-react-constant-elements@npm:7.25.1" + dependencies: + "@babel/helper-plugin-utils": "npm:^7.24.8" + peerDependencies: + "@babel/core": ^7.0.0-0 + checksum: 10c0/8e9a61e8d74804ad3e4c8051463b2d8c42be5aa1f381f7b0db3ac8696a5cb5faead54036b1e4bcd53f6ab74c0bb3e45e4d9a1a2f50b9a575a8d7965b77d89c28 + languageName: node + linkType: hard + +"@babel/plugin-transform-react-display-name@npm:^7.24.7": + version: 7.24.7 + resolution: "@babel/plugin-transform-react-display-name@npm:7.24.7" + dependencies: + "@babel/helper-plugin-utils": "npm:^7.24.7" + peerDependencies: + "@babel/core": ^7.0.0-0 + checksum: 10c0/c14a07a9e75723c96f1a0a306b8a8e899ff1c6a0cc3d62bcda79bb1b54e4319127b258651c513a1a47da152cdc22e16525525a30ae5933a2980c7036fd0b4d24 + languageName: node + linkType: hard + +"@babel/plugin-transform-react-jsx-development@npm:^7.24.7": + version: 7.24.7 + resolution: "@babel/plugin-transform-react-jsx-development@npm:7.24.7" + dependencies: + "@babel/plugin-transform-react-jsx": "npm:^7.24.7" + peerDependencies: + "@babel/core": ^7.0.0-0 + checksum: 10c0/fce647db50f90a5291681f0f97865d9dc76981262dff71d6d0332e724b85343de5860c26f9e9a79e448d61e1d70916b07ce91e8c7f2b80dceb4b16aee41794d8 + languageName: node + linkType: hard + +"@babel/plugin-transform-react-jsx@npm:^7.24.7": + version: 7.25.2 + resolution: "@babel/plugin-transform-react-jsx@npm:7.25.2" + dependencies: + "@babel/helper-annotate-as-pure": "npm:^7.24.7" + "@babel/helper-module-imports": "npm:^7.24.7" + "@babel/helper-plugin-utils": "npm:^7.24.8" + "@babel/plugin-syntax-jsx": "npm:^7.24.7" + "@babel/types": "npm:^7.25.2" + peerDependencies: + "@babel/core": ^7.0.0-0 + checksum: 10c0/8c5b515f38118471197605e02bea54a8a4283010e3c55bad8cfb78de59ad63612b14d40baca63689afdc9d57b147aac4c7794fe5f7736c9e1ed6dd38784be624 + languageName: node + linkType: hard + +"@babel/plugin-transform-react-pure-annotations@npm:^7.24.7": + version: 7.24.7 + resolution: "@babel/plugin-transform-react-pure-annotations@npm:7.24.7" + dependencies: + "@babel/helper-annotate-as-pure": "npm:^7.24.7" + "@babel/helper-plugin-utils": 
"npm:^7.24.7" + peerDependencies: + "@babel/core": ^7.0.0-0 + checksum: 10c0/fae517d293d9c93b7b920458c3e4b91cb0400513889af41ba184a5f3acc8bfef27242cc262741bb8f87870df376f1733a0d0f52b966d342e2aaaf5607af8f73d + languageName: node + linkType: hard + +"@babel/plugin-transform-regenerator@npm:^7.24.7": + version: 7.24.7 + resolution: "@babel/plugin-transform-regenerator@npm:7.24.7" + dependencies: + "@babel/helper-plugin-utils": "npm:^7.24.7" + regenerator-transform: "npm:^0.15.2" + peerDependencies: + "@babel/core": ^7.0.0-0 + checksum: 10c0/d2dc2c788fdae9d97217e70d46ba8ca9db0035c398dc3e161552b0c437113719a75c04f201f9c91ddc8d28a1da60d0b0853f616dead98a396abb9c845c44892b + languageName: node + linkType: hard + +"@babel/plugin-transform-reserved-words@npm:^7.24.7": + version: 7.24.7 + resolution: "@babel/plugin-transform-reserved-words@npm:7.24.7" + dependencies: + "@babel/helper-plugin-utils": "npm:^7.24.7" + peerDependencies: + "@babel/core": ^7.0.0-0 + checksum: 10c0/2229de2768615e7f5dc0bbc55bc121b5678fd6d2febd46c74a58e42bb894d74cd5955c805880f4e02d0e1cf94f6886270eda7fafc1be9305a1ec3b9fd1d063f5 + languageName: node + linkType: hard + +"@babel/plugin-transform-runtime@npm:^7.22.9": + version: 7.24.7 + resolution: "@babel/plugin-transform-runtime@npm:7.24.7" + dependencies: + "@babel/helper-module-imports": "npm:^7.24.7" + "@babel/helper-plugin-utils": "npm:^7.24.7" + babel-plugin-polyfill-corejs2: "npm:^0.4.10" + babel-plugin-polyfill-corejs3: "npm:^0.10.1" + babel-plugin-polyfill-regenerator: "npm:^0.6.1" + semver: "npm:^6.3.1" + peerDependencies: + "@babel/core": ^7.0.0-0 + checksum: 10c0/a33f5095872bbba00b8ee553dfe6941477e69a017a2e65e9dd86e80dab5c627635093b796eb1eb22aaaf2f874704f63ad1d99b952b83b59ef6b368ae04e5bb41 + languageName: node + linkType: hard + +"@babel/plugin-transform-shorthand-properties@npm:^7.24.7": + version: 7.24.7 + resolution: "@babel/plugin-transform-shorthand-properties@npm:7.24.7" + dependencies: + "@babel/helper-plugin-utils": "npm:^7.24.7" + peerDependencies: + "@babel/core": ^7.0.0-0 + checksum: 10c0/41b155bdbb3be66618358488bf7731b3b2e8fff2de3dbfd541847720a9debfcec14db06a117abedd03c9cd786db20a79e2a86509a4f19513f6e1b610520905cf + languageName: node + linkType: hard + +"@babel/plugin-transform-spread@npm:^7.24.7": + version: 7.24.7 + resolution: "@babel/plugin-transform-spread@npm:7.24.7" + dependencies: + "@babel/helper-plugin-utils": "npm:^7.24.7" + "@babel/helper-skip-transparent-expression-wrappers": "npm:^7.24.7" + peerDependencies: + "@babel/core": ^7.0.0-0 + checksum: 10c0/facba1553035f76b0d2930d4ada89a8cd0f45b79579afd35baefbfaf12e3b86096995f4b0c402cf9ee23b3f2ea0a4460c3b1ec0c192d340962c948bb223d4e66 + languageName: node + linkType: hard + +"@babel/plugin-transform-sticky-regex@npm:^7.24.7": + version: 7.24.7 + resolution: "@babel/plugin-transform-sticky-regex@npm:7.24.7" + dependencies: + "@babel/helper-plugin-utils": "npm:^7.24.7" + peerDependencies: + "@babel/core": ^7.0.0-0 + checksum: 10c0/5a74ed2ed0a3ab51c3d15fcaf09d9e2fe915823535c7a4d7b019813177d559b69677090e189ec3d5d08b619483eb5ad371fbcfbbff5ace2a76ba33ee566a1109 + languageName: node + linkType: hard + +"@babel/plugin-transform-template-literals@npm:^7.24.7": + version: 7.24.7 + resolution: "@babel/plugin-transform-template-literals@npm:7.24.7" + dependencies: + "@babel/helper-plugin-utils": "npm:^7.24.7" + peerDependencies: + "@babel/core": ^7.0.0-0 + checksum: 10c0/3630f966257bcace122f04d3157416a09d40768c44c3a800855da81146b009187daa21859d1c3b7d13f4e19e8888e60613964b175b2275d451200fb6d8d6cfe6 + languageName: 
node + linkType: hard + +"@babel/plugin-transform-typeof-symbol@npm:^7.24.8": + version: 7.24.8 + resolution: "@babel/plugin-transform-typeof-symbol@npm:7.24.8" + dependencies: + "@babel/helper-plugin-utils": "npm:^7.24.8" + peerDependencies: + "@babel/core": ^7.0.0-0 + checksum: 10c0/2f570a4fbbdc5fd85f48165a97452826560051e3b8efb48c3bb0a0a33ee8485633439e7b71bfe3ef705583a1df43f854f49125bd759abdedc195b2cf7e60012a + languageName: node + linkType: hard + +"@babel/plugin-transform-typescript@npm:^7.24.7": + version: 7.25.2 + resolution: "@babel/plugin-transform-typescript@npm:7.25.2" + dependencies: + "@babel/helper-annotate-as-pure": "npm:^7.24.7" + "@babel/helper-create-class-features-plugin": "npm:^7.25.0" + "@babel/helper-plugin-utils": "npm:^7.24.8" + "@babel/helper-skip-transparent-expression-wrappers": "npm:^7.24.7" + "@babel/plugin-syntax-typescript": "npm:^7.24.7" + peerDependencies: + "@babel/core": ^7.0.0-0 + checksum: 10c0/b3c941da39ee7ecf72df1b78a01d4108160438245f2ab61befe182f51d17fd0034733c6d079b7efad81e03a66438aa3881a671cd68c5eb0fc775df86b88df996 + languageName: node + linkType: hard + +"@babel/plugin-transform-unicode-escapes@npm:^7.24.7": + version: 7.24.7 + resolution: "@babel/plugin-transform-unicode-escapes@npm:7.24.7" + dependencies: + "@babel/helper-plugin-utils": "npm:^7.24.7" + peerDependencies: + "@babel/core": ^7.0.0-0 + checksum: 10c0/8b18e2e66af33471a6971289492beff5c240e56727331db1d34c4338a6a368a82a7ed6d57ec911001b6d65643aed76531e1e7cac93265fb3fb2717f54d845e69 + languageName: node + linkType: hard + +"@babel/plugin-transform-unicode-property-regex@npm:^7.24.7": + version: 7.24.7 + resolution: "@babel/plugin-transform-unicode-property-regex@npm:7.24.7" + dependencies: + "@babel/helper-create-regexp-features-plugin": "npm:^7.24.7" + "@babel/helper-plugin-utils": "npm:^7.24.7" + peerDependencies: + "@babel/core": ^7.0.0-0 + checksum: 10c0/bc57656eb94584d1b74a385d378818ac2b3fca642e3f649fead8da5fb3f9de22f8461185936915dfb33d5a9104e62e7a47828331248b09d28bb2d59e9276de3e + languageName: node + linkType: hard + +"@babel/plugin-transform-unicode-regex@npm:^7.24.7": + version: 7.24.7 + resolution: "@babel/plugin-transform-unicode-regex@npm:7.24.7" + dependencies: + "@babel/helper-create-regexp-features-plugin": "npm:^7.24.7" + "@babel/helper-plugin-utils": "npm:^7.24.7" + peerDependencies: + "@babel/core": ^7.0.0-0 + checksum: 10c0/83f72a345b751566b601dc4d07e9f2c8f1bc0e0c6f7abb56ceb3095b3c9d304de73f85f2f477a09f8cc7edd5e65afd0ff9e376cdbcbea33bc0c28f3705b38fd9 + languageName: node + linkType: hard + +"@babel/plugin-transform-unicode-sets-regex@npm:^7.24.7": + version: 7.24.7 + resolution: "@babel/plugin-transform-unicode-sets-regex@npm:7.24.7" + dependencies: + "@babel/helper-create-regexp-features-plugin": "npm:^7.24.7" + "@babel/helper-plugin-utils": "npm:^7.24.7" + peerDependencies: + "@babel/core": ^7.0.0 + checksum: 10c0/7457c0ee8e80a80cb6fdc1fe54ab115b52815627616ce9151be8ef292fc99d04a910ec24f11382b4f124b89374264396892b086886bd2a9c2317904d87c9b21b + languageName: node + linkType: hard + +"@babel/preset-env@npm:^7.20.2, @babel/preset-env@npm:^7.22.9": + version: 7.25.3 + resolution: "@babel/preset-env@npm:7.25.3" + dependencies: + "@babel/compat-data": "npm:^7.25.2" + "@babel/helper-compilation-targets": "npm:^7.25.2" + "@babel/helper-plugin-utils": "npm:^7.24.8" + "@babel/helper-validator-option": "npm:^7.24.8" + "@babel/plugin-bugfix-firefox-class-in-computed-class-key": "npm:^7.25.3" + "@babel/plugin-bugfix-safari-class-field-initializer-scope": "npm:^7.25.0" + 
"@babel/plugin-bugfix-safari-id-destructuring-collision-in-function-expression": "npm:^7.25.0" + "@babel/plugin-bugfix-v8-spread-parameters-in-optional-chaining": "npm:^7.24.7" + "@babel/plugin-bugfix-v8-static-class-fields-redefine-readonly": "npm:^7.25.0" + "@babel/plugin-proposal-private-property-in-object": "npm:7.21.0-placeholder-for-preset-env.2" + "@babel/plugin-syntax-async-generators": "npm:^7.8.4" + "@babel/plugin-syntax-class-properties": "npm:^7.12.13" + "@babel/plugin-syntax-class-static-block": "npm:^7.14.5" + "@babel/plugin-syntax-dynamic-import": "npm:^7.8.3" + "@babel/plugin-syntax-export-namespace-from": "npm:^7.8.3" + "@babel/plugin-syntax-import-assertions": "npm:^7.24.7" + "@babel/plugin-syntax-import-attributes": "npm:^7.24.7" + "@babel/plugin-syntax-import-meta": "npm:^7.10.4" + "@babel/plugin-syntax-json-strings": "npm:^7.8.3" + "@babel/plugin-syntax-logical-assignment-operators": "npm:^7.10.4" + "@babel/plugin-syntax-nullish-coalescing-operator": "npm:^7.8.3" + "@babel/plugin-syntax-numeric-separator": "npm:^7.10.4" + "@babel/plugin-syntax-object-rest-spread": "npm:^7.8.3" + "@babel/plugin-syntax-optional-catch-binding": "npm:^7.8.3" + "@babel/plugin-syntax-optional-chaining": "npm:^7.8.3" + "@babel/plugin-syntax-private-property-in-object": "npm:^7.14.5" + "@babel/plugin-syntax-top-level-await": "npm:^7.14.5" + "@babel/plugin-syntax-unicode-sets-regex": "npm:^7.18.6" + "@babel/plugin-transform-arrow-functions": "npm:^7.24.7" + "@babel/plugin-transform-async-generator-functions": "npm:^7.25.0" + "@babel/plugin-transform-async-to-generator": "npm:^7.24.7" + "@babel/plugin-transform-block-scoped-functions": "npm:^7.24.7" + "@babel/plugin-transform-block-scoping": "npm:^7.25.0" + "@babel/plugin-transform-class-properties": "npm:^7.24.7" + "@babel/plugin-transform-class-static-block": "npm:^7.24.7" + "@babel/plugin-transform-classes": "npm:^7.25.0" + "@babel/plugin-transform-computed-properties": "npm:^7.24.7" + "@babel/plugin-transform-destructuring": "npm:^7.24.8" + "@babel/plugin-transform-dotall-regex": "npm:^7.24.7" + "@babel/plugin-transform-duplicate-keys": "npm:^7.24.7" + "@babel/plugin-transform-duplicate-named-capturing-groups-regex": "npm:^7.25.0" + "@babel/plugin-transform-dynamic-import": "npm:^7.24.7" + "@babel/plugin-transform-exponentiation-operator": "npm:^7.24.7" + "@babel/plugin-transform-export-namespace-from": "npm:^7.24.7" + "@babel/plugin-transform-for-of": "npm:^7.24.7" + "@babel/plugin-transform-function-name": "npm:^7.25.1" + "@babel/plugin-transform-json-strings": "npm:^7.24.7" + "@babel/plugin-transform-literals": "npm:^7.25.2" + "@babel/plugin-transform-logical-assignment-operators": "npm:^7.24.7" + "@babel/plugin-transform-member-expression-literals": "npm:^7.24.7" + "@babel/plugin-transform-modules-amd": "npm:^7.24.7" + "@babel/plugin-transform-modules-commonjs": "npm:^7.24.8" + "@babel/plugin-transform-modules-systemjs": "npm:^7.25.0" + "@babel/plugin-transform-modules-umd": "npm:^7.24.7" + "@babel/plugin-transform-named-capturing-groups-regex": "npm:^7.24.7" + "@babel/plugin-transform-new-target": "npm:^7.24.7" + "@babel/plugin-transform-nullish-coalescing-operator": "npm:^7.24.7" + "@babel/plugin-transform-numeric-separator": "npm:^7.24.7" + "@babel/plugin-transform-object-rest-spread": "npm:^7.24.7" + "@babel/plugin-transform-object-super": "npm:^7.24.7" + "@babel/plugin-transform-optional-catch-binding": "npm:^7.24.7" + "@babel/plugin-transform-optional-chaining": "npm:^7.24.8" + "@babel/plugin-transform-parameters": "npm:^7.24.7" + 
"@babel/plugin-transform-private-methods": "npm:^7.24.7" + "@babel/plugin-transform-private-property-in-object": "npm:^7.24.7" + "@babel/plugin-transform-property-literals": "npm:^7.24.7" + "@babel/plugin-transform-regenerator": "npm:^7.24.7" + "@babel/plugin-transform-reserved-words": "npm:^7.24.7" + "@babel/plugin-transform-shorthand-properties": "npm:^7.24.7" + "@babel/plugin-transform-spread": "npm:^7.24.7" + "@babel/plugin-transform-sticky-regex": "npm:^7.24.7" + "@babel/plugin-transform-template-literals": "npm:^7.24.7" + "@babel/plugin-transform-typeof-symbol": "npm:^7.24.8" + "@babel/plugin-transform-unicode-escapes": "npm:^7.24.7" + "@babel/plugin-transform-unicode-property-regex": "npm:^7.24.7" + "@babel/plugin-transform-unicode-regex": "npm:^7.24.7" + "@babel/plugin-transform-unicode-sets-regex": "npm:^7.24.7" + "@babel/preset-modules": "npm:0.1.6-no-external-plugins" + babel-plugin-polyfill-corejs2: "npm:^0.4.10" + babel-plugin-polyfill-corejs3: "npm:^0.10.4" + babel-plugin-polyfill-regenerator: "npm:^0.6.1" + core-js-compat: "npm:^3.37.1" + semver: "npm:^6.3.1" + peerDependencies: + "@babel/core": ^7.0.0-0 + checksum: 10c0/9287dc2e296fe2aa3367d84c2a799db17c9d1e48bba86525f47c6f51f5ba2e2cce454f45f4ae2ef928f9077c0640b04556b55b94835675ceeca94a0c5133205e + languageName: node + linkType: hard + +"@babel/preset-modules@npm:0.1.6-no-external-plugins": + version: 0.1.6-no-external-plugins + resolution: "@babel/preset-modules@npm:0.1.6-no-external-plugins" + dependencies: + "@babel/helper-plugin-utils": "npm:^7.0.0" + "@babel/types": "npm:^7.4.4" + esutils: "npm:^2.0.2" + peerDependencies: + "@babel/core": ^7.0.0-0 || ^8.0.0-0 <8.0.0 + checksum: 10c0/9d02f70d7052446c5f3a4fb39e6b632695fb6801e46d31d7f7c5001f7c18d31d1ea8369212331ca7ad4e7877b73231f470b0d559162624128f1b80fe591409e6 + languageName: node + linkType: hard + +"@babel/preset-react@npm:^7.18.6, @babel/preset-react@npm:^7.22.5": + version: 7.24.7 + resolution: "@babel/preset-react@npm:7.24.7" + dependencies: + "@babel/helper-plugin-utils": "npm:^7.24.7" + "@babel/helper-validator-option": "npm:^7.24.7" + "@babel/plugin-transform-react-display-name": "npm:^7.24.7" + "@babel/plugin-transform-react-jsx": "npm:^7.24.7" + "@babel/plugin-transform-react-jsx-development": "npm:^7.24.7" + "@babel/plugin-transform-react-pure-annotations": "npm:^7.24.7" + peerDependencies: + "@babel/core": ^7.0.0-0 + checksum: 10c0/9658b685b25cedaadd0b65c4e663fbc7f57394b5036ddb4c99b1a75b0711fb83292c1c625d605c05b73413fc7a6dc20e532627f6a39b6dc8d4e00415479b054c + languageName: node + linkType: hard + +"@babel/preset-typescript@npm:^7.21.0, @babel/preset-typescript@npm:^7.22.5": + version: 7.24.7 + resolution: "@babel/preset-typescript@npm:7.24.7" + dependencies: + "@babel/helper-plugin-utils": "npm:^7.24.7" + "@babel/helper-validator-option": "npm:^7.24.7" + "@babel/plugin-syntax-jsx": "npm:^7.24.7" + "@babel/plugin-transform-modules-commonjs": "npm:^7.24.7" + "@babel/plugin-transform-typescript": "npm:^7.24.7" + peerDependencies: + "@babel/core": ^7.0.0-0 + checksum: 10c0/986bc0978eedb4da33aba8e1e13a3426dd1829515313b7e8f4ba5d8c18aff1663b468939d471814e7acf4045d326ae6cff37239878d169ac3fe53a8fde71f8ee + languageName: node + linkType: hard + +"@babel/regjsgen@npm:^0.8.0": + version: 0.8.0 + resolution: "@babel/regjsgen@npm:0.8.0" + checksum: 10c0/4f3ddd8c7c96d447e05c8304c1d5ba3a83fcabd8a716bc1091c2f31595cdd43a3a055fff7cb5d3042b8cb7d402d78820fcb4e05d896c605a7d8bcf30f2424c4a + languageName: node + linkType: hard + +"@babel/runtime-corejs3@npm:^7.22.6": + version: 
7.25.0 + resolution: "@babel/runtime-corejs3@npm:7.25.0" + dependencies: + core-js-pure: "npm:^3.30.2" + regenerator-runtime: "npm:^0.14.0" + checksum: 10c0/7c9e7896749b5968bc6a7638cf1735e5d2dc791780f4f46daf15a45777780cd0485d1357e92f54b03f815269064dc84d771e83486d49e18b847ffa8cfb6a6afa + languageName: node + linkType: hard + +"@babel/runtime@npm:^7.1.2, @babel/runtime@npm:^7.10.3, @babel/runtime@npm:^7.12.13, @babel/runtime@npm:^7.12.5, @babel/runtime@npm:^7.22.6, @babel/runtime@npm:^7.8.4": + version: 7.25.0 + resolution: "@babel/runtime@npm:7.25.0" + dependencies: + regenerator-runtime: "npm:^0.14.0" + checksum: 10c0/bd3faf246170826cef2071a94d7b47b49d532351360ecd17722d03f6713fd93a3eb3dbd9518faa778d5e8ccad7392a7a604e56bd37aaad3f3aa68d619ccd983d + languageName: node + linkType: hard + +"@babel/template@npm:^7.24.7, @babel/template@npm:^7.25.0": + version: 7.25.0 + resolution: "@babel/template@npm:7.25.0" + dependencies: + "@babel/code-frame": "npm:^7.24.7" + "@babel/parser": "npm:^7.25.0" + "@babel/types": "npm:^7.25.0" + checksum: 10c0/4e31afd873215744c016e02b04f43b9fa23205d6d0766fb2e93eb4091c60c1b88897936adb895fb04e3c23de98dfdcbe31bc98daaa1a4e0133f78bb948e1209b + languageName: node + linkType: hard + +"@babel/traverse@npm:^7.22.8, @babel/traverse@npm:^7.24.7, @babel/traverse@npm:^7.24.8, @babel/traverse@npm:^7.25.0, @babel/traverse@npm:^7.25.1, @babel/traverse@npm:^7.25.2, @babel/traverse@npm:^7.25.3": + version: 7.25.3 + resolution: "@babel/traverse@npm:7.25.3" + dependencies: + "@babel/code-frame": "npm:^7.24.7" + "@babel/generator": "npm:^7.25.0" + "@babel/parser": "npm:^7.25.3" + "@babel/template": "npm:^7.25.0" + "@babel/types": "npm:^7.25.2" + debug: "npm:^4.3.1" + globals: "npm:^11.1.0" + checksum: 10c0/4c8a1966fa90b53a783a4afd2fcdaa6ab1a912e6621dca9fcc6633e80ccb9491620e88caf73b537da4e16cefd537b548c87d7087868d5b0066414dea375c0e9b + languageName: node + linkType: hard + +"@babel/types@npm:^7.0.0, @babel/types@npm:^7.20.7, @babel/types@npm:^7.21.3, @babel/types@npm:^7.24.7, @babel/types@npm:^7.24.8, @babel/types@npm:^7.25.0, @babel/types@npm:^7.25.2, @babel/types@npm:^7.4.4": + version: 7.25.2 + resolution: "@babel/types@npm:7.25.2" + dependencies: + "@babel/helper-string-parser": "npm:^7.24.8" + "@babel/helper-validator-identifier": "npm:^7.24.7" + to-fast-properties: "npm:^2.0.0" + checksum: 10c0/e489435856be239f8cc1120c90a197e4c2865385121908e5edb7223cfdff3768cba18f489adfe0c26955d9e7bbb1fb10625bc2517505908ceb0af848989bd864 + languageName: node + linkType: hard + +"@braintree/sanitize-url@npm:^6.0.1": + version: 6.0.4 + resolution: "@braintree/sanitize-url@npm:6.0.4" + checksum: 10c0/5d7bac57f3e49931db83f65aaa4fd22f96caa323bf0c7fcf6851fdbed179a8cf29eaa5dd372d340fc51ca5f44345ea5bc0196b36c8b16179888a7c9044313420 + languageName: node + linkType: hard + +"@colors/colors@npm:1.5.0": + version: 1.5.0 + resolution: "@colors/colors@npm:1.5.0" + checksum: 10c0/eb42729851adca56d19a08e48d5a1e95efd2a32c55ae0323de8119052be0510d4b7a1611f2abcbf28c044a6c11e6b7d38f99fccdad7429300c37a8ea5fb95b44 + languageName: node + linkType: hard + +"@discoveryjs/json-ext@npm:0.5.7": + version: 0.5.7 + resolution: "@discoveryjs/json-ext@npm:0.5.7" + checksum: 10c0/e10f1b02b78e4812646ddf289b7d9f2cb567d336c363b266bd50cd223cf3de7c2c74018d91cd2613041568397ef3a4a2b500aba588c6e5bd78c38374ba68f38c + languageName: node + linkType: hard + +"@docsearch/css@npm:3.6.1": + version: 3.6.1 + resolution: "@docsearch/css@npm:3.6.1" + checksum: 
10c0/546b7b725044d006fe5fd2061763fbd1f944d9db21c7b86adb2d11e7bd5eee41b102f1ecccb001bb1603ef7503282cc9ad204482db62e4bc0b038c46a9cd9e6d + languageName: node + linkType: hard + +"@docsearch/react@npm:^3.5.2": + version: 3.6.1 + resolution: "@docsearch/react@npm:3.6.1" + dependencies: + "@algolia/autocomplete-core": "npm:1.9.3" + "@algolia/autocomplete-preset-algolia": "npm:1.9.3" + "@docsearch/css": "npm:3.6.1" + algoliasearch: "npm:^4.19.1" + peerDependencies: + "@types/react": ">= 16.8.0 < 19.0.0" + react: ">= 16.8.0 < 19.0.0" + react-dom: ">= 16.8.0 < 19.0.0" + search-insights: ">= 1 < 3" + peerDependenciesMeta: + "@types/react": + optional: true + react: + optional: true + react-dom: + optional: true + search-insights: + optional: true + checksum: 10c0/890d46ed1f971a6af9f64377c9e510e4b39324bfedcc143c7bd35ba883f8fdac3dc844b0a0000059fd3dec16a0443e7f723d65c468ca7bafd03be546caf38479 + languageName: node + linkType: hard + +"@docusaurus/core@npm:3.5.2": + version: 3.5.2 + resolution: "@docusaurus/core@npm:3.5.2" + dependencies: + "@babel/core": "npm:^7.23.3" + "@babel/generator": "npm:^7.23.3" + "@babel/plugin-syntax-dynamic-import": "npm:^7.8.3" + "@babel/plugin-transform-runtime": "npm:^7.22.9" + "@babel/preset-env": "npm:^7.22.9" + "@babel/preset-react": "npm:^7.22.5" + "@babel/preset-typescript": "npm:^7.22.5" + "@babel/runtime": "npm:^7.22.6" + "@babel/runtime-corejs3": "npm:^7.22.6" + "@babel/traverse": "npm:^7.22.8" + "@docusaurus/cssnano-preset": "npm:3.5.2" + "@docusaurus/logger": "npm:3.5.2" + "@docusaurus/mdx-loader": "npm:3.5.2" + "@docusaurus/utils": "npm:3.5.2" + "@docusaurus/utils-common": "npm:3.5.2" + "@docusaurus/utils-validation": "npm:3.5.2" + autoprefixer: "npm:^10.4.14" + babel-loader: "npm:^9.1.3" + babel-plugin-dynamic-import-node: "npm:^2.3.3" + boxen: "npm:^6.2.1" + chalk: "npm:^4.1.2" + chokidar: "npm:^3.5.3" + clean-css: "npm:^5.3.2" + cli-table3: "npm:^0.6.3" + combine-promises: "npm:^1.1.0" + commander: "npm:^5.1.0" + copy-webpack-plugin: "npm:^11.0.0" + core-js: "npm:^3.31.1" + css-loader: "npm:^6.8.1" + css-minimizer-webpack-plugin: "npm:^5.0.1" + cssnano: "npm:^6.1.2" + del: "npm:^6.1.1" + detect-port: "npm:^1.5.1" + escape-html: "npm:^1.0.3" + eta: "npm:^2.2.0" + eval: "npm:^0.1.8" + file-loader: "npm:^6.2.0" + fs-extra: "npm:^11.1.1" + html-minifier-terser: "npm:^7.2.0" + html-tags: "npm:^3.3.1" + html-webpack-plugin: "npm:^5.5.3" + leven: "npm:^3.1.0" + lodash: "npm:^4.17.21" + mini-css-extract-plugin: "npm:^2.7.6" + p-map: "npm:^4.0.0" + postcss: "npm:^8.4.26" + postcss-loader: "npm:^7.3.3" + prompts: "npm:^2.4.2" + react-dev-utils: "npm:^12.0.1" + react-helmet-async: "npm:^1.3.0" + react-loadable: "npm:@docusaurus/react-loadable@6.0.0" + react-loadable-ssr-addon-v5-slorber: "npm:^1.0.1" + react-router: "npm:^5.3.4" + react-router-config: "npm:^5.1.1" + react-router-dom: "npm:^5.3.4" + rtl-detect: "npm:^1.0.4" + semver: "npm:^7.5.4" + serve-handler: "npm:^6.1.5" + shelljs: "npm:^0.8.5" + terser-webpack-plugin: "npm:^5.3.9" + tslib: "npm:^2.6.0" + update-notifier: "npm:^6.0.2" + url-loader: "npm:^4.1.1" + webpack: "npm:^5.88.1" + webpack-bundle-analyzer: "npm:^4.9.0" + webpack-dev-server: "npm:^4.15.1" + webpack-merge: "npm:^5.9.0" + webpackbar: "npm:^5.0.2" + peerDependencies: + "@mdx-js/react": ^3.0.0 + react: ^18.0.0 + react-dom: ^18.0.0 + bin: + docusaurus: bin/docusaurus.mjs + checksum: 10c0/0868fc7cfbc38e7d927d60e927abf883fe442fe723123a58425a5402905a48bfb57b4e59ff555944af54ad3be462380d43e0f737989f6f300f11df2ca29d0498 + languageName: node + linkType: 
hard + +"@docusaurus/cssnano-preset@npm:3.5.2": + version: 3.5.2 + resolution: "@docusaurus/cssnano-preset@npm:3.5.2" + dependencies: + cssnano-preset-advanced: "npm:^6.1.2" + postcss: "npm:^8.4.38" + postcss-sort-media-queries: "npm:^5.2.0" + tslib: "npm:^2.6.0" + checksum: 10c0/10fd97d66aa7973d86322ac205978edc18636e13dc1f5eb7e6fca5169c4203660bd958f2a483a2b1639d05c1878f5d0eb5f07676eee5d5aa3b71b417d35fa42a + languageName: node + linkType: hard + +"@docusaurus/eslint-plugin@npm:^3.5.2": + version: 3.5.2 + resolution: "@docusaurus/eslint-plugin@npm:3.5.2" + dependencies: + "@typescript-eslint/utils": "npm:^5.62.0" + tslib: "npm:^2.6.0" + peerDependencies: + eslint: ">=6" + checksum: 10c0/7c8e617e45047af20df2099f3d6e2fb63a80213c7cb1236da2fbebb51e53b71c4c78436caeb3fe85f4410b2073fee8480be65be4da1093147d4ea2f4975b63de + languageName: node + linkType: hard + +"@docusaurus/logger@npm:3.5.2": + version: 3.5.2 + resolution: "@docusaurus/logger@npm:3.5.2" + dependencies: + chalk: "npm:^4.1.2" + tslib: "npm:^2.6.0" + checksum: 10c0/5360228a980c024445483c88e14c2f2e69ca7b8386c0c39bd147307b0296277fdf06c27e43dba0e43d9ea6abee7b0269a4d6fe166e57ad5ffb2e093759ff6c03 + languageName: node + linkType: hard + +"@docusaurus/lqip-loader@npm:3.5.2": + version: 3.5.2 + resolution: "@docusaurus/lqip-loader@npm:3.5.2" + dependencies: + "@docusaurus/logger": "npm:3.5.2" + file-loader: "npm:^6.2.0" + lodash: "npm:^4.17.21" + sharp: "npm:^0.32.3" + tslib: "npm:^2.6.0" + checksum: 10c0/44d78834b638bf4e47d527d79ae130f480520cb86133dd18e1e10d0865e891c1a287435475da99f48c8d8ca3d4f863573ab37e0b56bf653f894627dde192e36b + languageName: node + linkType: hard + +"@docusaurus/mdx-loader@npm:3.5.2": + version: 3.5.2 + resolution: "@docusaurus/mdx-loader@npm:3.5.2" + dependencies: + "@docusaurus/logger": "npm:3.5.2" + "@docusaurus/utils": "npm:3.5.2" + "@docusaurus/utils-validation": "npm:3.5.2" + "@mdx-js/mdx": "npm:^3.0.0" + "@slorber/remark-comment": "npm:^1.0.0" + escape-html: "npm:^1.0.3" + estree-util-value-to-estree: "npm:^3.0.1" + file-loader: "npm:^6.2.0" + fs-extra: "npm:^11.1.1" + image-size: "npm:^1.0.2" + mdast-util-mdx: "npm:^3.0.0" + mdast-util-to-string: "npm:^4.0.0" + rehype-raw: "npm:^7.0.0" + remark-directive: "npm:^3.0.0" + remark-emoji: "npm:^4.0.0" + remark-frontmatter: "npm:^5.0.0" + remark-gfm: "npm:^4.0.0" + stringify-object: "npm:^3.3.0" + tslib: "npm:^2.6.0" + unified: "npm:^11.0.3" + unist-util-visit: "npm:^5.0.0" + url-loader: "npm:^4.1.1" + vfile: "npm:^6.0.1" + webpack: "npm:^5.88.1" + peerDependencies: + react: ^18.0.0 + react-dom: ^18.0.0 + checksum: 10c0/52f193578cd3f369c155a2a7a5db532dc482ecb460e3b32ca1111e0036ea8939bfaf4094860929510e639f9a00d1edbbedc797ccdef9eddc381bedaa255d5ab3 + languageName: node + linkType: hard + +"@docusaurus/module-type-aliases@npm:3.5.2": + version: 3.5.2 + resolution: "@docusaurus/module-type-aliases@npm:3.5.2" + dependencies: + "@docusaurus/types": "npm:3.5.2" + "@types/history": "npm:^4.7.11" + "@types/react": "npm:*" + "@types/react-router-config": "npm:*" + "@types/react-router-dom": "npm:*" + react-helmet-async: "npm:*" + react-loadable: "npm:@docusaurus/react-loadable@6.0.0" + peerDependencies: + react: "*" + react-dom: "*" + checksum: 10c0/5174c8ad4a545b4ef8aa16bae6f6a2d501ab0d4ddd400cca83c55b6b35eac79b1d7cff52d6041da4f0f339a969d72be1f40e57d5ea73a50a61e0688505627e0c + languageName: node + linkType: hard + +"@docusaurus/plugin-content-blog@npm:3.5.2": + version: 3.5.2 + resolution: "@docusaurus/plugin-content-blog@npm:3.5.2" + dependencies: + "@docusaurus/core": 
"npm:3.5.2" + "@docusaurus/logger": "npm:3.5.2" + "@docusaurus/mdx-loader": "npm:3.5.2" + "@docusaurus/theme-common": "npm:3.5.2" + "@docusaurus/types": "npm:3.5.2" + "@docusaurus/utils": "npm:3.5.2" + "@docusaurus/utils-common": "npm:3.5.2" + "@docusaurus/utils-validation": "npm:3.5.2" + cheerio: "npm:1.0.0-rc.12" + feed: "npm:^4.2.2" + fs-extra: "npm:^11.1.1" + lodash: "npm:^4.17.21" + reading-time: "npm:^1.5.0" + srcset: "npm:^4.0.0" + tslib: "npm:^2.6.0" + unist-util-visit: "npm:^5.0.0" + utility-types: "npm:^3.10.0" + webpack: "npm:^5.88.1" + peerDependencies: + "@docusaurus/plugin-content-docs": "*" + react: ^18.0.0 + react-dom: ^18.0.0 + checksum: 10c0/0cdd4944e19c4ed02783be311dd735728a03282585517f48277358373cf46740b5659daa14bdaf58f80e0f949579a97110aa785a15333ad420154acc997471e6 + languageName: node + linkType: hard + +"@docusaurus/plugin-content-docs@npm:3.5.2": + version: 3.5.2 + resolution: "@docusaurus/plugin-content-docs@npm:3.5.2" + dependencies: + "@docusaurus/core": "npm:3.5.2" + "@docusaurus/logger": "npm:3.5.2" + "@docusaurus/mdx-loader": "npm:3.5.2" + "@docusaurus/module-type-aliases": "npm:3.5.2" + "@docusaurus/theme-common": "npm:3.5.2" + "@docusaurus/types": "npm:3.5.2" + "@docusaurus/utils": "npm:3.5.2" + "@docusaurus/utils-common": "npm:3.5.2" + "@docusaurus/utils-validation": "npm:3.5.2" + "@types/react-router-config": "npm:^5.0.7" + combine-promises: "npm:^1.1.0" + fs-extra: "npm:^11.1.1" + js-yaml: "npm:^4.1.0" + lodash: "npm:^4.17.21" + tslib: "npm:^2.6.0" + utility-types: "npm:^3.10.0" + webpack: "npm:^5.88.1" + peerDependencies: + react: ^18.0.0 + react-dom: ^18.0.0 + checksum: 10c0/fd245e323bd2735c9a65bbb50c8411db3bf8b562ad812ef92c4637554b1606aeaf2f2da95ea447a6fb158d96836677d7f95a6a006dae3c4730c231c5527fd7ce + languageName: node + linkType: hard + +"@docusaurus/plugin-content-pages@npm:3.5.2": + version: 3.5.2 + resolution: "@docusaurus/plugin-content-pages@npm:3.5.2" + dependencies: + "@docusaurus/core": "npm:3.5.2" + "@docusaurus/mdx-loader": "npm:3.5.2" + "@docusaurus/types": "npm:3.5.2" + "@docusaurus/utils": "npm:3.5.2" + "@docusaurus/utils-validation": "npm:3.5.2" + fs-extra: "npm:^11.1.1" + tslib: "npm:^2.6.0" + webpack: "npm:^5.88.1" + peerDependencies: + react: ^18.0.0 + react-dom: ^18.0.0 + checksum: 10c0/4ca00fad896976095a64f485c6b58da5426fb8301921b2d3099d3604f3a3485461543e373415b54ce743104ff67f54e4f6fb4364547fce3d8c88be57e1c87426 + languageName: node + linkType: hard + +"@docusaurus/plugin-debug@npm:3.5.2": + version: 3.5.2 + resolution: "@docusaurus/plugin-debug@npm:3.5.2" + dependencies: + "@docusaurus/core": "npm:3.5.2" + "@docusaurus/types": "npm:3.5.2" + "@docusaurus/utils": "npm:3.5.2" + fs-extra: "npm:^11.1.1" + react-json-view-lite: "npm:^1.2.0" + tslib: "npm:^2.6.0" + peerDependencies: + react: ^18.0.0 + react-dom: ^18.0.0 + checksum: 10c0/2d47f01154a026b9c9028df72fa87a633772c5079501a8e7c48ca48ba87fd1f4ec6e7e277c8123315cccbc43a9897e45e8a0b8b975cc337a74316eee03f7b320 + languageName: node + linkType: hard + +"@docusaurus/plugin-google-analytics@npm:3.5.2": + version: 3.5.2 + resolution: "@docusaurus/plugin-google-analytics@npm:3.5.2" + dependencies: + "@docusaurus/core": "npm:3.5.2" + "@docusaurus/types": "npm:3.5.2" + "@docusaurus/utils-validation": "npm:3.5.2" + tslib: "npm:^2.6.0" + peerDependencies: + react: ^18.0.0 + react-dom: ^18.0.0 + checksum: 10c0/19e2fbdb625a0345c7f5571ae39fae5803b32933f7f69ba481daf56b4640d68c899049a8c0a7a774e533723364361a7e56839e4fd279940717c5c35d66c226b5 + languageName: node + linkType: hard + 
+"@docusaurus/plugin-google-gtag@npm:3.5.2": + version: 3.5.2 + resolution: "@docusaurus/plugin-google-gtag@npm:3.5.2" + dependencies: + "@docusaurus/core": "npm:3.5.2" + "@docusaurus/types": "npm:3.5.2" + "@docusaurus/utils-validation": "npm:3.5.2" + "@types/gtag.js": "npm:^0.0.12" + tslib: "npm:^2.6.0" + peerDependencies: + react: ^18.0.0 + react-dom: ^18.0.0 + checksum: 10c0/ba502ae3e0b766b8eebafe89935365199cbc66f9d472950d3d95362619b1f78dddf8e45a73c7e9a1040be965b927ea5ce76037b3f7ee5443c25cab8e6e232934 + languageName: node + linkType: hard + +"@docusaurus/plugin-google-tag-manager@npm:3.5.2": + version: 3.5.2 + resolution: "@docusaurus/plugin-google-tag-manager@npm:3.5.2" + dependencies: + "@docusaurus/core": "npm:3.5.2" + "@docusaurus/types": "npm:3.5.2" + "@docusaurus/utils-validation": "npm:3.5.2" + tslib: "npm:^2.6.0" + peerDependencies: + react: ^18.0.0 + react-dom: ^18.0.0 + checksum: 10c0/067eed163b41ac03e85b70ec677525479bae6f4b7137e837d81dd48d03ab8c246b52be3236283cbc4607039beddc618adcfe451f91b19e2d41d343cd0952bd73 + languageName: node + linkType: hard + +"@docusaurus/plugin-ideal-image@npm:^3.5.2": + version: 3.5.2 + resolution: "@docusaurus/plugin-ideal-image@npm:3.5.2" + dependencies: + "@docusaurus/core": "npm:3.5.2" + "@docusaurus/lqip-loader": "npm:3.5.2" + "@docusaurus/responsive-loader": "npm:^1.7.0" + "@docusaurus/theme-translations": "npm:3.5.2" + "@docusaurus/types": "npm:3.5.2" + "@docusaurus/utils-validation": "npm:3.5.2" + "@slorber/react-ideal-image": "npm:^0.0.12" + react-waypoint: "npm:^10.3.0" + sharp: "npm:^0.32.3" + tslib: "npm:^2.6.0" + webpack: "npm:^5.88.1" + peerDependencies: + jimp: "*" + react: ^18.0.0 + react-dom: ^18.0.0 + peerDependenciesMeta: + jimp: + optional: true + checksum: 10c0/e283632e1d8eab316da6d6bfc448ba5dbbab4670c1ff3217d0b9286407561091ea1a0369a155d1861ad31bdaef75b64e855b3a831223de9f633ad0074642165e + languageName: node + linkType: hard + +"@docusaurus/plugin-sitemap@npm:3.5.2": + version: 3.5.2 + resolution: "@docusaurus/plugin-sitemap@npm:3.5.2" + dependencies: + "@docusaurus/core": "npm:3.5.2" + "@docusaurus/logger": "npm:3.5.2" + "@docusaurus/types": "npm:3.5.2" + "@docusaurus/utils": "npm:3.5.2" + "@docusaurus/utils-common": "npm:3.5.2" + "@docusaurus/utils-validation": "npm:3.5.2" + fs-extra: "npm:^11.1.1" + sitemap: "npm:^7.1.1" + tslib: "npm:^2.6.0" + peerDependencies: + react: ^18.0.0 + react-dom: ^18.0.0 + checksum: 10c0/9490c3a11869fb50abe7d8d9c235d57b18247a2dbe59d2351a6a919f0a4cf5445879e019db049a5dd55cbbb1ce0e19d5f1342e368e593408652f48d19331f961 + languageName: node + linkType: hard + +"@docusaurus/preset-classic@npm:3.5.2": + version: 3.5.2 + resolution: "@docusaurus/preset-classic@npm:3.5.2" + dependencies: + "@docusaurus/core": "npm:3.5.2" + "@docusaurus/plugin-content-blog": "npm:3.5.2" + "@docusaurus/plugin-content-docs": "npm:3.5.2" + "@docusaurus/plugin-content-pages": "npm:3.5.2" + "@docusaurus/plugin-debug": "npm:3.5.2" + "@docusaurus/plugin-google-analytics": "npm:3.5.2" + "@docusaurus/plugin-google-gtag": "npm:3.5.2" + "@docusaurus/plugin-google-tag-manager": "npm:3.5.2" + "@docusaurus/plugin-sitemap": "npm:3.5.2" + "@docusaurus/theme-classic": "npm:3.5.2" + "@docusaurus/theme-common": "npm:3.5.2" + "@docusaurus/theme-search-algolia": "npm:3.5.2" + "@docusaurus/types": "npm:3.5.2" + peerDependencies: + react: ^18.0.0 + react-dom: ^18.0.0 + checksum: 10c0/ea15474b01399a7bf05d6fd8b0edbf2856ffc83baa0d726b6e90c365ffc93ed39a78ac3d5690750f43051387ff96a8b455927ffa712f4589f4e4b45a4490aaaa + languageName: node + linkType: hard 
+ +"@docusaurus/responsive-loader@npm:^1.7.0": + version: 1.7.0 + resolution: "@docusaurus/responsive-loader@npm:1.7.0" + dependencies: + loader-utils: "npm:^2.0.0" + peerDependencies: + jimp: "*" + sharp: "*" + peerDependenciesMeta: + jimp: + optional: true + sharp: + optional: true + checksum: 10c0/2181eda5a8d01c0878300eff128f00f31bd0678931691caf0bc19f560217238b7a0e1729cdf01668836914ac23c06555719b7f2ac465c958458289dccd7f440d + languageName: node + linkType: hard + +"@docusaurus/theme-classic@npm:3.5.2, @docusaurus/theme-classic@npm:^3.5.2": + version: 3.5.2 + resolution: "@docusaurus/theme-classic@npm:3.5.2" + dependencies: + "@docusaurus/core": "npm:3.5.2" + "@docusaurus/mdx-loader": "npm:3.5.2" + "@docusaurus/module-type-aliases": "npm:3.5.2" + "@docusaurus/plugin-content-blog": "npm:3.5.2" + "@docusaurus/plugin-content-docs": "npm:3.5.2" + "@docusaurus/plugin-content-pages": "npm:3.5.2" + "@docusaurus/theme-common": "npm:3.5.2" + "@docusaurus/theme-translations": "npm:3.5.2" + "@docusaurus/types": "npm:3.5.2" + "@docusaurus/utils": "npm:3.5.2" + "@docusaurus/utils-common": "npm:3.5.2" + "@docusaurus/utils-validation": "npm:3.5.2" + "@mdx-js/react": "npm:^3.0.0" + clsx: "npm:^2.0.0" + copy-text-to-clipboard: "npm:^3.2.0" + infima: "npm:0.2.0-alpha.44" + lodash: "npm:^4.17.21" + nprogress: "npm:^0.2.0" + postcss: "npm:^8.4.26" + prism-react-renderer: "npm:^2.3.0" + prismjs: "npm:^1.29.0" + react-router-dom: "npm:^5.3.4" + rtlcss: "npm:^4.1.0" + tslib: "npm:^2.6.0" + utility-types: "npm:^3.10.0" + peerDependencies: + react: ^18.0.0 + react-dom: ^18.0.0 + checksum: 10c0/b0f1dd2a81b96d5522ce456de77e0edd539ea07406ff370b624d878a46af4b33f66892242bc177bf04a0026831fccd3621d722c174ebb8a05a8e6f6ed07d72c3 + languageName: node + linkType: hard + +"@docusaurus/theme-common@npm:3.5.2": + version: 3.5.2 + resolution: "@docusaurus/theme-common@npm:3.5.2" + dependencies: + "@docusaurus/mdx-loader": "npm:3.5.2" + "@docusaurus/module-type-aliases": "npm:3.5.2" + "@docusaurus/utils": "npm:3.5.2" + "@docusaurus/utils-common": "npm:3.5.2" + "@types/history": "npm:^4.7.11" + "@types/react": "npm:*" + "@types/react-router-config": "npm:*" + clsx: "npm:^2.0.0" + parse-numeric-range: "npm:^1.3.0" + prism-react-renderer: "npm:^2.3.0" + tslib: "npm:^2.6.0" + utility-types: "npm:^3.10.0" + peerDependencies: + "@docusaurus/plugin-content-docs": "*" + react: ^18.0.0 + react-dom: ^18.0.0 + checksum: 10c0/ae84a910b98c2b6706110e1580af96e5d87d5b29fe1f085d461932aa9608ee3df90e257d809ddcea5c5d848a160933d16052db1669dd062b5d13870834ac0394 + languageName: node + linkType: hard + +"@docusaurus/theme-mermaid@npm:^3.5.2": + version: 3.5.2 + resolution: "@docusaurus/theme-mermaid@npm:3.5.2" + dependencies: + "@docusaurus/core": "npm:3.5.2" + "@docusaurus/module-type-aliases": "npm:3.5.2" + "@docusaurus/theme-common": "npm:3.5.2" + "@docusaurus/types": "npm:3.5.2" + "@docusaurus/utils-validation": "npm:3.5.2" + mermaid: "npm:^10.4.0" + tslib: "npm:^2.6.0" + peerDependencies: + react: ^18.0.0 + react-dom: ^18.0.0 + checksum: 10c0/a4f0374ca1c7356dbdf1bfada9ec9737581cd1362d5357b66d634a55932c379e66bcaa32cc2e12ead5691f05e609c6069e827bc3535f9212b80899d8f0591a91 + languageName: node + linkType: hard + +"@docusaurus/theme-search-algolia@npm:3.5.2": + version: 3.5.2 + resolution: "@docusaurus/theme-search-algolia@npm:3.5.2" + dependencies: + "@docsearch/react": "npm:^3.5.2" + "@docusaurus/core": "npm:3.5.2" + "@docusaurus/logger": "npm:3.5.2" + "@docusaurus/plugin-content-docs": "npm:3.5.2" + "@docusaurus/theme-common": "npm:3.5.2" + 
"@docusaurus/theme-translations": "npm:3.5.2" + "@docusaurus/utils": "npm:3.5.2" + "@docusaurus/utils-validation": "npm:3.5.2" + algoliasearch: "npm:^4.18.0" + algoliasearch-helper: "npm:^3.13.3" + clsx: "npm:^2.0.0" + eta: "npm:^2.2.0" + fs-extra: "npm:^11.1.1" + lodash: "npm:^4.17.21" + tslib: "npm:^2.6.0" + utility-types: "npm:^3.10.0" + peerDependencies: + react: ^18.0.0 + react-dom: ^18.0.0 + checksum: 10c0/c617528fc0574611e49eb355f99df47e77a295a3c87792f185ec53ce0e7a6b239f017e0d9f8b45d91c87f3c615e9008441978d6daf35debcbb1b48fc9d2d98ee + languageName: node + linkType: hard + +"@docusaurus/theme-translations@npm:3.5.2": + version: 3.5.2 + resolution: "@docusaurus/theme-translations@npm:3.5.2" + dependencies: + fs-extra: "npm:^11.1.1" + tslib: "npm:^2.6.0" + checksum: 10c0/aa427b55a6d642ff30d67d5b9b8bc9f16f92b8902b125d3d6499c59e7e4ece3549a8a8e9fc017ef1cc68d9b9d5426a35812f8bf829c049103607867d605adc7b + languageName: node + linkType: hard + +"@docusaurus/tsconfig@npm:3.5.2": + version: 3.5.2 + resolution: "@docusaurus/tsconfig@npm:3.5.2" + checksum: 10c0/1cde5cfadfc94605ba9a1ec8484bc58700bcff99944fa20c6f6d93599126914dc33f15c3464ee3279cf6becafcea86909d1d25a20f8f97e95c8ddf6b1122eac8 + languageName: node + linkType: hard + +"@docusaurus/types@npm:3.5.2": + version: 3.5.2 + resolution: "@docusaurus/types@npm:3.5.2" + dependencies: + "@mdx-js/mdx": "npm:^3.0.0" + "@types/history": "npm:^4.7.11" + "@types/react": "npm:*" + commander: "npm:^5.1.0" + joi: "npm:^17.9.2" + react-helmet-async: "npm:^1.3.0" + utility-types: "npm:^3.10.0" + webpack: "npm:^5.88.1" + webpack-merge: "npm:^5.9.0" + peerDependencies: + react: ^18.0.0 + react-dom: ^18.0.0 + checksum: 10c0/a06607a8ed96871d9a2c1239e1d94e584acd5c638f7eb4071feb1f18221c25c9b78794b3f804884db201cfdfc67cecdf37a823efe854f435fb4f5a36b28237d4 + languageName: node + linkType: hard + +"@docusaurus/utils-common@npm:3.5.2": + version: 3.5.2 + resolution: "@docusaurus/utils-common@npm:3.5.2" + dependencies: + tslib: "npm:^2.6.0" + peerDependencies: + "@docusaurus/types": "*" + peerDependenciesMeta: + "@docusaurus/types": + optional: true + checksum: 10c0/17723bed0174d98895eff9666e9988757cb1b3562d90045db7a9a90294d686ca5472f5d7c171de7f306148ae24573ae7e959d31167a8dac8c1b4d7606459e056 + languageName: node + linkType: hard + +"@docusaurus/utils-validation@npm:3.5.2": + version: 3.5.2 + resolution: "@docusaurus/utils-validation@npm:3.5.2" + dependencies: + "@docusaurus/logger": "npm:3.5.2" + "@docusaurus/utils": "npm:3.5.2" + "@docusaurus/utils-common": "npm:3.5.2" + fs-extra: "npm:^11.2.0" + joi: "npm:^17.9.2" + js-yaml: "npm:^4.1.0" + lodash: "npm:^4.17.21" + tslib: "npm:^2.6.0" + checksum: 10c0/b179f7e68f9e3bfad7d03001ca9280e4122592a8995ea7ca31a8a59c5ce3b568af1177b06b41417c98bcd4cd30a7a054d0c06be8384b3f05be37bf239df96213 + languageName: node + linkType: hard + +"@docusaurus/utils@npm:3.5.2": + version: 3.5.2 + resolution: "@docusaurus/utils@npm:3.5.2" + dependencies: + "@docusaurus/logger": "npm:3.5.2" + "@docusaurus/utils-common": "npm:3.5.2" + "@svgr/webpack": "npm:^8.1.0" + escape-string-regexp: "npm:^4.0.0" + file-loader: "npm:^6.2.0" + fs-extra: "npm:^11.1.1" + github-slugger: "npm:^1.5.0" + globby: "npm:^11.1.0" + gray-matter: "npm:^4.0.3" + jiti: "npm:^1.20.0" + js-yaml: "npm:^4.1.0" + lodash: "npm:^4.17.21" + micromatch: "npm:^4.0.5" + prompts: "npm:^2.4.2" + resolve-pathname: "npm:^3.0.0" + shelljs: "npm:^0.8.5" + tslib: "npm:^2.6.0" + url-loader: "npm:^4.1.1" + utility-types: "npm:^3.10.0" + webpack: "npm:^5.88.1" + peerDependencies: + 
"@docusaurus/types": "*" + peerDependenciesMeta: + "@docusaurus/types": + optional: true + checksum: 10c0/a4d2d530c16ffd93bb84f5bc221efb767cba5915cfabd36f83130ba008cbb03a4d79ec324bb1dd0ef2d25d1317692357ee55ec8df0e9e801022e37c633b80ca9 + languageName: node + linkType: hard + +"@eslint-community/eslint-utils@npm:^4.2.0, @eslint-community/eslint-utils@npm:^4.4.0": + version: 4.4.0 + resolution: "@eslint-community/eslint-utils@npm:4.4.0" + dependencies: + eslint-visitor-keys: "npm:^3.3.0" + peerDependencies: + eslint: ^6.0.0 || ^7.0.0 || >=8.0.0 + checksum: 10c0/7e559c4ce59cd3a06b1b5a517b593912e680a7f981ae7affab0d01d709e99cd5647019be8fafa38c350305bc32f1f7d42c7073edde2ab536c745e365f37b607e + languageName: node + linkType: hard + +"@eslint-community/regexpp@npm:^4.10.0, @eslint-community/regexpp@npm:^4.6.1": + version: 4.11.0 + resolution: "@eslint-community/regexpp@npm:4.11.0" + checksum: 10c0/0f6328869b2741e2794da4ad80beac55cba7de2d3b44f796a60955b0586212ec75e6b0253291fd4aad2100ad471d1480d8895f2b54f1605439ba4c875e05e523 + languageName: node + linkType: hard + +"@eslint/eslintrc@npm:^2.1.4": + version: 2.1.4 + resolution: "@eslint/eslintrc@npm:2.1.4" + dependencies: + ajv: "npm:^6.12.4" + debug: "npm:^4.3.2" + espree: "npm:^9.6.0" + globals: "npm:^13.19.0" + ignore: "npm:^5.2.0" + import-fresh: "npm:^3.2.1" + js-yaml: "npm:^4.1.0" + minimatch: "npm:^3.1.2" + strip-json-comments: "npm:^3.1.1" + checksum: 10c0/32f67052b81768ae876c84569ffd562491ec5a5091b0c1e1ca1e0f3c24fb42f804952fdd0a137873bc64303ba368a71ba079a6f691cee25beee9722d94cc8573 + languageName: node + linkType: hard + +"@eslint/js@npm:8.57.0": + version: 8.57.0 + resolution: "@eslint/js@npm:8.57.0" + checksum: 10c0/9a518bb8625ba3350613903a6d8c622352ab0c6557a59fe6ff6178bf882bf57123f9d92aa826ee8ac3ee74b9c6203fe630e9ee00efb03d753962dcf65ee4bd94 + languageName: node + linkType: hard + +"@eslint/js@npm:^9.10.0": + version: 9.10.0 + resolution: "@eslint/js@npm:9.10.0" + checksum: 10c0/2ac45a002dc1ccf25be46ea61001ada8d77248d1313ab4e53f3735e5ae00738a757874e41f62ad6fbd49df7dffeece66e5f53ff0d7b78a99ce4c68e8fea66753 + languageName: node + linkType: hard + +"@hapi/hoek@npm:^9.0.0, @hapi/hoek@npm:^9.3.0": + version: 9.3.0 + resolution: "@hapi/hoek@npm:9.3.0" + checksum: 10c0/a096063805051fb8bba4c947e293c664b05a32b47e13bc654c0dd43813a1cec993bdd8f29ceb838020299e1d0f89f68dc0d62a603c13c9cc8541963f0beca055 + languageName: node + linkType: hard + +"@hapi/topo@npm:^5.1.0": + version: 5.1.0 + resolution: "@hapi/topo@npm:5.1.0" + dependencies: + "@hapi/hoek": "npm:^9.0.0" + checksum: 10c0/b16b06d9357947149e032bdf10151eb71aea8057c79c4046bf32393cb89d0d0f7ca501c40c0f7534a5ceca078de0700d2257ac855c15e59fe4e00bba2f25c86f + languageName: node + linkType: hard + +"@humanwhocodes/config-array@npm:^0.11.14": + version: 0.11.14 + resolution: "@humanwhocodes/config-array@npm:0.11.14" + dependencies: + "@humanwhocodes/object-schema": "npm:^2.0.2" + debug: "npm:^4.3.1" + minimatch: "npm:^3.0.5" + checksum: 10c0/66f725b4ee5fdd8322c737cb5013e19fac72d4d69c8bf4b7feb192fcb83442b035b92186f8e9497c220e58b2d51a080f28a73f7899bc1ab288c3be172c467541 + languageName: node + linkType: hard + +"@humanwhocodes/module-importer@npm:^1.0.1": + version: 1.0.1 + resolution: "@humanwhocodes/module-importer@npm:1.0.1" + checksum: 10c0/909b69c3b86d482c26b3359db16e46a32e0fb30bd306a3c176b8313b9e7313dba0f37f519de6aa8b0a1921349e505f259d19475e123182416a506d7f87e7f529 + languageName: node + linkType: hard + +"@humanwhocodes/object-schema@npm:^2.0.2": + version: 2.0.3 + resolution: 
"@humanwhocodes/object-schema@npm:2.0.3" + checksum: 10c0/80520eabbfc2d32fe195a93557cef50dfe8c8905de447f022675aaf66abc33ae54098f5ea78548d925aa671cd4ab7c7daa5ad704fe42358c9b5e7db60f80696c + languageName: node + linkType: hard + +"@isaacs/cliui@npm:^8.0.2": + version: 8.0.2 + resolution: "@isaacs/cliui@npm:8.0.2" + dependencies: + string-width: "npm:^5.1.2" + string-width-cjs: "npm:string-width@^4.2.0" + strip-ansi: "npm:^7.0.1" + strip-ansi-cjs: "npm:strip-ansi@^6.0.1" + wrap-ansi: "npm:^8.1.0" + wrap-ansi-cjs: "npm:wrap-ansi@^7.0.0" + checksum: 10c0/b1bf42535d49f11dc137f18d5e4e63a28c5569de438a221c369483731e9dac9fb797af554e8bf02b6192d1e5eba6e6402cf93900c3d0ac86391d00d04876789e + languageName: node + linkType: hard + +"@jest/schemas@npm:^29.6.3": + version: 29.6.3 + resolution: "@jest/schemas@npm:29.6.3" + dependencies: + "@sinclair/typebox": "npm:^0.27.8" + checksum: 10c0/b329e89cd5f20b9278ae1233df74016ebf7b385e0d14b9f4c1ad18d096c4c19d1e687aa113a9c976b16ec07f021ae53dea811fb8c1248a50ac34fbe009fdf6be + languageName: node + linkType: hard + +"@jest/types@npm:^29.6.3": + version: 29.6.3 + resolution: "@jest/types@npm:29.6.3" + dependencies: + "@jest/schemas": "npm:^29.6.3" + "@types/istanbul-lib-coverage": "npm:^2.0.0" + "@types/istanbul-reports": "npm:^3.0.0" + "@types/node": "npm:*" + "@types/yargs": "npm:^17.0.8" + chalk: "npm:^4.0.0" + checksum: 10c0/ea4e493dd3fb47933b8ccab201ae573dcc451f951dc44ed2a86123cd8541b82aa9d2b1031caf9b1080d6673c517e2dcc25a44b2dc4f3fbc37bfc965d444888c0 + languageName: node + linkType: hard + +"@jridgewell/gen-mapping@npm:^0.3.5": + version: 0.3.5 + resolution: "@jridgewell/gen-mapping@npm:0.3.5" + dependencies: + "@jridgewell/set-array": "npm:^1.2.1" + "@jridgewell/sourcemap-codec": "npm:^1.4.10" + "@jridgewell/trace-mapping": "npm:^0.3.24" + checksum: 10c0/1be4fd4a6b0f41337c4f5fdf4afc3bd19e39c3691924817108b82ffcb9c9e609c273f936932b9fba4b3a298ce2eb06d9bff4eb1cc3bd81c4f4ee1b4917e25feb + languageName: node + linkType: hard + +"@jridgewell/resolve-uri@npm:^3.1.0": + version: 3.1.2 + resolution: "@jridgewell/resolve-uri@npm:3.1.2" + checksum: 10c0/d502e6fb516b35032331406d4e962c21fe77cdf1cbdb49c6142bcbd9e30507094b18972778a6e27cbad756209cfe34b1a27729e6fa08a2eb92b33943f680cf1e + languageName: node + linkType: hard + +"@jridgewell/set-array@npm:^1.2.1": + version: 1.2.1 + resolution: "@jridgewell/set-array@npm:1.2.1" + checksum: 10c0/2a5aa7b4b5c3464c895c802d8ae3f3d2b92fcbe84ad12f8d0bfbb1f5ad006717e7577ee1fd2eac00c088abe486c7adb27976f45d2941ff6b0b92b2c3302c60f4 + languageName: node + linkType: hard + +"@jridgewell/source-map@npm:^0.3.3": + version: 0.3.6 + resolution: "@jridgewell/source-map@npm:0.3.6" + dependencies: + "@jridgewell/gen-mapping": "npm:^0.3.5" + "@jridgewell/trace-mapping": "npm:^0.3.25" + checksum: 10c0/6a4ecc713ed246ff8e5bdcc1ef7c49aaa93f7463d948ba5054dda18b02dcc6a055e2828c577bcceee058f302ce1fc95595713d44f5c45e43d459f88d267f2f04 + languageName: node + linkType: hard + +"@jridgewell/sourcemap-codec@npm:^1.4.10, @jridgewell/sourcemap-codec@npm:^1.4.14": + version: 1.5.0 + resolution: "@jridgewell/sourcemap-codec@npm:1.5.0" + checksum: 10c0/2eb864f276eb1096c3c11da3e9bb518f6d9fc0023c78344cdc037abadc725172c70314bdb360f2d4b7bffec7f5d657ce006816bc5d4ecb35e61b66132db00c18 + languageName: node + linkType: hard + +"@jridgewell/trace-mapping@npm:^0.3.18, @jridgewell/trace-mapping@npm:^0.3.20, @jridgewell/trace-mapping@npm:^0.3.24, @jridgewell/trace-mapping@npm:^0.3.25": + version: 0.3.25 + resolution: "@jridgewell/trace-mapping@npm:0.3.25" + dependencies: + 
"@jridgewell/resolve-uri": "npm:^3.1.0" + "@jridgewell/sourcemap-codec": "npm:^1.4.14" + checksum: 10c0/3d1ce6ebc69df9682a5a8896b414c6537e428a1d68b02fcc8363b04284a8ca0df04d0ee3013132252ab14f2527bc13bea6526a912ecb5658f0e39fd2860b4df4 + languageName: node + linkType: hard + +"@leichtgewicht/ip-codec@npm:^2.0.1": + version: 2.0.5 + resolution: "@leichtgewicht/ip-codec@npm:2.0.5" + checksum: 10c0/14a0112bd59615eef9e3446fea018045720cd3da85a98f801a685a818b0d96ef2a1f7227e8d271def546b2e2a0fe91ef915ba9dc912ab7967d2317b1a051d66b + languageName: node + linkType: hard + +"@mdx-js/mdx@npm:^3.0.0": + version: 3.0.1 + resolution: "@mdx-js/mdx@npm:3.0.1" + dependencies: + "@types/estree": "npm:^1.0.0" + "@types/estree-jsx": "npm:^1.0.0" + "@types/hast": "npm:^3.0.0" + "@types/mdx": "npm:^2.0.0" + collapse-white-space: "npm:^2.0.0" + devlop: "npm:^1.0.0" + estree-util-build-jsx: "npm:^3.0.0" + estree-util-is-identifier-name: "npm:^3.0.0" + estree-util-to-js: "npm:^2.0.0" + estree-walker: "npm:^3.0.0" + hast-util-to-estree: "npm:^3.0.0" + hast-util-to-jsx-runtime: "npm:^2.0.0" + markdown-extensions: "npm:^2.0.0" + periscopic: "npm:^3.0.0" + remark-mdx: "npm:^3.0.0" + remark-parse: "npm:^11.0.0" + remark-rehype: "npm:^11.0.0" + source-map: "npm:^0.7.0" + unified: "npm:^11.0.0" + unist-util-position-from-estree: "npm:^2.0.0" + unist-util-stringify-position: "npm:^4.0.0" + unist-util-visit: "npm:^5.0.0" + vfile: "npm:^6.0.0" + checksum: 10c0/8cd7084f1242209bbeef81f69ea670ffffa0656dda2893bbd46b1b2b26078a57f9d993f8f82ad8ba16bc969189235140007185276d7673471827331521eae2e0 + languageName: node + linkType: hard + +"@mdx-js/react@npm:^3.0.0, @mdx-js/react@npm:^3.0.1": + version: 3.0.1 + resolution: "@mdx-js/react@npm:3.0.1" + dependencies: + "@types/mdx": "npm:^2.0.0" + peerDependencies: + "@types/react": ">=16" + react: ">=16" + checksum: 10c0/d210d926ef488d39ad65f04d821936b668eadcdde3b6421e94ec4200ca7ad17f17d24c5cbc543882586af9f08b10e2eea715c728ce6277487945e05c5199f532 + languageName: node + linkType: hard + +"@nicolo-ribaudo/eslint-scope-5-internals@npm:5.1.1-v1": + version: 5.1.1-v1 + resolution: "@nicolo-ribaudo/eslint-scope-5-internals@npm:5.1.1-v1" + dependencies: + eslint-scope: "npm:5.1.1" + checksum: 10c0/75dda3e623b8ad7369ca22552d6beee337a814b2d0e8a32d23edd13fcb65c8082b32c5d86e436f3860dd7ade30d91d5db55d4ef9a08fb5a976c718ecc0d88a74 + languageName: node + linkType: hard + +"@nodelib/fs.scandir@npm:2.1.5": + version: 2.1.5 + resolution: "@nodelib/fs.scandir@npm:2.1.5" + dependencies: + "@nodelib/fs.stat": "npm:2.0.5" + run-parallel: "npm:^1.1.9" + checksum: 10c0/732c3b6d1b1e967440e65f284bd06e5821fedf10a1bea9ed2bb75956ea1f30e08c44d3def9d6a230666574edbaf136f8cfd319c14fd1f87c66e6a44449afb2eb + languageName: node + linkType: hard + +"@nodelib/fs.stat@npm:2.0.5, @nodelib/fs.stat@npm:^2.0.2": + version: 2.0.5 + resolution: "@nodelib/fs.stat@npm:2.0.5" + checksum: 10c0/88dafe5e3e29a388b07264680dc996c17f4bda48d163a9d4f5c1112979f0ce8ec72aa7116122c350b4e7976bc5566dc3ddb579be1ceaacc727872eb4ed93926d + languageName: node + linkType: hard + +"@nodelib/fs.walk@npm:^1.2.3, @nodelib/fs.walk@npm:^1.2.8": + version: 1.2.8 + resolution: "@nodelib/fs.walk@npm:1.2.8" + dependencies: + "@nodelib/fs.scandir": "npm:2.1.5" + fastq: "npm:^1.6.0" + checksum: 10c0/db9de047c3bb9b51f9335a7bb46f4fcfb6829fb628318c12115fbaf7d369bfce71c15b103d1fc3b464812d936220ee9bc1c8f762d032c9f6be9acc99249095b1 + languageName: node + linkType: hard + +"@npmcli/agent@npm:^2.0.0": + version: 2.2.2 + resolution: "@npmcli/agent@npm:2.2.2" + dependencies: + 
agent-base: "npm:^7.1.0" + http-proxy-agent: "npm:^7.0.0" + https-proxy-agent: "npm:^7.0.1" + lru-cache: "npm:^10.0.1" + socks-proxy-agent: "npm:^8.0.3" + checksum: 10c0/325e0db7b287d4154ecd164c0815c08007abfb07653cc57bceded17bb7fd240998a3cbdbe87d700e30bef494885eccc725ab73b668020811d56623d145b524ae + languageName: node + linkType: hard + +"@npmcli/config@npm:^8.0.0": + version: 8.3.4 + resolution: "@npmcli/config@npm:8.3.4" + dependencies: + "@npmcli/map-workspaces": "npm:^3.0.2" + "@npmcli/package-json": "npm:^5.1.1" + ci-info: "npm:^4.0.0" + ini: "npm:^4.1.2" + nopt: "npm:^7.2.1" + proc-log: "npm:^4.2.0" + semver: "npm:^7.3.5" + walk-up-path: "npm:^3.0.1" + checksum: 10c0/f44af54bd2cdb32b132a861863bfe7936599a4706490136082585ab71e37ef47f201f8d2013b9902b3ff30cc8264f5da70f834c80f0a29953b52a28da20f5ea7 + languageName: node + linkType: hard + +"@npmcli/fs@npm:^3.1.0": + version: 3.1.1 + resolution: "@npmcli/fs@npm:3.1.1" + dependencies: + semver: "npm:^7.3.5" + checksum: 10c0/c37a5b4842bfdece3d14dfdb054f73fe15ed2d3da61b34ff76629fb5b1731647c49166fd2a8bf8b56fcfa51200382385ea8909a3cbecdad612310c114d3f6c99 + languageName: node + linkType: hard + +"@npmcli/git@npm:^5.0.0": + version: 5.0.8 + resolution: "@npmcli/git@npm:5.0.8" + dependencies: + "@npmcli/promise-spawn": "npm:^7.0.0" + ini: "npm:^4.1.3" + lru-cache: "npm:^10.0.1" + npm-pick-manifest: "npm:^9.0.0" + proc-log: "npm:^4.0.0" + promise-inflight: "npm:^1.0.1" + promise-retry: "npm:^2.0.1" + semver: "npm:^7.3.5" + which: "npm:^4.0.0" + checksum: 10c0/892441c968404950809c7b515a93b78167ea1db2252f259f390feae22a2c5477f3e1629e105e19a084c05afc56e585bf3f13c2f13b54a06bfd6786f0c8429532 + languageName: node + linkType: hard + +"@npmcli/map-workspaces@npm:^3.0.2": + version: 3.0.6 + resolution: "@npmcli/map-workspaces@npm:3.0.6" + dependencies: + "@npmcli/name-from-folder": "npm:^2.0.0" + glob: "npm:^10.2.2" + minimatch: "npm:^9.0.0" + read-package-json-fast: "npm:^3.0.0" + checksum: 10c0/6bfcf8ca05ab9ddc2bd19c0fd91e9982f03cc6e67b0c03f04ba4d2f29b7d83f96e759c0f8f1f4b6dbe3182272483643a0d1269788352edd0c883d6fbfa2f3f14 + languageName: node + linkType: hard + +"@npmcli/name-from-folder@npm:^2.0.0": + version: 2.0.0 + resolution: "@npmcli/name-from-folder@npm:2.0.0" + checksum: 10c0/1aa551771d98ab366d4cb06b33efd3bb62b609942f6d9c3bb667c10e5bb39a223d3e330022bc980a44402133e702ae67603862099ac8254dad11f90e77409827 + languageName: node + linkType: hard + +"@npmcli/package-json@npm:^5.1.1": + version: 5.2.0 + resolution: "@npmcli/package-json@npm:5.2.0" + dependencies: + "@npmcli/git": "npm:^5.0.0" + glob: "npm:^10.2.2" + hosted-git-info: "npm:^7.0.0" + json-parse-even-better-errors: "npm:^3.0.0" + normalize-package-data: "npm:^6.0.0" + proc-log: "npm:^4.0.0" + semver: "npm:^7.5.3" + checksum: 10c0/bdce8c7eed0dee1d272bf8ba500c4bce6d8ed2b4dd2ce43075d3ba02ffd3bb70c46dbcf8b3a35e19d9492d039b720dc3a4b30d1a2ddc30b7918e1d5232faa1f7 + languageName: node + linkType: hard + +"@npmcli/promise-spawn@npm:^7.0.0": + version: 7.0.2 + resolution: "@npmcli/promise-spawn@npm:7.0.2" + dependencies: + which: "npm:^4.0.0" + checksum: 10c0/8f2af5bc2c1b1ccfb9bcd91da8873ab4723616d8bd5af877c0daa40b1e2cbfa4afb79e052611284179cae918c945a1b99ae1c565d78a355bec1a461011e89f71 + languageName: node + linkType: hard + +"@pkgjs/parseargs@npm:^0.11.0": + version: 0.11.0 + resolution: "@pkgjs/parseargs@npm:0.11.0" + checksum: 10c0/5bd7576bb1b38a47a7fc7b51ac9f38748e772beebc56200450c4a817d712232b8f1d3ef70532c80840243c657d491cf6a6be1e3a214cff907645819fdc34aadd + languageName: node + linkType: hard + 
+"@pkgr/core@npm:^0.1.0": + version: 0.1.1 + resolution: "@pkgr/core@npm:0.1.1" + checksum: 10c0/3f7536bc7f57320ab2cf96f8973664bef624710c403357429fbf680a5c3b4843c1dbd389bb43daa6b1f6f1f007bb082f5abcb76bb2b5dc9f421647743b71d3d8 + languageName: node + linkType: hard + +"@pnpm/config.env-replace@npm:^1.1.0": + version: 1.1.0 + resolution: "@pnpm/config.env-replace@npm:1.1.0" + checksum: 10c0/4cfc4a5c49ab3d0c6a1f196cfd4146374768b0243d441c7de8fa7bd28eaab6290f514b98490472cc65dbd080d34369447b3e9302585e1d5c099befd7c8b5e55f + languageName: node + linkType: hard + +"@pnpm/network.ca-file@npm:^1.0.1": + version: 1.0.2 + resolution: "@pnpm/network.ca-file@npm:1.0.2" + dependencies: + graceful-fs: "npm:4.2.10" + checksum: 10c0/95f6e0e38d047aca3283550719155ce7304ac00d98911e4ab026daedaf640a63bd83e3d13e17c623fa41ac72f3801382ba21260bcce431c14fbbc06430ecb776 + languageName: node + linkType: hard + +"@pnpm/npm-conf@npm:^2.1.0": + version: 2.3.1 + resolution: "@pnpm/npm-conf@npm:2.3.1" + dependencies: + "@pnpm/config.env-replace": "npm:^1.1.0" + "@pnpm/network.ca-file": "npm:^1.0.1" + config-chain: "npm:^1.1.11" + checksum: 10c0/778a3a34ff7d6000a2594d2a9821f873f737bc56367865718b2cf0ba5d366e49689efe7975148316d7afd8e6f1dcef7d736fbb6ea7ef55caadd1dc93a36bb302 + languageName: node + linkType: hard + +"@polka/url@npm:^1.0.0-next.24": + version: 1.0.0-next.25 + resolution: "@polka/url@npm:1.0.0-next.25" + checksum: 10c0/ef61f0a0fe94bb6e1143fc5b9d5a12e6ca9dbd2c57843ebf81db432c21b9f1005c09e8a1ef8b6d5ddfa42146ca65b640feb2d353bd0d3546da46ba59e48a5349 + languageName: node + linkType: hard + +"@sideway/address@npm:^4.1.5": + version: 4.1.5 + resolution: "@sideway/address@npm:4.1.5" + dependencies: + "@hapi/hoek": "npm:^9.0.0" + checksum: 10c0/638eb6f7e7dba209053dd6c8da74d7cc995e2b791b97644d0303a7dd3119263bcb7225a4f6804d4db2bc4f96e5a9d262975a014f58eae4d1753c27cbc96ef959 + languageName: node + linkType: hard + +"@sideway/formula@npm:^3.0.1": + version: 3.0.1 + resolution: "@sideway/formula@npm:3.0.1" + checksum: 10c0/3fe81fa9662efc076bf41612b060eb9b02e846ea4bea5bd114f1662b7f1541e9dedcf98aff0d24400bcb92f113964a50e0290b86e284edbdf6346fa9b7e2bf2c + languageName: node + linkType: hard + +"@sideway/pinpoint@npm:^2.0.0": + version: 2.0.0 + resolution: "@sideway/pinpoint@npm:2.0.0" + checksum: 10c0/d2ca75dacaf69b8fc0bb8916a204e01def3105ee44d8be16c355e5f58189eb94039e15ce831f3d544f229889ccfa35562a0ce2516179f3a7ee1bbe0b71e55b36 + languageName: node + linkType: hard + +"@sinclair/typebox@npm:^0.27.8": + version: 0.27.8 + resolution: "@sinclair/typebox@npm:0.27.8" + checksum: 10c0/ef6351ae073c45c2ac89494dbb3e1f87cc60a93ce4cde797b782812b6f97da0d620ae81973f104b43c9b7eaa789ad20ba4f6a1359f1cc62f63729a55a7d22d4e + languageName: node + linkType: hard + +"@sindresorhus/is@npm:^4.6.0": + version: 4.6.0 + resolution: "@sindresorhus/is@npm:4.6.0" + checksum: 10c0/33b6fb1d0834ec8dd7689ddc0e2781c2bfd8b9c4e4bacbcb14111e0ae00621f2c264b8a7d36541799d74888b5dccdf422a891a5cb5a709ace26325eedc81e22e + languageName: node + linkType: hard + +"@sindresorhus/is@npm:^5.2.0": + version: 5.6.0 + resolution: "@sindresorhus/is@npm:5.6.0" + checksum: 10c0/66727344d0c92edde5760b5fd1f8092b717f2298a162a5f7f29e4953e001479927402d9d387e245fb9dc7d3b37c72e335e93ed5875edfc5203c53be8ecba1b52 + languageName: node + linkType: hard + +"@slorber/react-ideal-image@npm:^0.0.12": + version: 0.0.12 + resolution: "@slorber/react-ideal-image@npm:0.0.12" + peerDependencies: + prop-types: ">=15" + react: ">=0.14.x" + react-waypoint: ">=9.0.2" + checksum: 
10c0/1c12d236c1f9083059b631695c4c1c11affef003576de57a40fc3fe4fbde82bda2daa09ba1c7212e9c6599fb60f47361c3cbefb2a8be5515b0e46f0af000a89d + languageName: node + linkType: hard + +"@slorber/remark-comment@npm:^1.0.0": + version: 1.0.0 + resolution: "@slorber/remark-comment@npm:1.0.0" + dependencies: + micromark-factory-space: "npm:^1.0.0" + micromark-util-character: "npm:^1.1.0" + micromark-util-symbol: "npm:^1.0.1" + checksum: 10c0/b8da9d8f560740959c421d3ce5be43952eace1c95cb65402d9473a15e66463346a37fb5f121a6b22a83af51e8845b0b4ff3c321f14ce31bd58fb126acf6c8ed9 + languageName: node + linkType: hard + +"@svgr/babel-plugin-add-jsx-attribute@npm:8.0.0": + version: 8.0.0 + resolution: "@svgr/babel-plugin-add-jsx-attribute@npm:8.0.0" + peerDependencies: + "@babel/core": ^7.0.0-0 + checksum: 10c0/a50bd0baa34faf16bcba712091f94c7f0e230431fe99a9dfc3401fa92823ad3f68495b86ab9bf9044b53839e8c416cfbb37eb3f246ff33f261e0fa9ee1779c5b + languageName: node + linkType: hard + +"@svgr/babel-plugin-remove-jsx-attribute@npm:8.0.0": + version: 8.0.0 + resolution: "@svgr/babel-plugin-remove-jsx-attribute@npm:8.0.0" + peerDependencies: + "@babel/core": ^7.0.0-0 + checksum: 10c0/8a98e59bd9971e066815b4129409932f7a4db4866834fe75677ea6d517972fb40b380a69a4413189f20e7947411f9ab1b0f029dd5e8068686a5a0188d3ccd4c7 + languageName: node + linkType: hard + +"@svgr/babel-plugin-remove-jsx-empty-expression@npm:8.0.0": + version: 8.0.0 + resolution: "@svgr/babel-plugin-remove-jsx-empty-expression@npm:8.0.0" + peerDependencies: + "@babel/core": ^7.0.0-0 + checksum: 10c0/517dcca75223bd05d3f056a8514dbba3031278bea4eadf0842c576d84f4651e7a4e0e7082d3ee4ef42456de0f9c4531d8a1917c04876ca64b014b859ca8f1bde + languageName: node + linkType: hard + +"@svgr/babel-plugin-replace-jsx-attribute-value@npm:8.0.0": + version: 8.0.0 + resolution: "@svgr/babel-plugin-replace-jsx-attribute-value@npm:8.0.0" + peerDependencies: + "@babel/core": ^7.0.0-0 + checksum: 10c0/004bd1892053b7e9c1b0bb14acc44e77634ec393722b87b1e4fae53e2c35122a2dd0d5c15e9070dbeec274e22e7693a2b8b48506733a8009ee92b12946fcb10a + languageName: node + linkType: hard + +"@svgr/babel-plugin-svg-dynamic-title@npm:8.0.0": + version: 8.0.0 + resolution: "@svgr/babel-plugin-svg-dynamic-title@npm:8.0.0" + peerDependencies: + "@babel/core": ^7.0.0-0 + checksum: 10c0/80e0a7fcf902f984c705051ca5c82ea6050ccbb70b651a8fea6d0eb5809e4dac274b49ea6be2d87f1eb9dfc0e2d6cdfffe1669ec2117f44b67a60a07d4c0b8b8 + languageName: node + linkType: hard + +"@svgr/babel-plugin-svg-em-dimensions@npm:8.0.0": + version: 8.0.0 + resolution: "@svgr/babel-plugin-svg-em-dimensions@npm:8.0.0" + peerDependencies: + "@babel/core": ^7.0.0-0 + checksum: 10c0/73e92c8277a89279745c0c500f59f083279a8dc30cd552b22981fade2a77628fb2bd2819ee505725fcd2e93f923e3790b52efcff409a159e657b46604a0b9a21 + languageName: node + linkType: hard + +"@svgr/babel-plugin-transform-react-native-svg@npm:8.1.0": + version: 8.1.0 + resolution: "@svgr/babel-plugin-transform-react-native-svg@npm:8.1.0" + peerDependencies: + "@babel/core": ^7.0.0-0 + checksum: 10c0/655ed6bc7a208ceaa4ecff0a54ccc36008c3cb31efa90d11e171cab325ebbb21aa78f09c7b65f9b3ddeda3a85f348c0c862902c48be13c14b4de165c847974e3 + languageName: node + linkType: hard + +"@svgr/babel-plugin-transform-svg-component@npm:8.0.0": + version: 8.0.0 + resolution: "@svgr/babel-plugin-transform-svg-component@npm:8.0.0" + peerDependencies: + "@babel/core": ^7.0.0-0 + checksum: 10c0/4ac00bb99a3db4ef05e4362f116a3c608ee365a2d26cf7318d8d41a4a5b30a02c80455cce0e62c65b60ed815b5d632bedabac2ccd4b56f998fadef5286e3ded4 + 
languageName: node + linkType: hard + +"@svgr/babel-preset@npm:8.1.0": + version: 8.1.0 + resolution: "@svgr/babel-preset@npm:8.1.0" + dependencies: + "@svgr/babel-plugin-add-jsx-attribute": "npm:8.0.0" + "@svgr/babel-plugin-remove-jsx-attribute": "npm:8.0.0" + "@svgr/babel-plugin-remove-jsx-empty-expression": "npm:8.0.0" + "@svgr/babel-plugin-replace-jsx-attribute-value": "npm:8.0.0" + "@svgr/babel-plugin-svg-dynamic-title": "npm:8.0.0" + "@svgr/babel-plugin-svg-em-dimensions": "npm:8.0.0" + "@svgr/babel-plugin-transform-react-native-svg": "npm:8.1.0" + "@svgr/babel-plugin-transform-svg-component": "npm:8.0.0" + peerDependencies: + "@babel/core": ^7.0.0-0 + checksum: 10c0/49367d3ad0831f79b1056871b91766246f449d4d1168623af5e283fbaefce4a01d77ab00de6b045b55e956f9aae27895823198493cd232d88d3435ea4517ffc5 + languageName: node + linkType: hard + +"@svgr/core@npm:8.1.0": + version: 8.1.0 + resolution: "@svgr/core@npm:8.1.0" + dependencies: + "@babel/core": "npm:^7.21.3" + "@svgr/babel-preset": "npm:8.1.0" + camelcase: "npm:^6.2.0" + cosmiconfig: "npm:^8.1.3" + snake-case: "npm:^3.0.4" + checksum: 10c0/6a2f6b1bc79bce39f66f088d468985d518005fc5147ebf4f108570a933818b5951c2cb7da230ddff4b7c8028b5a672b2d33aa2acce012b8b9770073aa5a2d041 + languageName: node + linkType: hard + +"@svgr/hast-util-to-babel-ast@npm:8.0.0": + version: 8.0.0 + resolution: "@svgr/hast-util-to-babel-ast@npm:8.0.0" + dependencies: + "@babel/types": "npm:^7.21.3" + entities: "npm:^4.4.0" + checksum: 10c0/f4165b583ba9eaf6719e598977a7b3ed182f177983e55f9eb55a6a73982d81277510e9eb7ab41f255151fb9ed4edd11ac4bef95dd872f04ed64966d8c85e0f79 + languageName: node + linkType: hard + +"@svgr/plugin-jsx@npm:8.1.0": + version: 8.1.0 + resolution: "@svgr/plugin-jsx@npm:8.1.0" + dependencies: + "@babel/core": "npm:^7.21.3" + "@svgr/babel-preset": "npm:8.1.0" + "@svgr/hast-util-to-babel-ast": "npm:8.0.0" + svg-parser: "npm:^2.0.4" + peerDependencies: + "@svgr/core": "*" + checksum: 10c0/07b4d9e00de795540bf70556fa2cc258774d01e97a12a26234c6fdf42b309beb7c10f31ee24d1a71137239347b1547b8bb5587d3a6de10669f95dcfe99cddc56 + languageName: node + linkType: hard + +"@svgr/plugin-svgo@npm:8.1.0": + version: 8.1.0 + resolution: "@svgr/plugin-svgo@npm:8.1.0" + dependencies: + cosmiconfig: "npm:^8.1.3" + deepmerge: "npm:^4.3.1" + svgo: "npm:^3.0.2" + peerDependencies: + "@svgr/core": "*" + checksum: 10c0/bfd25460f23f1548bfb8f6f3bedd6d6972c1a4f8881bd35a4f8c115218da6e999e8f9ac0ef0ed88c4e0b93fcec37f382b94c0322f4ec2b26752a89e5cc8b9d7a + languageName: node + linkType: hard + +"@svgr/webpack@npm:^8.1.0": + version: 8.1.0 + resolution: "@svgr/webpack@npm:8.1.0" + dependencies: + "@babel/core": "npm:^7.21.3" + "@babel/plugin-transform-react-constant-elements": "npm:^7.21.3" + "@babel/preset-env": "npm:^7.20.2" + "@babel/preset-react": "npm:^7.18.6" + "@babel/preset-typescript": "npm:^7.21.0" + "@svgr/core": "npm:8.1.0" + "@svgr/plugin-jsx": "npm:8.1.0" + "@svgr/plugin-svgo": "npm:8.1.0" + checksum: 10c0/4c1cac45bd5890de8643e5a7bfb71f3bcd8b85ae5bbacf10b8ad9f939b7a98e8d601c3ada204ffb95223abf4a24beeac5a2a0d6928a52a1ab72a29da3c015c22 + languageName: node + linkType: hard + +"@szmarczak/http-timer@npm:^5.0.1": + version: 5.0.1 + resolution: "@szmarczak/http-timer@npm:5.0.1" + dependencies: + defer-to-connect: "npm:^2.0.1" + checksum: 10c0/4629d2fbb2ea67c2e9dc03af235c0991c79ebdddcbc19aed5d5732fb29ce01c13331e9b1a491584b9069bd6ecde6581dcbf871f11b7eefdebbab34de6cf2197e + languageName: node + linkType: hard + +"@trysound/sax@npm:0.2.0": + version: 0.2.0 + resolution: 
"@trysound/sax@npm:0.2.0" + checksum: 10c0/44907308549ce775a41c38a815f747009ac45929a45d642b836aa6b0a536e4978d30b8d7d680bbd116e9dd73b7dbe2ef0d1369dcfc2d09e83ba381e485ecbe12 + languageName: node + linkType: hard + +"@types/acorn@npm:^4.0.0": + version: 4.0.6 + resolution: "@types/acorn@npm:4.0.6" + dependencies: + "@types/estree": "npm:*" + checksum: 10c0/5a65a1d7e91fc95703f0a717897be60fa7ccd34b17f5462056274a246e6690259fe0a1baabc86fd3260354f87245cb3dc483346d7faad2b78fc199763978ede9 + languageName: node + linkType: hard + +"@types/babel__core@npm:^7.20.5": + version: 7.20.5 + resolution: "@types/babel__core@npm:7.20.5" + dependencies: + "@babel/parser": "npm:^7.20.7" + "@babel/types": "npm:^7.20.7" + "@types/babel__generator": "npm:*" + "@types/babel__template": "npm:*" + "@types/babel__traverse": "npm:*" + checksum: 10c0/bdee3bb69951e833a4b811b8ee9356b69a61ed5b7a23e1a081ec9249769117fa83aaaf023bb06562a038eb5845155ff663e2d5c75dd95c1d5ccc91db012868ff + languageName: node + linkType: hard + +"@types/babel__generator@npm:*": + version: 7.6.8 + resolution: "@types/babel__generator@npm:7.6.8" + dependencies: + "@babel/types": "npm:^7.0.0" + checksum: 10c0/f0ba105e7d2296bf367d6e055bb22996886c114261e2cb70bf9359556d0076c7a57239d019dee42bb063f565bade5ccb46009bce2044b2952d964bf9a454d6d2 + languageName: node + linkType: hard + +"@types/babel__template@npm:*": + version: 7.4.4 + resolution: "@types/babel__template@npm:7.4.4" + dependencies: + "@babel/parser": "npm:^7.1.0" + "@babel/types": "npm:^7.0.0" + checksum: 10c0/cc84f6c6ab1eab1427e90dd2b76ccee65ce940b778a9a67be2c8c39e1994e6f5bbc8efa309f6cea8dc6754994524cd4d2896558df76d92e7a1f46ecffee7112b + languageName: node + linkType: hard + +"@types/babel__traverse@npm:*": + version: 7.20.6 + resolution: "@types/babel__traverse@npm:7.20.6" + dependencies: + "@babel/types": "npm:^7.20.7" + checksum: 10c0/7ba7db61a53e28cac955aa99af280d2600f15a8c056619c05b6fc911cbe02c61aa4f2823299221b23ce0cce00b294c0e5f618ec772aa3f247523c2e48cf7b888 + languageName: node + linkType: hard + +"@types/body-parser@npm:*": + version: 1.19.5 + resolution: "@types/body-parser@npm:1.19.5" + dependencies: + "@types/connect": "npm:*" + "@types/node": "npm:*" + checksum: 10c0/aebeb200f25e8818d8cf39cd0209026750d77c9b85381cdd8deeb50913e4d18a1ebe4b74ca9b0b4d21952511eeaba5e9fbbf739b52731a2061e206ec60d568df + languageName: node + linkType: hard + +"@types/bonjour@npm:^3.5.9": + version: 3.5.13 + resolution: "@types/bonjour@npm:3.5.13" + dependencies: + "@types/node": "npm:*" + checksum: 10c0/eebedbca185ac3c39dd5992ef18d9e2a9f99e7f3c2f52f5561f90e9ed482c5d224c7962db95362712f580ed5713264e777a98d8f0bd8747f4eadf62937baed16 + languageName: node + linkType: hard + +"@types/concat-stream@npm:^2.0.0": + version: 2.0.3 + resolution: "@types/concat-stream@npm:2.0.3" + dependencies: + "@types/node": "npm:*" + checksum: 10c0/dd8bdf8061d275f30dc602e04c63ebc001d3a260e722c867916667a45f90fd22da62a2de0919a35f35969b84a14cb94c69d15bdb2c8a518ce8abf3a0e1a16e5d + languageName: node + linkType: hard + +"@types/connect-history-api-fallback@npm:^1.3.5": + version: 1.5.4 + resolution: "@types/connect-history-api-fallback@npm:1.5.4" + dependencies: + "@types/express-serve-static-core": "npm:*" + "@types/node": "npm:*" + checksum: 10c0/1b4035b627dcd714b05a22557f942e24a57ca48e7377dde0d2f86313fe685bc0a6566512a73257a55b5665b96c3041fb29228ac93331d8133011716215de8244 + languageName: node + linkType: hard + +"@types/connect@npm:*": + version: 3.4.38 + resolution: "@types/connect@npm:3.4.38" + dependencies: + "@types/node": "npm:*" 
+ checksum: 10c0/2e1cdba2c410f25649e77856505cd60223250fa12dff7a503e492208dbfdd25f62859918f28aba95315251fd1f5e1ffbfca1e25e73037189ab85dd3f8d0a148c + languageName: node + linkType: hard + +"@types/d3-scale-chromatic@npm:^3.0.0": + version: 3.0.3 + resolution: "@types/d3-scale-chromatic@npm:3.0.3" + checksum: 10c0/2f48c6f370edba485b57b73573884ded71914222a4580140ff87ee96e1d55ccd05b1d457f726e234a31269b803270ac95d5554229ab6c43c7e4a9894e20dd490 + languageName: node + linkType: hard + +"@types/d3-scale@npm:^4.0.3": + version: 4.0.8 + resolution: "@types/d3-scale@npm:4.0.8" + dependencies: + "@types/d3-time": "npm:*" + checksum: 10c0/57de90e4016f640b83cb960b7e3a0ab3ed02e720898840ddc5105264ffcfea73336161442fdc91895377c2d2f91904d637282f16852b8535b77e15a761c8e99e + languageName: node + linkType: hard + +"@types/d3-time@npm:*": + version: 3.0.3 + resolution: "@types/d3-time@npm:3.0.3" + checksum: 10c0/245a8aadca504df27edf730de502e47a68f16ae795c86b5ca35e7afa91c133aa9ef4d08778f8cf1ed2be732f89a4105ba4b437ce2afbdfd17d3d937b6ba5f568 + languageName: node + linkType: hard + +"@types/debug@npm:^4.0.0": + version: 4.1.12 + resolution: "@types/debug@npm:4.1.12" + dependencies: + "@types/ms": "npm:*" + checksum: 10c0/5dcd465edbb5a7f226e9a5efd1f399c6172407ef5840686b73e3608ce135eeca54ae8037dcd9f16bdb2768ac74925b820a8b9ecc588a58ca09eca6acabe33e2f + languageName: node + linkType: hard + +"@types/estree-jsx@npm:^1.0.0": + version: 1.0.5 + resolution: "@types/estree-jsx@npm:1.0.5" + dependencies: + "@types/estree": "npm:*" + checksum: 10c0/07b354331516428b27a3ab99ee397547d47eb223c34053b48f84872fafb841770834b90cc1a0068398e7c7ccb15ec51ab00ec64b31dc5e3dbefd624638a35c6d + languageName: node + linkType: hard + +"@types/estree@npm:*, @types/estree@npm:^1.0.0, @types/estree@npm:^1.0.5": + version: 1.0.5 + resolution: "@types/estree@npm:1.0.5" + checksum: 10c0/b3b0e334288ddb407c7b3357ca67dbee75ee22db242ca7c56fe27db4e1a31989cb8af48a84dd401deb787fe10cc6b2ab1ee82dc4783be87ededbe3d53c79c70d + languageName: node + linkType: hard + +"@types/express-serve-static-core@npm:*, @types/express-serve-static-core@npm:^4.17.33": + version: 4.19.5 + resolution: "@types/express-serve-static-core@npm:4.19.5" + dependencies: + "@types/node": "npm:*" + "@types/qs": "npm:*" + "@types/range-parser": "npm:*" + "@types/send": "npm:*" + checksum: 10c0/ba8d8d976ab797b2602c60e728802ff0c98a00f13d420d82770f3661b67fa36ea9d3be0b94f2ddd632afe1fbc6e41620008b01db7e4fabdd71a2beb5539b0725 + languageName: node + linkType: hard + +"@types/express@npm:*, @types/express@npm:^4.17.13": + version: 4.17.21 + resolution: "@types/express@npm:4.17.21" + dependencies: + "@types/body-parser": "npm:*" + "@types/express-serve-static-core": "npm:^4.17.33" + "@types/qs": "npm:*" + "@types/serve-static": "npm:*" + checksum: 10c0/12e562c4571da50c7d239e117e688dc434db1bac8be55613294762f84fd77fbd0658ccd553c7d3ab02408f385bc93980992369dd30e2ecd2c68c358e6af8fabf + languageName: node + linkType: hard + +"@types/gtag.js@npm:^0.0.12": + version: 0.0.12 + resolution: "@types/gtag.js@npm:0.0.12" + checksum: 10c0/fee8f4c6e627301b89ab616c9e219bd53fa6ea1ffd1d0a8021e21363f0bdb2cf7eb1a5bcda0c6f1502186379bc7784ec29c932e21634f4e07f9e7a8c56887400 + languageName: node + linkType: hard + +"@types/hast@npm:^3.0.0": + version: 3.0.4 + resolution: "@types/hast@npm:3.0.4" + dependencies: + "@types/unist": "npm:*" + checksum: 10c0/3249781a511b38f1d330fd1e3344eed3c4e7ea8eff82e835d35da78e637480d36fad37a78be5a7aed8465d237ad0446abc1150859d0fde395354ea634decf9f7 + languageName: node + linkType: hard + 
+"@types/history@npm:^4.7.11": + version: 4.7.11 + resolution: "@types/history@npm:4.7.11" + checksum: 10c0/3facf37c2493d1f92b2e93a22cac7ea70b06351c2ab9aaceaa3c56aa6099fb63516f6c4ec1616deb5c56b4093c026a043ea2d3373e6c0644d55710364d02c934 + languageName: node + linkType: hard + +"@types/html-minifier-terser@npm:^6.0.0": + version: 6.1.0 + resolution: "@types/html-minifier-terser@npm:6.1.0" + checksum: 10c0/a62fb8588e2f3818d82a2d7b953ad60a4a52fd767ae04671de1c16f5788bd72f1ed3a6109ed63fd190c06a37d919e3c39d8adbc1793a005def76c15a3f5f5dab + languageName: node + linkType: hard + +"@types/http-cache-semantics@npm:^4.0.2": + version: 4.0.4 + resolution: "@types/http-cache-semantics@npm:4.0.4" + checksum: 10c0/51b72568b4b2863e0fe8d6ce8aad72a784b7510d72dc866215642da51d84945a9459fa89f49ec48f1e9a1752e6a78e85a4cda0ded06b1c73e727610c925f9ce6 + languageName: node + linkType: hard + +"@types/http-errors@npm:*": + version: 2.0.4 + resolution: "@types/http-errors@npm:2.0.4" + checksum: 10c0/494670a57ad4062fee6c575047ad5782506dd35a6b9ed3894cea65830a94367bd84ba302eb3dde331871f6d70ca287bfedb1b2cf658e6132cd2cbd427ab56836 + languageName: node + linkType: hard + +"@types/http-proxy@npm:^1.17.8": + version: 1.17.15 + resolution: "@types/http-proxy@npm:1.17.15" + dependencies: + "@types/node": "npm:*" + checksum: 10c0/e2bf2fcdf23c88141b8d2c85ed5e5418b62ef78285884a2b5a717af55f4d9062136aa475489d10292093343df58fb81975f34bebd6b9df322288fd9821cbee07 + languageName: node + linkType: hard + +"@types/is-empty@npm:^1.0.0": + version: 1.2.3 + resolution: "@types/is-empty@npm:1.2.3" + checksum: 10c0/2ca9af27ce93cc0abe277178a69803e641d755152bf4fc415e1789451ff62f6e39cf15dbdc111d490171d757669937ad4789c7395af55f5e7d261f6bfe416974 + languageName: node + linkType: hard + +"@types/istanbul-lib-coverage@npm:*, @types/istanbul-lib-coverage@npm:^2.0.0": + version: 2.0.6 + resolution: "@types/istanbul-lib-coverage@npm:2.0.6" + checksum: 10c0/3948088654f3eeb45363f1db158354fb013b362dba2a5c2c18c559484d5eb9f6fd85b23d66c0a7c2fcfab7308d0a585b14dadaca6cc8bf89ebfdc7f8f5102fb7 + languageName: node + linkType: hard + +"@types/istanbul-lib-report@npm:*": + version: 3.0.3 + resolution: "@types/istanbul-lib-report@npm:3.0.3" + dependencies: + "@types/istanbul-lib-coverage": "npm:*" + checksum: 10c0/247e477bbc1a77248f3c6de5dadaae85ff86ac2d76c5fc6ab1776f54512a745ff2a5f791d22b942e3990ddbd40f3ef5289317c4fca5741bedfaa4f01df89051c + languageName: node + linkType: hard + +"@types/istanbul-reports@npm:^3.0.0": + version: 3.0.4 + resolution: "@types/istanbul-reports@npm:3.0.4" + dependencies: + "@types/istanbul-lib-report": "npm:*" + checksum: 10c0/1647fd402aced5b6edac87274af14ebd6b3a85447ef9ad11853a70fd92a98d35f81a5d3ea9fcb5dbb5834e800c6e35b64475e33fcae6bfa9acc70d61497c54ee + languageName: node + linkType: hard + +"@types/json-schema@npm:^7.0.4, @types/json-schema@npm:^7.0.5, @types/json-schema@npm:^7.0.8, @types/json-schema@npm:^7.0.9": + version: 7.0.15 + resolution: "@types/json-schema@npm:7.0.15" + checksum: 10c0/a996a745e6c5d60292f36731dd41341339d4eeed8180bb09226e5c8d23759067692b1d88e5d91d72ee83dfc00d3aca8e7bd43ea120516c17922cbcb7c3e252db + languageName: node + linkType: hard + +"@types/mdast@npm:^3.0.0": + version: 3.0.15 + resolution: "@types/mdast@npm:3.0.15" + dependencies: + "@types/unist": "npm:^2" + checksum: 10c0/fcbf716c03d1ed5465deca60862e9691414f9c43597c288c7d2aefbe274552e1bbd7aeee91b88a02597e88a28c139c57863d0126fcf8416a95fdc681d054ee3d + languageName: node + linkType: hard + +"@types/mdast@npm:^4.0.0, @types/mdast@npm:^4.0.2": + version: 4.0.4 
+ resolution: "@types/mdast@npm:4.0.4" + dependencies: + "@types/unist": "npm:*" + checksum: 10c0/84f403dbe582ee508fd9c7643ac781ad8597fcbfc9ccb8d4715a2c92e4545e5772cbd0dbdf18eda65789386d81b009967fdef01b24faf6640f817287f54d9c82 + languageName: node + linkType: hard + +"@types/mdx@npm:^2.0.0": + version: 2.0.13 + resolution: "@types/mdx@npm:2.0.13" + checksum: 10c0/5edf1099505ac568da55f9ae8a93e7e314e8cbc13d3445d0be61b75941226b005e1390d9b95caecf5dcb00c9d1bab2f1f60f6ff9876dc091a48b547495007720 + languageName: node + linkType: hard + +"@types/mime@npm:^1": + version: 1.3.5 + resolution: "@types/mime@npm:1.3.5" + checksum: 10c0/c2ee31cd9b993804df33a694d5aa3fa536511a49f2e06eeab0b484fef59b4483777dbb9e42a4198a0809ffbf698081fdbca1e5c2218b82b91603dfab10a10fbc + languageName: node + linkType: hard + +"@types/ms@npm:*": + version: 0.7.34 + resolution: "@types/ms@npm:0.7.34" + checksum: 10c0/ac80bd90012116ceb2d188fde62d96830ca847823e8ca71255616bc73991aa7d9f057b8bfab79e8ee44ffefb031ddd1bcce63ea82f9e66f7c31ec02d2d823ccc + languageName: node + linkType: hard + +"@types/node-forge@npm:^1.3.0": + version: 1.3.11 + resolution: "@types/node-forge@npm:1.3.11" + dependencies: + "@types/node": "npm:*" + checksum: 10c0/3d7d23ca0ba38ac0cf74028393bd70f31169ab9aba43f21deb787840170d307d662644bac07287495effe2812ddd7ac8a14dbd43f16c2936bbb06312e96fc3b9 + languageName: node + linkType: hard + +"@types/node@npm:*": + version: 22.5.0 + resolution: "@types/node@npm:22.5.0" + dependencies: + undici-types: "npm:~6.19.2" + checksum: 10c0/45aa75c5e71645fac42dced4eff7f197c3fdfff6e8a9fdacd0eb2e748ff21ee70ffb73982f068a58e8d73b2c088a63613142c125236cdcf3c072ea97eada1559 + languageName: node + linkType: hard + +"@types/node@npm:^17.0.5": + version: 17.0.45 + resolution: "@types/node@npm:17.0.45" + checksum: 10c0/0db377133d709b33a47892581a21a41cd7958f22723a3cc6c71d55ac018121382de42fbfc7970d5ae3e7819dbe5f40e1c6a5174aedf7e7964e9cb8fa72b580b0 + languageName: node + linkType: hard + +"@types/node@npm:^20.0.0": + version: 20.16.1 + resolution: "@types/node@npm:20.16.1" + dependencies: + undici-types: "npm:~6.19.2" + checksum: 10c0/cac13c0f42467df3254805a671ca9e74a6eb7c41568de972e26b10dcc448a45743aaf00e9e5fce4a9214da5bc8444fe902918e105dac5a224e24e83fd9989a97 + languageName: node + linkType: hard + +"@types/node@npm:^22.5.4": + version: 22.5.4 + resolution: "@types/node@npm:22.5.4" + dependencies: + undici-types: "npm:~6.19.2" + checksum: 10c0/b445daa7eecd761ad4d778b882d6ff7bcc3b4baad2086ea9804db7c5d4a4ab0298b00d7f5315fc640a73b5a1d52bbf9628e09c9fec0cf44dbf9b4df674a8717d + languageName: node + linkType: hard + +"@types/parse-json@npm:^4.0.0": + version: 4.0.2 + resolution: "@types/parse-json@npm:4.0.2" + checksum: 10c0/b1b863ac34a2c2172fbe0807a1ec4d5cb684e48d422d15ec95980b81475fac4fdb3768a8b13eef39130203a7c04340fc167bae057c7ebcafd7dec9fe6c36aeb1 + languageName: node + linkType: hard + +"@types/prismjs@npm:^1.26.0": + version: 1.26.4 + resolution: "@types/prismjs@npm:1.26.4" + checksum: 10c0/996be7d119779c4cbe66e58342115a12d35a02226dae3aaa4a744c9652d5a3939c93c26182e18156965ac4f93575ebb309c3469c36f52e60ee5c0f8f27e874df + languageName: node + linkType: hard + +"@types/prop-types@npm:*": + version: 15.7.12 + resolution: "@types/prop-types@npm:15.7.12" + checksum: 10c0/1babcc7db6a1177779f8fde0ccc78d64d459906e6ef69a4ed4dd6339c920c2e05b074ee5a92120fe4e9d9f1a01c952f843ebd550bee2332fc2ef81d1706878f8 + languageName: node + linkType: hard + +"@types/qs@npm:*": + version: 6.9.15 + resolution: "@types/qs@npm:6.9.15" + checksum: 
10c0/49c5ff75ca3adb18a1939310042d273c9fc55920861bd8e5100c8a923b3cda90d759e1a95e18334092da1c8f7b820084687770c83a1ccef04fb2c6908117c823 + languageName: node + linkType: hard + +"@types/range-parser@npm:*": + version: 1.2.7 + resolution: "@types/range-parser@npm:1.2.7" + checksum: 10c0/361bb3e964ec5133fa40644a0b942279ed5df1949f21321d77de79f48b728d39253e5ce0408c9c17e4e0fd95ca7899da36841686393b9f7a1e209916e9381a3c + languageName: node + linkType: hard + +"@types/react-router-config@npm:*, @types/react-router-config@npm:^5.0.7": + version: 5.0.11 + resolution: "@types/react-router-config@npm:5.0.11" + dependencies: + "@types/history": "npm:^4.7.11" + "@types/react": "npm:*" + "@types/react-router": "npm:^5.1.0" + checksum: 10c0/3fa4daf8c14689a05f34e289fc53c4a892e97f35715455c507a8048d9875b19cd3d3142934ca973effed6a6c38f33539b6e173cd254f67e2021ecd5458d551c8 + languageName: node + linkType: hard + +"@types/react-router-dom@npm:*": + version: 5.3.3 + resolution: "@types/react-router-dom@npm:5.3.3" + dependencies: + "@types/history": "npm:^4.7.11" + "@types/react": "npm:*" + "@types/react-router": "npm:*" + checksum: 10c0/a9231a16afb9ed5142678147eafec9d48582809295754fb60946e29fcd3757a4c7a3180fa94b45763e4c7f6e3f02379e2fcb8dd986db479dcab40eff5fc62a91 + languageName: node + linkType: hard + +"@types/react-router@npm:*, @types/react-router@npm:^5.1.0": + version: 5.1.20 + resolution: "@types/react-router@npm:5.1.20" + dependencies: + "@types/history": "npm:^4.7.11" + "@types/react": "npm:*" + checksum: 10c0/1f7eee61981d2f807fa01a34a0ef98ebc0774023832b6611a69c7f28fdff01de5a38cabf399f32e376bf8099dcb7afaf724775bea9d38870224492bea4cb5737 + languageName: node + linkType: hard + +"@types/react@npm:*": + version: 18.3.4 + resolution: "@types/react@npm:18.3.4" + dependencies: + "@types/prop-types": "npm:*" + csstype: "npm:^3.0.2" + checksum: 10c0/5c52e1e6f540cff21e3c2a5212066d02e005f6fb21e4a536a29097fae878db9f407cd7a4b43778f51359349c5f692e08bc77ddb5f5cecbfca9ca4d4e3c91a48e + languageName: node + linkType: hard + +"@types/react@npm:^18.3.5": + version: 18.3.5 + resolution: "@types/react@npm:18.3.5" + dependencies: + "@types/prop-types": "npm:*" + csstype: "npm:^3.0.2" + checksum: 10c0/548b1d3d7c2f0242fbfdbbd658731b4ce69a134be072fa83e6ab516f2840402a3f20e3e7f72e95133b23d4880ef24a6d864050dc8e1f7c68f39fa87ca8445917 + languageName: node + linkType: hard + +"@types/retry@npm:0.12.0": + version: 0.12.0 + resolution: "@types/retry@npm:0.12.0" + checksum: 10c0/7c5c9086369826f569b83a4683661557cab1361bac0897a1cefa1a915ff739acd10ca0d62b01071046fe3f5a3f7f2aec80785fe283b75602dc6726781ea3e328 + languageName: node + linkType: hard + +"@types/sax@npm:^1.2.1": + version: 1.2.7 + resolution: "@types/sax@npm:1.2.7" + dependencies: + "@types/node": "npm:*" + checksum: 10c0/d077a761a0753b079bf8279b3993948030ca86ed9125437b9b29c1de40db9b2deb7fddc369f014b58861d450e8b8cc75f163aa29dc8cea81952efbfd859168cf + languageName: node + linkType: hard + +"@types/semver@npm:^7.3.12": + version: 7.5.8 + resolution: "@types/semver@npm:7.5.8" + checksum: 10c0/8663ff927234d1c5fcc04b33062cb2b9fcfbe0f5f351ed26c4d1e1581657deebd506b41ff7fdf89e787e3d33ce05854bc01686379b89e9c49b564c4cfa988efa + languageName: node + linkType: hard + +"@types/send@npm:*": + version: 0.17.4 + resolution: "@types/send@npm:0.17.4" + dependencies: + "@types/mime": "npm:^1" + "@types/node": "npm:*" + checksum: 10c0/7f17fa696cb83be0a104b04b424fdedc7eaba1c9a34b06027239aba513b398a0e2b7279778af521f516a397ced417c96960e5f50fcfce40c4bc4509fb1a5883c + languageName: node + linkType: hard + 
+"@types/serve-index@npm:^1.9.1": + version: 1.9.4 + resolution: "@types/serve-index@npm:1.9.4" + dependencies: + "@types/express": "npm:*" + checksum: 10c0/94c1b9e8f1ea36a229e098e1643d5665d9371f8c2658521718e259130a237c447059b903bac0dcc96ee2c15fd63f49aa647099b7d0d437a67a6946527a837438 + languageName: node + linkType: hard + +"@types/serve-static@npm:*, @types/serve-static@npm:^1.13.10": + version: 1.15.7 + resolution: "@types/serve-static@npm:1.15.7" + dependencies: + "@types/http-errors": "npm:*" + "@types/node": "npm:*" + "@types/send": "npm:*" + checksum: 10c0/26ec864d3a626ea627f8b09c122b623499d2221bbf2f470127f4c9ebfe92bd8a6bb5157001372d4c4bd0dd37a1691620217d9dc4df5aa8f779f3fd996b1c60ae + languageName: node + linkType: hard + +"@types/sockjs@npm:^0.3.33": + version: 0.3.36 + resolution: "@types/sockjs@npm:0.3.36" + dependencies: + "@types/node": "npm:*" + checksum: 10c0/b20b7820ee813f22de4f2ce98bdd12c68c930e016a8912b1ed967595ac0d8a4cbbff44f4d486dd97f77f5927e7b5725bdac7472c9ec5b27f53a5a13179f0612f + languageName: node + linkType: hard + +"@types/supports-color@npm:^8.0.0": + version: 8.1.3 + resolution: "@types/supports-color@npm:8.1.3" + checksum: 10c0/03aa3616b403f3deaeb774df6d3a3969845b0c9f449814a83c2c53eb6818f5f9b571ba205330b0ebe8e46f41fd550f581a34b4310b13f0e0448694cfff37ddbf + languageName: node + linkType: hard + +"@types/unist@npm:*, @types/unist@npm:^3.0.0": + version: 3.0.3 + resolution: "@types/unist@npm:3.0.3" + checksum: 10c0/2b1e4adcab78388e088fcc3c0ae8700f76619dbcb4741d7d201f87e2cb346bfc29a89003cfea2d76c996e1061452e14fcd737e8b25aacf949c1f2d6b2bc3dd60 + languageName: node + linkType: hard + +"@types/unist@npm:^2, @types/unist@npm:^2.0.0, @types/unist@npm:^2.0.2": + version: 2.0.11 + resolution: "@types/unist@npm:2.0.11" + checksum: 10c0/24dcdf25a168f453bb70298145eb043cfdbb82472db0bc0b56d6d51cd2e484b9ed8271d4ac93000a80da568f2402e9339723db262d0869e2bf13bc58e081768d + languageName: node + linkType: hard + +"@types/ws@npm:^8.5.5": + version: 8.5.12 + resolution: "@types/ws@npm:8.5.12" + dependencies: + "@types/node": "npm:*" + checksum: 10c0/3fd77c9e4e05c24ce42bfc7647f7506b08c40a40fe2aea236ef6d4e96fc7cb4006a81ed1b28ec9c457e177a74a72924f4768b7b4652680b42dfd52bc380e15f9 + languageName: node + linkType: hard + +"@types/yargs-parser@npm:*": + version: 21.0.3 + resolution: "@types/yargs-parser@npm:21.0.3" + checksum: 10c0/e71c3bd9d0b73ca82e10bee2064c384ab70f61034bbfb78e74f5206283fc16a6d85267b606b5c22cb2a3338373586786fed595b2009825d6a9115afba36560a0 + languageName: node + linkType: hard + +"@types/yargs@npm:^17.0.8": + version: 17.0.33 + resolution: "@types/yargs@npm:17.0.33" + dependencies: + "@types/yargs-parser": "npm:*" + checksum: 10c0/d16937d7ac30dff697801c3d6f235be2166df42e4a88bf730fa6dc09201de3727c0a9500c59a672122313341de5f24e45ee0ff579c08ce91928e519090b7906b + languageName: node + linkType: hard + +"@typescript-eslint/eslint-plugin@npm:8.4.0": + version: 8.4.0 + resolution: "@typescript-eslint/eslint-plugin@npm:8.4.0" + dependencies: + "@eslint-community/regexpp": "npm:^4.10.0" + "@typescript-eslint/scope-manager": "npm:8.4.0" + "@typescript-eslint/type-utils": "npm:8.4.0" + "@typescript-eslint/utils": "npm:8.4.0" + "@typescript-eslint/visitor-keys": "npm:8.4.0" + graphemer: "npm:^1.4.0" + ignore: "npm:^5.3.1" + natural-compare: "npm:^1.4.0" + ts-api-utils: "npm:^1.3.0" + peerDependencies: + "@typescript-eslint/parser": ^8.0.0 || ^8.0.0-alpha.0 + eslint: ^8.57.0 || ^9.0.0 + peerDependenciesMeta: + typescript: + optional: true + checksum: 
10c0/c75e9bb176e9e0277c9f9c4c006bc2c31ac91984e555de1390a9bbe876e3b6787d59d96015b3f0cd083fd22c814aea4ed4858910d3afdd24d64ab79815da31e5 + languageName: node + linkType: hard + +"@typescript-eslint/parser@npm:8.4.0, @typescript-eslint/parser@npm:^8.4.0": + version: 8.4.0 + resolution: "@typescript-eslint/parser@npm:8.4.0" + dependencies: + "@typescript-eslint/scope-manager": "npm:8.4.0" + "@typescript-eslint/types": "npm:8.4.0" + "@typescript-eslint/typescript-estree": "npm:8.4.0" + "@typescript-eslint/visitor-keys": "npm:8.4.0" + debug: "npm:^4.3.4" + peerDependencies: + eslint: ^8.57.0 || ^9.0.0 + peerDependenciesMeta: + typescript: + optional: true + checksum: 10c0/19f3358e5bc4bbad693183eefe1a90ea64be054a934bc2c8a972ff4738b94580b55ad4955af5797db42298628caa59b3ba3f9fd960582b5fc2c836da3a4578a5 + languageName: node + linkType: hard + +"@typescript-eslint/parser@npm:^6.7.5": + version: 6.21.0 + resolution: "@typescript-eslint/parser@npm:6.21.0" + dependencies: + "@typescript-eslint/scope-manager": "npm:6.21.0" + "@typescript-eslint/types": "npm:6.21.0" + "@typescript-eslint/typescript-estree": "npm:6.21.0" + "@typescript-eslint/visitor-keys": "npm:6.21.0" + debug: "npm:^4.3.4" + peerDependencies: + eslint: ^7.0.0 || ^8.0.0 + peerDependenciesMeta: + typescript: + optional: true + checksum: 10c0/a8f99820679decd0d115c0af61903fb1de3b1b5bec412dc72b67670bf636de77ab07f2a68ee65d6da7976039bbf636907f9d5ca546db3f0b98a31ffbc225bc7d + languageName: node + linkType: hard + +"@typescript-eslint/scope-manager@npm:5.62.0": + version: 5.62.0 + resolution: "@typescript-eslint/scope-manager@npm:5.62.0" + dependencies: + "@typescript-eslint/types": "npm:5.62.0" + "@typescript-eslint/visitor-keys": "npm:5.62.0" + checksum: 10c0/861253235576c1c5c1772d23cdce1418c2da2618a479a7de4f6114a12a7ca853011a1e530525d0931c355a8fd237b9cd828fac560f85f9623e24054fd024726f + languageName: node + linkType: hard + +"@typescript-eslint/scope-manager@npm:6.21.0": + version: 6.21.0 + resolution: "@typescript-eslint/scope-manager@npm:6.21.0" + dependencies: + "@typescript-eslint/types": "npm:6.21.0" + "@typescript-eslint/visitor-keys": "npm:6.21.0" + checksum: 10c0/eaf868938d811cbbea33e97e44ba7050d2b6892202cea6a9622c486b85ab1cf801979edf78036179a8ba4ac26f1dfdf7fcc83a68c1ff66be0b3a8e9a9989b526 + languageName: node + linkType: hard + +"@typescript-eslint/scope-manager@npm:8.4.0": + version: 8.4.0 + resolution: "@typescript-eslint/scope-manager@npm:8.4.0" + dependencies: + "@typescript-eslint/types": "npm:8.4.0" + "@typescript-eslint/visitor-keys": "npm:8.4.0" + checksum: 10c0/95188c663df7db106529c6b93c4c7c61647ed34ab6dd48114e41ddf49140ff606c5501ce2ae451a988ec49b5d3874ea96ff212fc102802327b10affd2ff80a37 + languageName: node + linkType: hard + +"@typescript-eslint/type-utils@npm:8.4.0": + version: 8.4.0 + resolution: "@typescript-eslint/type-utils@npm:8.4.0" + dependencies: + "@typescript-eslint/typescript-estree": "npm:8.4.0" + "@typescript-eslint/utils": "npm:8.4.0" + debug: "npm:^4.3.4" + ts-api-utils: "npm:^1.3.0" + peerDependenciesMeta: + typescript: + optional: true + checksum: 10c0/ae51100594d9ca61c7577b5aed0bd10c1959725df5c38cd9653eed1fd3dbdfff9146b6e48f3409994b4c8d781b9d95025c36b30f73a5a1b3dbdee6d142cecc87 + languageName: node + linkType: hard + +"@typescript-eslint/types@npm:5.62.0": + version: 5.62.0 + resolution: "@typescript-eslint/types@npm:5.62.0" + checksum: 10c0/7febd3a7f0701c0b927e094f02e82d8ee2cada2b186fcb938bc2b94ff6fbad88237afc304cbaf33e82797078bbbb1baf91475f6400912f8b64c89be79bfa4ddf + languageName: node + linkType: hard + 
+"@typescript-eslint/types@npm:6.21.0": + version: 6.21.0 + resolution: "@typescript-eslint/types@npm:6.21.0" + checksum: 10c0/020631d3223bbcff8a0da3efbdf058220a8f48a3de221563996ad1dcc30d6c08dadc3f7608cc08830d21c0d565efd2db19b557b9528921c78aabb605eef2d74d + languageName: node + linkType: hard + +"@typescript-eslint/types@npm:8.4.0": + version: 8.4.0 + resolution: "@typescript-eslint/types@npm:8.4.0" + checksum: 10c0/15e09ced84827c349553530a31822f06ae5bad456c03d561b7d0c64b6ad9b5d7ca795e030bd93e65d5a2cd41bfde36ed08dcd2ff9feaa8b60a67080827f47ecb + languageName: node + linkType: hard + +"@typescript-eslint/typescript-estree@npm:5.62.0": + version: 5.62.0 + resolution: "@typescript-eslint/typescript-estree@npm:5.62.0" + dependencies: + "@typescript-eslint/types": "npm:5.62.0" + "@typescript-eslint/visitor-keys": "npm:5.62.0" + debug: "npm:^4.3.4" + globby: "npm:^11.1.0" + is-glob: "npm:^4.0.3" + semver: "npm:^7.3.7" + tsutils: "npm:^3.21.0" + peerDependenciesMeta: + typescript: + optional: true + checksum: 10c0/d7984a3e9d56897b2481940ec803cb8e7ead03df8d9cfd9797350be82ff765dfcf3cfec04e7355e1779e948da8f02bc5e11719d07a596eb1cb995c48a95e38cf + languageName: node + linkType: hard + +"@typescript-eslint/typescript-estree@npm:6.21.0": + version: 6.21.0 + resolution: "@typescript-eslint/typescript-estree@npm:6.21.0" + dependencies: + "@typescript-eslint/types": "npm:6.21.0" + "@typescript-eslint/visitor-keys": "npm:6.21.0" + debug: "npm:^4.3.4" + globby: "npm:^11.1.0" + is-glob: "npm:^4.0.3" + minimatch: "npm:9.0.3" + semver: "npm:^7.5.4" + ts-api-utils: "npm:^1.0.1" + peerDependenciesMeta: + typescript: + optional: true + checksum: 10c0/af1438c60f080045ebb330155a8c9bb90db345d5069cdd5d01b67de502abb7449d6c75500519df829f913a6b3f490ade3e8215279b6bdc63d0fb0ae61034df5f + languageName: node + linkType: hard + +"@typescript-eslint/typescript-estree@npm:8.4.0": + version: 8.4.0 + resolution: "@typescript-eslint/typescript-estree@npm:8.4.0" + dependencies: + "@typescript-eslint/types": "npm:8.4.0" + "@typescript-eslint/visitor-keys": "npm:8.4.0" + debug: "npm:^4.3.4" + fast-glob: "npm:^3.3.2" + is-glob: "npm:^4.0.3" + minimatch: "npm:^9.0.4" + semver: "npm:^7.6.0" + ts-api-utils: "npm:^1.3.0" + peerDependenciesMeta: + typescript: + optional: true + checksum: 10c0/170702b024121cff9268f53de8054796b0ce025f9a78d6f2bc850a360e5f3f7032ba3ee9d4b7392726308273a5f3ade5ab31b1788b504b514bc15afc07302b37 + languageName: node + linkType: hard + +"@typescript-eslint/utils@npm:8.4.0": + version: 8.4.0 + resolution: "@typescript-eslint/utils@npm:8.4.0" + dependencies: + "@eslint-community/eslint-utils": "npm:^4.4.0" + "@typescript-eslint/scope-manager": "npm:8.4.0" + "@typescript-eslint/types": "npm:8.4.0" + "@typescript-eslint/typescript-estree": "npm:8.4.0" + peerDependencies: + eslint: ^8.57.0 || ^9.0.0 + checksum: 10c0/8c9c36b3aa23f9bcc28cc4b10f0fa2996f1bc6cdd75135f08c2ef734baa30dbd2a8b92f344b90518e1fd07a486936734789fc7e90b780221a7707dad8e9c9364 + languageName: node + linkType: hard + +"@typescript-eslint/utils@npm:^5.62.0": + version: 5.62.0 + resolution: "@typescript-eslint/utils@npm:5.62.0" + dependencies: + "@eslint-community/eslint-utils": "npm:^4.2.0" + "@types/json-schema": "npm:^7.0.9" + "@types/semver": "npm:^7.3.12" + "@typescript-eslint/scope-manager": "npm:5.62.0" + "@typescript-eslint/types": "npm:5.62.0" + "@typescript-eslint/typescript-estree": "npm:5.62.0" + eslint-scope: "npm:^5.1.1" + semver: "npm:^7.3.7" + peerDependencies: + eslint: ^6.0.0 || ^7.0.0 || ^8.0.0 + checksum: 
10c0/f09b7d9952e4a205eb1ced31d7684dd55cee40bf8c2d78e923aa8a255318d97279825733902742c09d8690f37a50243f4c4d383ab16bd7aefaf9c4b438f785e1 + languageName: node + linkType: hard + +"@typescript-eslint/visitor-keys@npm:5.62.0": + version: 5.62.0 + resolution: "@typescript-eslint/visitor-keys@npm:5.62.0" + dependencies: + "@typescript-eslint/types": "npm:5.62.0" + eslint-visitor-keys: "npm:^3.3.0" + checksum: 10c0/7c3b8e4148e9b94d9b7162a596a1260d7a3efc4e65199693b8025c71c4652b8042501c0bc9f57654c1e2943c26da98c0f77884a746c6ae81389fcb0b513d995d + languageName: node + linkType: hard + +"@typescript-eslint/visitor-keys@npm:6.21.0": + version: 6.21.0 + resolution: "@typescript-eslint/visitor-keys@npm:6.21.0" + dependencies: + "@typescript-eslint/types": "npm:6.21.0" + eslint-visitor-keys: "npm:^3.4.1" + checksum: 10c0/7395f69739cfa1cb83c1fb2fad30afa2a814756367302fb4facd5893eff66abc807e8d8f63eba94ed3b0fe0c1c996ac9a1680bcbf0f83717acedc3f2bb724fbf + languageName: node + linkType: hard + +"@typescript-eslint/visitor-keys@npm:8.4.0": + version: 8.4.0 + resolution: "@typescript-eslint/visitor-keys@npm:8.4.0" + dependencies: + "@typescript-eslint/types": "npm:8.4.0" + eslint-visitor-keys: "npm:^3.4.3" + checksum: 10c0/339199b7fbb9ac83b530d03ab25f6bc5ceb688c9cd0ae460112cd14ee78ca7284a845aef5620cdf70170980123475ec875e85ebf595c60255ba3c0d6fe48c714 + languageName: node + linkType: hard + +"@ungap/structured-clone@npm:^1.0.0, @ungap/structured-clone@npm:^1.2.0": + version: 1.2.0 + resolution: "@ungap/structured-clone@npm:1.2.0" + checksum: 10c0/8209c937cb39119f44eb63cf90c0b73e7c754209a6411c707be08e50e29ee81356dca1a848a405c8bdeebfe2f5e4f831ad310ae1689eeef65e7445c090c6657d + languageName: node + linkType: hard + +"@webassemblyjs/ast@npm:1.12.1, @webassemblyjs/ast@npm:^1.12.1": + version: 1.12.1 + resolution: "@webassemblyjs/ast@npm:1.12.1" + dependencies: + "@webassemblyjs/helper-numbers": "npm:1.11.6" + "@webassemblyjs/helper-wasm-bytecode": "npm:1.11.6" + checksum: 10c0/ba7f2b96c6e67e249df6156d02c69eb5f1bd18d5005303cdc42accb053bebbbde673826e54db0437c9748e97abd218366a1d13fa46859b23cde611b6b409998c + languageName: node + linkType: hard + +"@webassemblyjs/floating-point-hex-parser@npm:1.11.6": + version: 1.11.6 + resolution: "@webassemblyjs/floating-point-hex-parser@npm:1.11.6" + checksum: 10c0/37fe26f89e18e4ca0e7d89cfe3b9f17cfa327d7daf906ae01400416dbb2e33c8a125b4dc55ad7ff405e5fcfb6cf0d764074c9bc532b9a31a71e762be57d2ea0a + languageName: node + linkType: hard + +"@webassemblyjs/helper-api-error@npm:1.11.6": + version: 1.11.6 + resolution: "@webassemblyjs/helper-api-error@npm:1.11.6" + checksum: 10c0/a681ed51863e4ff18cf38d223429f414894e5f7496856854d9a886eeddcee32d7c9f66290f2919c9bb6d2fc2b2fae3f989b6a1e02a81e829359738ea0c4d371a + languageName: node + linkType: hard + +"@webassemblyjs/helper-buffer@npm:1.12.1": + version: 1.12.1 + resolution: "@webassemblyjs/helper-buffer@npm:1.12.1" + checksum: 10c0/0270724afb4601237410f7fd845ab58ccda1d5456a8783aadfb16eaaf3f2c9610c28e4a5bcb6ad880cde5183c82f7f116d5ccfc2310502439d33f14b6888b48a + languageName: node + linkType: hard + +"@webassemblyjs/helper-numbers@npm:1.11.6": + version: 1.11.6 + resolution: "@webassemblyjs/helper-numbers@npm:1.11.6" + dependencies: + "@webassemblyjs/floating-point-hex-parser": "npm:1.11.6" + "@webassemblyjs/helper-api-error": "npm:1.11.6" + "@xtuc/long": "npm:4.2.2" + checksum: 10c0/c7d5afc0ff3bd748339b466d8d2f27b908208bf3ff26b2e8e72c39814479d486e0dca6f3d4d776fd9027c1efe05b5c0716c57a23041eb34473892b2731c33af3 + languageName: node + linkType: hard + 
+"@webassemblyjs/helper-wasm-bytecode@npm:1.11.6": + version: 1.11.6 + resolution: "@webassemblyjs/helper-wasm-bytecode@npm:1.11.6" + checksum: 10c0/79d2bebdd11383d142745efa32781249745213af8e022651847382685ca76709f83e1d97adc5f0d3c2b8546bf02864f8b43a531fdf5ca0748cb9e4e0ef2acaa5 + languageName: node + linkType: hard + +"@webassemblyjs/helper-wasm-section@npm:1.12.1": + version: 1.12.1 + resolution: "@webassemblyjs/helper-wasm-section@npm:1.12.1" + dependencies: + "@webassemblyjs/ast": "npm:1.12.1" + "@webassemblyjs/helper-buffer": "npm:1.12.1" + "@webassemblyjs/helper-wasm-bytecode": "npm:1.11.6" + "@webassemblyjs/wasm-gen": "npm:1.12.1" + checksum: 10c0/0546350724d285ae3c26e6fc444be4c3b5fb824f3be0ec8ceb474179dc3f4430336dd2e36a44b3e3a1a6815960e5eec98cd9b3a8ec66dc53d86daedd3296a6a2 + languageName: node + linkType: hard + +"@webassemblyjs/ieee754@npm:1.11.6": + version: 1.11.6 + resolution: "@webassemblyjs/ieee754@npm:1.11.6" + dependencies: + "@xtuc/ieee754": "npm:^1.2.0" + checksum: 10c0/59de0365da450322c958deadade5ec2d300c70f75e17ae55de3c9ce564deff5b429e757d107c7ec69bd0ba169c6b6cc2ff66293ab7264a7053c829b50ffa732f + languageName: node + linkType: hard + +"@webassemblyjs/leb128@npm:1.11.6": + version: 1.11.6 + resolution: "@webassemblyjs/leb128@npm:1.11.6" + dependencies: + "@xtuc/long": "npm:4.2.2" + checksum: 10c0/cb344fc04f1968209804de4da018679c5d4708a03b472a33e0fa75657bb024978f570d3ccf9263b7f341f77ecaa75d0e051b9cd4b7bb17a339032cfd1c37f96e + languageName: node + linkType: hard + +"@webassemblyjs/utf8@npm:1.11.6": + version: 1.11.6 + resolution: "@webassemblyjs/utf8@npm:1.11.6" + checksum: 10c0/14d6c24751a89ad9d801180b0d770f30a853c39f035a15fbc96266d6ac46355227abd27a3fd2eeaa97b4294ced2440a6b012750ae17bafe1a7633029a87b6bee + languageName: node + linkType: hard + +"@webassemblyjs/wasm-edit@npm:^1.12.1": + version: 1.12.1 + resolution: "@webassemblyjs/wasm-edit@npm:1.12.1" + dependencies: + "@webassemblyjs/ast": "npm:1.12.1" + "@webassemblyjs/helper-buffer": "npm:1.12.1" + "@webassemblyjs/helper-wasm-bytecode": "npm:1.11.6" + "@webassemblyjs/helper-wasm-section": "npm:1.12.1" + "@webassemblyjs/wasm-gen": "npm:1.12.1" + "@webassemblyjs/wasm-opt": "npm:1.12.1" + "@webassemblyjs/wasm-parser": "npm:1.12.1" + "@webassemblyjs/wast-printer": "npm:1.12.1" + checksum: 10c0/972f5e6c522890743999e0ed45260aae728098801c6128856b310dd21f1ee63435fc7b518e30e0ba1cdafd0d1e38275829c1e4451c3536a1d9e726e07a5bba0b + languageName: node + linkType: hard + +"@webassemblyjs/wasm-gen@npm:1.12.1": + version: 1.12.1 + resolution: "@webassemblyjs/wasm-gen@npm:1.12.1" + dependencies: + "@webassemblyjs/ast": "npm:1.12.1" + "@webassemblyjs/helper-wasm-bytecode": "npm:1.11.6" + "@webassemblyjs/ieee754": "npm:1.11.6" + "@webassemblyjs/leb128": "npm:1.11.6" + "@webassemblyjs/utf8": "npm:1.11.6" + checksum: 10c0/1e257288177af9fa34c69cab94f4d9036ebed611f77f3897c988874e75182eeeec759c79b89a7a49dd24624fc2d3d48d5580b62b67c4a1c9bfbdcd266b281c16 + languageName: node + linkType: hard + +"@webassemblyjs/wasm-opt@npm:1.12.1": + version: 1.12.1 + resolution: "@webassemblyjs/wasm-opt@npm:1.12.1" + dependencies: + "@webassemblyjs/ast": "npm:1.12.1" + "@webassemblyjs/helper-buffer": "npm:1.12.1" + "@webassemblyjs/wasm-gen": "npm:1.12.1" + "@webassemblyjs/wasm-parser": "npm:1.12.1" + checksum: 10c0/992a45e1f1871033c36987459436ab4e6430642ca49328e6e32a13de9106fe69ae6c0ac27d7050efd76851e502d11cd1ac0e06b55655dfa889ad82f11a2712fb + languageName: node + linkType: hard + +"@webassemblyjs/wasm-parser@npm:1.12.1, @webassemblyjs/wasm-parser@npm:^1.12.1": + 
version: 1.12.1 + resolution: "@webassemblyjs/wasm-parser@npm:1.12.1" + dependencies: + "@webassemblyjs/ast": "npm:1.12.1" + "@webassemblyjs/helper-api-error": "npm:1.11.6" + "@webassemblyjs/helper-wasm-bytecode": "npm:1.11.6" + "@webassemblyjs/ieee754": "npm:1.11.6" + "@webassemblyjs/leb128": "npm:1.11.6" + "@webassemblyjs/utf8": "npm:1.11.6" + checksum: 10c0/e85cec1acad07e5eb65b92d37c8e6ca09c6ca50d7ca58803a1532b452c7321050a0328c49810c337cc2dfd100c5326a54d5ebd1aa5c339ebe6ef10c250323a0e + languageName: node + linkType: hard + +"@webassemblyjs/wast-printer@npm:1.12.1": + version: 1.12.1 + resolution: "@webassemblyjs/wast-printer@npm:1.12.1" + dependencies: + "@webassemblyjs/ast": "npm:1.12.1" + "@xtuc/long": "npm:4.2.2" + checksum: 10c0/39bf746eb7a79aa69953f194943bbc43bebae98bd7cadd4d8bc8c0df470ca6bf9d2b789effaa180e900fab4e2691983c1f7d41571458bd2a26267f2f0c73705a + languageName: node + linkType: hard + +"@xtuc/ieee754@npm:^1.2.0": + version: 1.2.0 + resolution: "@xtuc/ieee754@npm:1.2.0" + checksum: 10c0/a8565d29d135039bd99ae4b2220d3e167d22cf53f867e491ed479b3f84f895742d0097f935b19aab90265a23d5d46711e4204f14c479ae3637fbf06c4666882f + languageName: node + linkType: hard + +"@xtuc/long@npm:4.2.2": + version: 4.2.2 + resolution: "@xtuc/long@npm:4.2.2" + checksum: 10c0/8582cbc69c79ad2d31568c412129bf23d2b1210a1dfb60c82d5a1df93334da4ee51f3057051658569e2c196d8dc33bc05ae6b974a711d0d16e801e1d0647ccd1 + languageName: node + linkType: hard + +"abbrev@npm:^2.0.0": + version: 2.0.0 + resolution: "abbrev@npm:2.0.0" + checksum: 10c0/f742a5a107473946f426c691c08daba61a1d15942616f300b5d32fd735be88fef5cba24201757b6c407fd564555fb48c751cfa33519b2605c8a7aadd22baf372 + languageName: node + linkType: hard + +"accepts@npm:~1.3.4, accepts@npm:~1.3.5, accepts@npm:~1.3.8": + version: 1.3.8 + resolution: "accepts@npm:1.3.8" + dependencies: + mime-types: "npm:~2.1.34" + negotiator: "npm:0.6.3" + checksum: 10c0/3a35c5f5586cfb9a21163ca47a5f77ac34fa8ceb5d17d2fa2c0d81f41cbd7f8c6fa52c77e2c039acc0f4d09e71abdc51144246900f6bef5e3c4b333f77d89362 + languageName: node + linkType: hard + +"acorn-import-attributes@npm:^1.9.5": + version: 1.9.5 + resolution: "acorn-import-attributes@npm:1.9.5" + peerDependencies: + acorn: ^8 + checksum: 10c0/5926eaaead2326d5a86f322ff1b617b0f698aa61dc719a5baa0e9d955c9885cc71febac3fb5bacff71bbf2c4f9c12db2056883c68c53eb962c048b952e1e013d + languageName: node + linkType: hard + +"acorn-jsx@npm:^5.0.0, acorn-jsx@npm:^5.3.2": + version: 5.3.2 + resolution: "acorn-jsx@npm:5.3.2" + peerDependencies: + acorn: ^6.0.0 || ^7.0.0 || ^8.0.0 + checksum: 10c0/4c54868fbef3b8d58927d5e33f0a4de35f59012fe7b12cf9dfbb345fb8f46607709e1c4431be869a23fb63c151033d84c4198fa9f79385cec34fcb1dd53974c1 + languageName: node + linkType: hard + +"acorn-walk@npm:^8.0.0": + version: 8.3.3 + resolution: "acorn-walk@npm:8.3.3" + dependencies: + acorn: "npm:^8.11.0" + checksum: 10c0/4a9e24313e6a0a7b389e712ba69b66b455b4cb25988903506a8d247e7b126f02060b05a8a5b738a9284214e4ca95f383dd93443a4ba84f1af9b528305c7f243b + languageName: node + linkType: hard + +"acorn@npm:^8.0.0, acorn@npm:^8.0.4, acorn@npm:^8.11.0, acorn@npm:^8.11.3, acorn@npm:^8.7.1, acorn@npm:^8.8.2, acorn@npm:^8.9.0": + version: 8.12.1 + resolution: "acorn@npm:8.12.1" + bin: + acorn: bin/acorn + checksum: 10c0/51fb26cd678f914e13287e886da2d7021f8c2bc0ccc95e03d3e0447ee278dd3b40b9c57dc222acd5881adcf26f3edc40901a4953403232129e3876793cd17386 + languageName: node + linkType: hard + +"address@npm:^1.0.1, address@npm:^1.1.2": + version: 1.2.2 + resolution: "address@npm:1.2.2" + checksum: 
10c0/1c8056b77fb124456997b78ed682ecc19d2fd7ea8bd5850a2aa8c3e3134c913847c57bcae418622efd32ba858fa1e242a40a251ac31da0515664fc0ac03a047d + languageName: node + linkType: hard + +"agent-base@npm:^7.0.2, agent-base@npm:^7.1.0, agent-base@npm:^7.1.1": + version: 7.1.1 + resolution: "agent-base@npm:7.1.1" + dependencies: + debug: "npm:^4.3.4" + checksum: 10c0/e59ce7bed9c63bf071a30cc471f2933862044c97fd9958967bfe22521d7a0f601ce4ed5a8c011799d0c726ca70312142ae193bbebb60f576b52be19d4a363b50 + languageName: node + linkType: hard + +"aggregate-error@npm:^3.0.0": + version: 3.1.0 + resolution: "aggregate-error@npm:3.1.0" + dependencies: + clean-stack: "npm:^2.0.0" + indent-string: "npm:^4.0.0" + checksum: 10c0/a42f67faa79e3e6687a4923050e7c9807db3848a037076f791d10e092677d65c1d2d863b7848560699f40fc0502c19f40963fb1cd1fb3d338a7423df8e45e039 + languageName: node + linkType: hard + +"ajv-formats@npm:^2.1.1": + version: 2.1.1 + resolution: "ajv-formats@npm:2.1.1" + dependencies: + ajv: "npm:^8.0.0" + peerDependencies: + ajv: ^8.0.0 + peerDependenciesMeta: + ajv: + optional: true + checksum: 10c0/e43ba22e91b6a48d96224b83d260d3a3a561b42d391f8d3c6d2c1559f9aa5b253bfb306bc94bbeca1d967c014e15a6efe9a207309e95b3eaae07fcbcdc2af662 + languageName: node + linkType: hard + +"ajv-keywords@npm:^3.4.1, ajv-keywords@npm:^3.5.2": + version: 3.5.2 + resolution: "ajv-keywords@npm:3.5.2" + peerDependencies: + ajv: ^6.9.1 + checksum: 10c0/0c57a47cbd656e8cdfd99d7c2264de5868918ffa207c8d7a72a7f63379d4333254b2ba03d69e3c035e996a3fd3eb6d5725d7a1597cca10694296e32510546360 + languageName: node + linkType: hard + +"ajv-keywords@npm:^5.1.0": + version: 5.1.0 + resolution: "ajv-keywords@npm:5.1.0" + dependencies: + fast-deep-equal: "npm:^3.1.3" + peerDependencies: + ajv: ^8.8.2 + checksum: 10c0/18bec51f0171b83123ba1d8883c126e60c6f420cef885250898bf77a8d3e65e3bfb9e8564f497e30bdbe762a83e0d144a36931328616a973ee669dc74d4a9590 + languageName: node + linkType: hard + +"ajv@npm:^6.12.2, ajv@npm:^6.12.4, ajv@npm:^6.12.5": + version: 6.12.6 + resolution: "ajv@npm:6.12.6" + dependencies: + fast-deep-equal: "npm:^3.1.1" + fast-json-stable-stringify: "npm:^2.0.0" + json-schema-traverse: "npm:^0.4.1" + uri-js: "npm:^4.2.2" + checksum: 10c0/41e23642cbe545889245b9d2a45854ebba51cda6c778ebced9649420d9205f2efb39cb43dbc41e358409223b1ea43303ae4839db682c848b891e4811da1a5a71 + languageName: node + linkType: hard + +"ajv@npm:^8.0.0, ajv@npm:^8.9.0": + version: 8.17.1 + resolution: "ajv@npm:8.17.1" + dependencies: + fast-deep-equal: "npm:^3.1.3" + fast-uri: "npm:^3.0.1" + json-schema-traverse: "npm:^1.0.0" + require-from-string: "npm:^2.0.2" + checksum: 10c0/ec3ba10a573c6b60f94639ffc53526275917a2df6810e4ab5a6b959d87459f9ef3f00d5e7865b82677cb7d21590355b34da14d1d0b9c32d75f95a187e76fff35 + languageName: node + linkType: hard + +"algoliasearch-helper@npm:^3.13.3": + version: 3.22.4 + resolution: "algoliasearch-helper@npm:3.22.4" + dependencies: + "@algolia/events": "npm:^4.0.1" + peerDependencies: + algoliasearch: ">= 3.1 < 6" + checksum: 10c0/84108699d89c5cd8a2017c52b13704403797e02389678d8bc2a489da46886365acd95aef0bc87642cb9c84f974bd0ac25b74cdecfc3cca3041afdfa07f78821f + languageName: node + linkType: hard + +"algoliasearch@npm:^4.18.0, algoliasearch@npm:^4.19.1": + version: 4.24.0 + resolution: "algoliasearch@npm:4.24.0" + dependencies: + "@algolia/cache-browser-local-storage": "npm:4.24.0" + "@algolia/cache-common": "npm:4.24.0" + "@algolia/cache-in-memory": "npm:4.24.0" + "@algolia/client-account": "npm:4.24.0" + "@algolia/client-analytics": "npm:4.24.0" + 
"@algolia/client-common": "npm:4.24.0" + "@algolia/client-personalization": "npm:4.24.0" + "@algolia/client-search": "npm:4.24.0" + "@algolia/logger-common": "npm:4.24.0" + "@algolia/logger-console": "npm:4.24.0" + "@algolia/recommend": "npm:4.24.0" + "@algolia/requester-browser-xhr": "npm:4.24.0" + "@algolia/requester-common": "npm:4.24.0" + "@algolia/requester-node-http": "npm:4.24.0" + "@algolia/transporter": "npm:4.24.0" + checksum: 10c0/ef09096619191181f3ea3376ed46b5bb2de1cd7d97a8d016f7cfe8e93c89d34f38cac8db5835314f8d97c939ad007c3dde716c1609953540258352edb25d12c2 + languageName: node + linkType: hard + +"ansi-align@npm:^3.0.1": + version: 3.0.1 + resolution: "ansi-align@npm:3.0.1" + dependencies: + string-width: "npm:^4.1.0" + checksum: 10c0/ad8b755a253a1bc8234eb341e0cec68a857ab18bf97ba2bda529e86f6e30460416523e0ec58c32e5c21f0ca470d779503244892873a5895dbd0c39c788e82467 + languageName: node + linkType: hard + +"ansi-html-community@npm:^0.0.8": + version: 0.0.8 + resolution: "ansi-html-community@npm:0.0.8" + bin: + ansi-html: bin/ansi-html + checksum: 10c0/45d3a6f0b4f10b04fdd44bef62972e2470bfd917bf00439471fa7473d92d7cbe31369c73db863cc45dda115cb42527f39e232e9256115534b8ee5806b0caeed4 + languageName: node + linkType: hard + +"ansi-regex@npm:^2.0.0": + version: 2.1.1 + resolution: "ansi-regex@npm:2.1.1" + checksum: 10c0/78cebaf50bce2cb96341a7230adf28d804611da3ce6bf338efa7b72f06cc6ff648e29f80cd95e582617ba58d5fdbec38abfeed3500a98bce8381a9daec7c548b + languageName: node + linkType: hard + +"ansi-regex@npm:^5.0.1": + version: 5.0.1 + resolution: "ansi-regex@npm:5.0.1" + checksum: 10c0/9a64bb8627b434ba9327b60c027742e5d17ac69277960d041898596271d992d4d52ba7267a63ca10232e29f6107fc8a835f6ce8d719b88c5f8493f8254813737 + languageName: node + linkType: hard + +"ansi-regex@npm:^6.0.1": + version: 6.0.1 + resolution: "ansi-regex@npm:6.0.1" + checksum: 10c0/cbe16dbd2c6b2735d1df7976a7070dd277326434f0212f43abf6d87674095d247968209babdaad31bb00882fa68807256ba9be340eec2f1004de14ca75f52a08 + languageName: node + linkType: hard + +"ansi-styles@npm:^2.2.1": + version: 2.2.1 + resolution: "ansi-styles@npm:2.2.1" + checksum: 10c0/7c68aed4f1857389e7a12f85537ea5b40d832656babbf511cc7ecd9efc52889b9c3e5653a71a6aade783c3c5e0aa223ad4ff8e83c27ac8a666514e6c79068cab + languageName: node + linkType: hard + +"ansi-styles@npm:^3.2.1": + version: 3.2.1 + resolution: "ansi-styles@npm:3.2.1" + dependencies: + color-convert: "npm:^1.9.0" + checksum: 10c0/ece5a8ef069fcc5298f67e3f4771a663129abd174ea2dfa87923a2be2abf6cd367ef72ac87942da00ce85bd1d651d4cd8595aebdb1b385889b89b205860e977b + languageName: node + linkType: hard + +"ansi-styles@npm:^4.0.0, ansi-styles@npm:^4.1.0": + version: 4.3.0 + resolution: "ansi-styles@npm:4.3.0" + dependencies: + color-convert: "npm:^2.0.1" + checksum: 10c0/895a23929da416f2bd3de7e9cb4eabd340949328ab85ddd6e484a637d8f6820d485f53933446f5291c3b760cbc488beb8e88573dd0f9c7daf83dccc8fe81b041 + languageName: node + linkType: hard + +"ansi-styles@npm:^5.0.0": + version: 5.2.0 + resolution: "ansi-styles@npm:5.2.0" + checksum: 10c0/9c4ca80eb3c2fb7b33841c210d2f20807f40865d27008d7c3f707b7f95cab7d67462a565e2388ac3285b71cb3d9bb2173de8da37c57692a362885ec34d6e27df + languageName: node + linkType: hard + +"ansi-styles@npm:^6.1.0": + version: 6.2.1 + resolution: "ansi-styles@npm:6.2.1" + checksum: 10c0/5d1ec38c123984bcedd996eac680d548f31828bd679a66db2bdf11844634dde55fec3efa9c6bb1d89056a5e79c1ac540c4c784d592ea1d25028a92227d2f2d5c + languageName: node + linkType: hard + +"anymatch@npm:~3.1.2": + version: 3.1.3 + resolution: 
"anymatch@npm:3.1.3" + dependencies: + normalize-path: "npm:^3.0.0" + picomatch: "npm:^2.0.4" + checksum: 10c0/57b06ae984bc32a0d22592c87384cd88fe4511b1dd7581497831c56d41939c8a001b28e7b853e1450f2bf61992dfcaa8ae2d0d161a0a90c4fb631ef07098fbac + languageName: node + linkType: hard + +"arg@npm:^5.0.0": + version: 5.0.2 + resolution: "arg@npm:5.0.2" + checksum: 10c0/ccaf86f4e05d342af6666c569f844bec426595c567d32a8289715087825c2ca7edd8a3d204e4d2fb2aa4602e09a57d0c13ea8c9eea75aac3dbb4af5514e6800e + languageName: node + linkType: hard + +"argparse@npm:^1.0.7": + version: 1.0.10 + resolution: "argparse@npm:1.0.10" + dependencies: + sprintf-js: "npm:~1.0.2" + checksum: 10c0/b2972c5c23c63df66bca144dbc65d180efa74f25f8fd9b7d9a0a6c88ae839db32df3d54770dcb6460cf840d232b60695d1a6b1053f599d84e73f7437087712de + languageName: node + linkType: hard + +"argparse@npm:^2.0.1": + version: 2.0.1 + resolution: "argparse@npm:2.0.1" + checksum: 10c0/c5640c2d89045371c7cedd6a70212a04e360fd34d6edeae32f6952c63949e3525ea77dbec0289d8213a99bbaeab5abfa860b5c12cf88a2e6cf8106e90dd27a7e + languageName: node + linkType: hard + +"array-buffer-byte-length@npm:^1.0.1": + version: 1.0.1 + resolution: "array-buffer-byte-length@npm:1.0.1" + dependencies: + call-bind: "npm:^1.0.5" + is-array-buffer: "npm:^3.0.4" + checksum: 10c0/f5cdf54527cd18a3d2852ddf73df79efec03829e7373a8322ef5df2b4ef546fb365c19c71d6b42d641cb6bfe0f1a2f19bc0ece5b533295f86d7c3d522f228917 + languageName: node + linkType: hard + +"array-flatten@npm:1.1.1": + version: 1.1.1 + resolution: "array-flatten@npm:1.1.1" + checksum: 10c0/806966c8abb2f858b08f5324d9d18d7737480610f3bd5d3498aaae6eb5efdc501a884ba019c9b4a8f02ff67002058749d05548fd42fa8643f02c9c7f22198b91 + languageName: node + linkType: hard + +"array-includes@npm:^3.1.6, array-includes@npm:^3.1.8": + version: 3.1.8 + resolution: "array-includes@npm:3.1.8" + dependencies: + call-bind: "npm:^1.0.7" + define-properties: "npm:^1.2.1" + es-abstract: "npm:^1.23.2" + es-object-atoms: "npm:^1.0.0" + get-intrinsic: "npm:^1.2.4" + is-string: "npm:^1.0.7" + checksum: 10c0/5b1004d203e85873b96ddc493f090c9672fd6c80d7a60b798da8a14bff8a670ff95db5aafc9abc14a211943f05220dacf8ea17638ae0af1a6a47b8c0b48ce370 + languageName: node + linkType: hard + +"array-union@npm:^2.1.0": + version: 2.1.0 + resolution: "array-union@npm:2.1.0" + checksum: 10c0/429897e68110374f39b771ec47a7161fc6a8fc33e196857c0a396dc75df0b5f65e4d046674db764330b6bb66b39ef48dd7c53b6a2ee75cfb0681e0c1a7033962 + languageName: node + linkType: hard + +"array.prototype.findlast@npm:^1.2.5": + version: 1.2.5 + resolution: "array.prototype.findlast@npm:1.2.5" + dependencies: + call-bind: "npm:^1.0.7" + define-properties: "npm:^1.2.1" + es-abstract: "npm:^1.23.2" + es-errors: "npm:^1.3.0" + es-object-atoms: "npm:^1.0.0" + es-shim-unscopables: "npm:^1.0.2" + checksum: 10c0/ddc952b829145ab45411b9d6adcb51a8c17c76bf89c9dd64b52d5dffa65d033da8c076ed2e17091779e83bc892b9848188d7b4b33453c5565e65a92863cb2775 + languageName: node + linkType: hard + +"array.prototype.flat@npm:^1.3.1": + version: 1.3.2 + resolution: "array.prototype.flat@npm:1.3.2" + dependencies: + call-bind: "npm:^1.0.2" + define-properties: "npm:^1.2.0" + es-abstract: "npm:^1.22.1" + es-shim-unscopables: "npm:^1.0.0" + checksum: 10c0/a578ed836a786efbb6c2db0899ae80781b476200617f65a44846cb1ed8bd8b24c8821b83703375d8af639c689497b7b07277060024b9919db94ac3e10dc8a49b + languageName: node + linkType: hard + +"array.prototype.flatmap@npm:^1.3.2": + version: 1.3.2 + resolution: "array.prototype.flatmap@npm:1.3.2" + dependencies: + 
call-bind: "npm:^1.0.2" + define-properties: "npm:^1.2.0" + es-abstract: "npm:^1.22.1" + es-shim-unscopables: "npm:^1.0.0" + checksum: 10c0/67b3f1d602bb73713265145853128b1ad77cc0f9b833c7e1e056b323fbeac41a4ff1c9c99c7b9445903caea924d9ca2450578d9011913191aa88cc3c3a4b54f4 + languageName: node + linkType: hard + +"array.prototype.tosorted@npm:^1.1.4": + version: 1.1.4 + resolution: "array.prototype.tosorted@npm:1.1.4" + dependencies: + call-bind: "npm:^1.0.7" + define-properties: "npm:^1.2.1" + es-abstract: "npm:^1.23.3" + es-errors: "npm:^1.3.0" + es-shim-unscopables: "npm:^1.0.2" + checksum: 10c0/eb3c4c4fc0381b0bf6dba2ea4d48d367c2827a0d4236a5718d97caaccc6b78f11f4cadf090736e86301d295a6aa4967ed45568f92ced51be8cbbacd9ca410943 + languageName: node + linkType: hard + +"arraybuffer.prototype.slice@npm:^1.0.3": + version: 1.0.3 + resolution: "arraybuffer.prototype.slice@npm:1.0.3" + dependencies: + array-buffer-byte-length: "npm:^1.0.1" + call-bind: "npm:^1.0.5" + define-properties: "npm:^1.2.1" + es-abstract: "npm:^1.22.3" + es-errors: "npm:^1.2.1" + get-intrinsic: "npm:^1.2.3" + is-array-buffer: "npm:^3.0.4" + is-shared-array-buffer: "npm:^1.0.2" + checksum: 10c0/d32754045bcb2294ade881d45140a5e52bda2321b9e98fa514797b7f0d252c4c5ab0d1edb34112652c62fa6a9398def568da63a4d7544672229afea283358c36 + languageName: node + linkType: hard + +"astring@npm:^1.8.0": + version: 1.8.6 + resolution: "astring@npm:1.8.6" + bin: + astring: bin/astring + checksum: 10c0/31f09144597048c11072417959a412f208f8f95ba8dce408dfbc3367acb929f31fbcc00ed5eb61ccbf7c2f1173b9ac8bfcaaa37134a9455050c669b2b036ed88 + languageName: node + linkType: hard + +"at-least-node@npm:^1.0.0": + version: 1.0.0 + resolution: "at-least-node@npm:1.0.0" + checksum: 10c0/4c058baf6df1bc5a1697cf182e2029c58cd99975288a13f9e70068ef5d6f4e1f1fd7c4d2c3c4912eae44797d1725be9700995736deca441b39f3e66d8dee97ef + languageName: node + linkType: hard + +"autoprefixer@npm:^10.4.14, autoprefixer@npm:^10.4.19": + version: 10.4.20 + resolution: "autoprefixer@npm:10.4.20" + dependencies: + browserslist: "npm:^4.23.3" + caniuse-lite: "npm:^1.0.30001646" + fraction.js: "npm:^4.3.7" + normalize-range: "npm:^0.1.2" + picocolors: "npm:^1.0.1" + postcss-value-parser: "npm:^4.2.0" + peerDependencies: + postcss: ^8.1.0 + bin: + autoprefixer: bin/autoprefixer + checksum: 10c0/e1f00978a26e7c5b54ab12036d8c13833fad7222828fc90914771b1263f51b28c7ddb5803049de4e77696cbd02bb25cfc3634e80533025bb26c26aacdf938940 + languageName: node + linkType: hard + +"available-typed-arrays@npm:^1.0.7": + version: 1.0.7 + resolution: "available-typed-arrays@npm:1.0.7" + dependencies: + possible-typed-array-names: "npm:^1.0.0" + checksum: 10c0/d07226ef4f87daa01bd0fe80f8f310982e345f372926da2e5296aecc25c41cab440916bbaa4c5e1034b453af3392f67df5961124e4b586df1e99793a1374bdb2 + languageName: node + linkType: hard + +"b4a@npm:^1.6.4": + version: 1.6.6 + resolution: "b4a@npm:1.6.6" + checksum: 10c0/56f30277666cb511a15829e38d369b114df7dc8cec4cedc09cc5d685bc0f27cb63c7bcfb58e09a19a1b3c4f2541069ab078b5328542e85d74a39620327709a38 + languageName: node + linkType: hard + +"babel-loader@npm:^9.1.3": + version: 9.1.3 + resolution: "babel-loader@npm:9.1.3" + dependencies: + find-cache-dir: "npm:^4.0.0" + schema-utils: "npm:^4.0.0" + peerDependencies: + "@babel/core": ^7.12.0 + webpack: ">=5" + checksum: 10c0/e3fc3c9e02bd908b37e8e8cd4f3d7280cf6ac45e33fc203aedbb615135a0fecc33bf92573b71a166a827af029d302c0b060354985cd91d510320bd70a2f949eb + languageName: node + linkType: hard + +"babel-plugin-dynamic-import-node@npm:^2.3.3": + 
version: 2.3.3 + resolution: "babel-plugin-dynamic-import-node@npm:2.3.3" + dependencies: + object.assign: "npm:^4.1.0" + checksum: 10c0/1bd80df981e1fc1aff0cd4e390cf27aaa34f95f7620cd14dff07ba3bad56d168c098233a7d2deb2c9b1dc13643e596a6b94fc608a3412ee3c56e74a25cd2167e + languageName: node + linkType: hard + +"babel-plugin-polyfill-corejs2@npm:^0.4.10": + version: 0.4.11 + resolution: "babel-plugin-polyfill-corejs2@npm:0.4.11" + dependencies: + "@babel/compat-data": "npm:^7.22.6" + "@babel/helper-define-polyfill-provider": "npm:^0.6.2" + semver: "npm:^6.3.1" + peerDependencies: + "@babel/core": ^7.4.0 || ^8.0.0-0 <8.0.0 + checksum: 10c0/b2217bc8d5976cf8142453ed44daabf0b2e0e75518f24eac83b54a8892e87a88f1bd9089daa92fd25df979ecd0acfd29b6bc28c4182c1c46344cee15ef9bce84 + languageName: node + linkType: hard + +"babel-plugin-polyfill-corejs3@npm:^0.10.1, babel-plugin-polyfill-corejs3@npm:^0.10.4": + version: 0.10.6 + resolution: "babel-plugin-polyfill-corejs3@npm:0.10.6" + dependencies: + "@babel/helper-define-polyfill-provider": "npm:^0.6.2" + core-js-compat: "npm:^3.38.0" + peerDependencies: + "@babel/core": ^7.4.0 || ^8.0.0-0 <8.0.0 + checksum: 10c0/3a69220471b07722c2ae6537310bf26b772514e12b601398082965459c838be70a0ca70b0662f0737070654ff6207673391221d48599abb4a2b27765206d9f79 + languageName: node + linkType: hard + +"babel-plugin-polyfill-regenerator@npm:^0.6.1": + version: 0.6.2 + resolution: "babel-plugin-polyfill-regenerator@npm:0.6.2" + dependencies: + "@babel/helper-define-polyfill-provider": "npm:^0.6.2" + peerDependencies: + "@babel/core": ^7.4.0 || ^8.0.0-0 <8.0.0 + checksum: 10c0/bc541037cf7620bc84ddb75a1c0ce3288f90e7d2799c070a53f8a495c8c8ae0316447becb06f958dd25dcce2a2fce855d318ecfa48036a1ddb218d55aa38a744 + languageName: node + linkType: hard + +"bail@npm:^2.0.0": + version: 2.0.2 + resolution: "bail@npm:2.0.2" + checksum: 10c0/25cbea309ef6a1f56214187004e8f34014eb015713ea01fa5b9b7e9e776ca88d0fdffd64143ac42dc91966c915a4b7b683411b56e14929fad16153fc026ffb8b + languageName: node + linkType: hard + +"balanced-match@npm:^1.0.0": + version: 1.0.2 + resolution: "balanced-match@npm:1.0.2" + checksum: 10c0/9308baf0a7e4838a82bbfd11e01b1cb0f0cf2893bc1676c27c2a8c0e70cbae1c59120c3268517a8ae7fb6376b4639ef81ca22582611dbee4ed28df945134aaee + languageName: node + linkType: hard + +"bare-events@npm:^2.0.0, bare-events@npm:^2.2.0": + version: 2.4.2 + resolution: "bare-events@npm:2.4.2" + checksum: 10c0/09fa923061f31f815e83504e2ed4a8ba87732a01db40a7fae703dbb7eef7f05d99264b5e186074cbe9698213990d1af564c62cca07a5ff88baea8099ad9a6303 + languageName: node + linkType: hard + +"bare-fs@npm:^2.1.1": + version: 2.3.1 + resolution: "bare-fs@npm:2.3.1" + dependencies: + bare-events: "npm:^2.0.0" + bare-path: "npm:^2.0.0" + bare-stream: "npm:^2.0.0" + checksum: 10c0/820979ad3dd8693076ba08af842e41b5119fcca63f4324b8f28d96b96050cd260085dffd1169dc644f20746fadb4cf4368b317f2fa2db4e40890921ceb557581 + languageName: node + linkType: hard + +"bare-os@npm:^2.1.0": + version: 2.4.0 + resolution: "bare-os@npm:2.4.0" + checksum: 10c0/85615522fd8309d3815d3bef227623f008fac34e037459294a7e24bb2b51ea125597274b8aa7e7038f82de89c15e2148fef299eece40ec3ea33797a357c4f2bb + languageName: node + linkType: hard + +"bare-path@npm:^2.0.0, bare-path@npm:^2.1.0": + version: 2.1.3 + resolution: "bare-path@npm:2.1.3" + dependencies: + bare-os: "npm:^2.1.0" + checksum: 10c0/35587e177fc8fa5b13fb90bac8779b5ce49c99016d221ddaefe2232d02bd4295d79b941e14ae19fda75ec42a6fe5fb66c07d83ae7ec11462178e66b7be65ca74 + languageName: node + linkType: hard + 
+"bare-stream@npm:^2.0.0": + version: 2.1.3 + resolution: "bare-stream@npm:2.1.3" + dependencies: + streamx: "npm:^2.18.0" + checksum: 10c0/8703b1d80318496ea560483943d5f425a160ded8d3d75659571842caf5f374f52668809bc1e39b032af14df7210973995efaf273f8c35986bef697380ef4674a + languageName: node + linkType: hard + +"base64-js@npm:^1.3.1": + version: 1.5.1 + resolution: "base64-js@npm:1.5.1" + checksum: 10c0/f23823513b63173a001030fae4f2dabe283b99a9d324ade3ad3d148e218134676f1ee8568c877cd79ec1c53158dcf2d2ba527a97c606618928ba99dd930102bf + languageName: node + linkType: hard + +"batch@npm:0.6.1": + version: 0.6.1 + resolution: "batch@npm:0.6.1" + checksum: 10c0/925a13897b4db80d4211082fe287bcf96d297af38e26448c857cee3e095c9792e3b8f26b37d268812e7f38a589f694609de8534a018b1937d7dc9f84e6b387c5 + languageName: node + linkType: hard + +"big.js@npm:^5.2.2": + version: 5.2.2 + resolution: "big.js@npm:5.2.2" + checksum: 10c0/230520f1ff920b2d2ce3e372d77a33faa4fa60d802fe01ca4ffbc321ee06023fe9a741ac02793ee778040a16b7e497f7d60c504d1c402b8fdab6f03bb785a25f + languageName: node + linkType: hard + +"binary-extensions@npm:^2.0.0": + version: 2.3.0 + resolution: "binary-extensions@npm:2.3.0" + checksum: 10c0/75a59cafc10fb12a11d510e77110c6c7ae3f4ca22463d52487709ca7f18f69d886aa387557cc9864fbdb10153d0bdb4caacabf11541f55e89ed6e18d12ece2b5 + languageName: node + linkType: hard + +"bl@npm:^4.0.3": + version: 4.1.0 + resolution: "bl@npm:4.1.0" + dependencies: + buffer: "npm:^5.5.0" + inherits: "npm:^2.0.4" + readable-stream: "npm:^3.4.0" + checksum: 10c0/02847e1d2cb089c9dc6958add42e3cdeaf07d13f575973963335ac0fdece563a50ac770ac4c8fa06492d2dd276f6cc3b7f08c7cd9c7a7ad0f8d388b2a28def5f + languageName: node + linkType: hard + +"body-parser@npm:1.20.3": + version: 1.20.3 + resolution: "body-parser@npm:1.20.3" + dependencies: + bytes: "npm:3.1.2" + content-type: "npm:~1.0.5" + debug: "npm:2.6.9" + depd: "npm:2.0.0" + destroy: "npm:1.2.0" + http-errors: "npm:2.0.0" + iconv-lite: "npm:0.4.24" + on-finished: "npm:2.4.1" + qs: "npm:6.13.0" + raw-body: "npm:2.5.2" + type-is: "npm:~1.6.18" + unpipe: "npm:1.0.0" + checksum: 10c0/0a9a93b7518f222885498dcecaad528cf010dd109b071bf471c93def4bfe30958b83e03496eb9c1ad4896db543d999bb62be1a3087294162a88cfa1b42c16310 + languageName: node + linkType: hard + +"bonjour-service@npm:^1.0.11": + version: 1.2.1 + resolution: "bonjour-service@npm:1.2.1" + dependencies: + fast-deep-equal: "npm:^3.1.3" + multicast-dns: "npm:^7.2.5" + checksum: 10c0/953cbfc27fc9e36e6f988012993ab2244817d82426603e0390d4715639031396c932b6657b1aa4ec30dbb5fa903d6b2c7f1be3af7a8ba24165c93e987c849730 + languageName: node + linkType: hard + +"boolbase@npm:^1.0.0": + version: 1.0.0 + resolution: "boolbase@npm:1.0.0" + checksum: 10c0/e4b53deb4f2b85c52be0e21a273f2045c7b6a6ea002b0e139c744cb6f95e9ec044439a52883b0d74dedd1ff3da55ed140cfdddfed7fb0cccbed373de5dce1bcf + languageName: node + linkType: hard + +"boxen@npm:^6.2.1": + version: 6.2.1 + resolution: "boxen@npm:6.2.1" + dependencies: + ansi-align: "npm:^3.0.1" + camelcase: "npm:^6.2.0" + chalk: "npm:^4.1.2" + cli-boxes: "npm:^3.0.0" + string-width: "npm:^5.0.1" + type-fest: "npm:^2.5.0" + widest-line: "npm:^4.0.1" + wrap-ansi: "npm:^8.0.1" + checksum: 10c0/2a50d059c950a50d9f3c873093702747740814ce8819225c4f8cbe92024c9f5a9219d2b7128f5cfa17c022644d929bbbc88b9591de67249c6ebe07f7486bdcfd + languageName: node + linkType: hard + +"boxen@npm:^7.0.0": + version: 7.1.1 + resolution: "boxen@npm:7.1.1" + dependencies: + ansi-align: "npm:^3.0.1" + camelcase: "npm:^7.0.1" + chalk: "npm:^5.2.0" + 
cli-boxes: "npm:^3.0.0" + string-width: "npm:^5.1.2" + type-fest: "npm:^2.13.0" + widest-line: "npm:^4.0.1" + wrap-ansi: "npm:^8.1.0" + checksum: 10c0/3a9891dc98ac40d582c9879e8165628258e2c70420c919e70fff0a53ccc7b42825e73cda6298199b2fbc1f41f5d5b93b492490ad2ae27623bed3897ddb4267f8 + languageName: node + linkType: hard + +"brace-expansion@npm:^1.1.7": + version: 1.1.11 + resolution: "brace-expansion@npm:1.1.11" + dependencies: + balanced-match: "npm:^1.0.0" + concat-map: "npm:0.0.1" + checksum: 10c0/695a56cd058096a7cb71fb09d9d6a7070113c7be516699ed361317aca2ec169f618e28b8af352e02ab4233fb54eb0168460a40dc320bab0034b36ab59aaad668 + languageName: node + linkType: hard + +"brace-expansion@npm:^2.0.1": + version: 2.0.1 + resolution: "brace-expansion@npm:2.0.1" + dependencies: + balanced-match: "npm:^1.0.0" + checksum: 10c0/b358f2fe060e2d7a87aa015979ecea07f3c37d4018f8d6deb5bd4c229ad3a0384fe6029bb76cd8be63c81e516ee52d1a0673edbe2023d53a5191732ae3c3e49f + languageName: node + linkType: hard + +"braces@npm:^3.0.3, braces@npm:~3.0.2": + version: 3.0.3 + resolution: "braces@npm:3.0.3" + dependencies: + fill-range: "npm:^7.1.1" + checksum: 10c0/7c6dfd30c338d2997ba77500539227b9d1f85e388a5f43220865201e407e076783d0881f2d297b9f80951b4c957fcf0b51c1d2d24227631643c3f7c284b0aa04 + languageName: node + linkType: hard + +"browserslist@npm:^4.0.0, browserslist@npm:^4.18.1, browserslist@npm:^4.21.10, browserslist@npm:^4.23.0, browserslist@npm:^4.23.1, browserslist@npm:^4.23.3": + version: 4.23.3 + resolution: "browserslist@npm:4.23.3" + dependencies: + caniuse-lite: "npm:^1.0.30001646" + electron-to-chromium: "npm:^1.5.4" + node-releases: "npm:^2.0.18" + update-browserslist-db: "npm:^1.1.0" + bin: + browserslist: cli.js + checksum: 10c0/3063bfdf812815346447f4796c8f04601bf5d62003374305fd323c2a463e42776475bcc5309264e39bcf9a8605851e53560695991a623be988138b3ff8c66642 + languageName: node + linkType: hard + +"buffer-from@npm:^1.0.0": + version: 1.1.2 + resolution: "buffer-from@npm:1.1.2" + checksum: 10c0/124fff9d66d691a86d3b062eff4663fe437a9d9ee4b47b1b9e97f5a5d14f6d5399345db80f796827be7c95e70a8e765dd404b7c3ff3b3324f98e9b0c8826cc34 + languageName: node + linkType: hard + +"buffer@npm:^5.5.0": + version: 5.7.1 + resolution: "buffer@npm:5.7.1" + dependencies: + base64-js: "npm:^1.3.1" + ieee754: "npm:^1.1.13" + checksum: 10c0/27cac81cff434ed2876058d72e7c4789d11ff1120ef32c9de48f59eab58179b66710c488987d295ae89a228f835fc66d088652dffeb8e3ba8659f80eb091d55e + languageName: node + linkType: hard + +"bytes@npm:3.0.0": + version: 3.0.0 + resolution: "bytes@npm:3.0.0" + checksum: 10c0/91d42c38601c76460519ffef88371caacaea483a354c8e4b8808e7b027574436a5713337c003ea3de63ee4991c2a9a637884fdfe7f761760d746929d9e8fec60 + languageName: node + linkType: hard + +"bytes@npm:3.1.2": + version: 3.1.2 + resolution: "bytes@npm:3.1.2" + checksum: 10c0/76d1c43cbd602794ad8ad2ae94095cddeb1de78c5dddaa7005c51af10b0176c69971a6d88e805a90c2b6550d76636e43c40d8427a808b8645ede885de4a0358e + languageName: node + linkType: hard + +"cacache@npm:^18.0.0": + version: 18.0.4 + resolution: "cacache@npm:18.0.4" + dependencies: + "@npmcli/fs": "npm:^3.1.0" + fs-minipass: "npm:^3.0.0" + glob: "npm:^10.2.2" + lru-cache: "npm:^10.0.1" + minipass: "npm:^7.0.3" + minipass-collect: "npm:^2.0.1" + minipass-flush: "npm:^1.0.5" + minipass-pipeline: "npm:^1.2.4" + p-map: "npm:^4.0.0" + ssri: "npm:^10.0.0" + tar: "npm:^6.1.11" + unique-filename: "npm:^3.0.0" + checksum: 
10c0/6c055bafed9de4f3dcc64ac3dc7dd24e863210902b7c470eb9ce55a806309b3efff78033e3d8b4f7dcc5d467f2db43c6a2857aaaf26f0094b8a351d44c42179f + languageName: node + linkType: hard + +"cacheable-lookup@npm:^7.0.0": + version: 7.0.0 + resolution: "cacheable-lookup@npm:7.0.0" + checksum: 10c0/63a9c144c5b45cb5549251e3ea774c04d63063b29e469f7584171d059d3a88f650f47869a974e2d07de62116463d742c287a81a625e791539d987115cb081635 + languageName: node + linkType: hard + +"cacheable-request@npm:^10.2.8": + version: 10.2.14 + resolution: "cacheable-request@npm:10.2.14" + dependencies: + "@types/http-cache-semantics": "npm:^4.0.2" + get-stream: "npm:^6.0.1" + http-cache-semantics: "npm:^4.1.1" + keyv: "npm:^4.5.3" + mimic-response: "npm:^4.0.0" + normalize-url: "npm:^8.0.0" + responselike: "npm:^3.0.0" + checksum: 10c0/41b6658db369f20c03128227ecd219ca7ac52a9d24fc0f499cc9aa5d40c097b48b73553504cebd137024d957c0ddb5b67cf3ac1439b136667f3586257763f88d + languageName: node + linkType: hard + +"call-bind@npm:^1.0.2, call-bind@npm:^1.0.5, call-bind@npm:^1.0.6, call-bind@npm:^1.0.7": + version: 1.0.7 + resolution: "call-bind@npm:1.0.7" + dependencies: + es-define-property: "npm:^1.0.0" + es-errors: "npm:^1.3.0" + function-bind: "npm:^1.1.2" + get-intrinsic: "npm:^1.2.4" + set-function-length: "npm:^1.2.1" + checksum: 10c0/a3ded2e423b8e2a265983dba81c27e125b48eefb2655e7dfab6be597088da3d47c47976c24bc51b8fd9af1061f8f87b4ab78a314f3c77784b2ae2ba535ad8b8d + languageName: node + linkType: hard + +"callsites@npm:^3.0.0": + version: 3.1.0 + resolution: "callsites@npm:3.1.0" + checksum: 10c0/fff92277400eb06c3079f9e74f3af120db9f8ea03bad0e84d9aede54bbe2d44a56cccb5f6cf12211f93f52306df87077ecec5b712794c5a9b5dac6d615a3f301 + languageName: node + linkType: hard + +"camel-case@npm:^4.1.2": + version: 4.1.2 + resolution: "camel-case@npm:4.1.2" + dependencies: + pascal-case: "npm:^3.1.2" + tslib: "npm:^2.0.3" + checksum: 10c0/bf9eefaee1f20edbed2e9a442a226793bc72336e2b99e5e48c6b7252b6f70b080fc46d8246ab91939e2af91c36cdd422e0af35161e58dd089590f302f8f64c8a + languageName: node + linkType: hard + +"camelcase@npm:^6.2.0": + version: 6.3.0 + resolution: "camelcase@npm:6.3.0" + checksum: 10c0/0d701658219bd3116d12da3eab31acddb3f9440790c0792e0d398f0a520a6a4058018e546862b6fba89d7ae990efaeb97da71e1913e9ebf5a8b5621a3d55c710 + languageName: node + linkType: hard + +"camelcase@npm:^7.0.1": + version: 7.0.1 + resolution: "camelcase@npm:7.0.1" + checksum: 10c0/3adfc9a0e96d51b3a2f4efe90a84dad3e206aaa81dfc664f1bd568270e1bf3b010aad31f01db16345b4ffe1910e16ab411c7273a19a859addd1b98ef7cf4cfbd + languageName: node + linkType: hard + +"caniuse-api@npm:^3.0.0": + version: 3.0.0 + resolution: "caniuse-api@npm:3.0.0" + dependencies: + browserslist: "npm:^4.0.0" + caniuse-lite: "npm:^1.0.0" + lodash.memoize: "npm:^4.1.2" + lodash.uniq: "npm:^4.5.0" + checksum: 10c0/60f9e85a3331e6d761b1b03eec71ca38ef7d74146bece34694853033292156b815696573ed734b65583acf493e88163618eda915c6c826d46a024c71a9572b4c + languageName: node + linkType: hard + +"caniuse-lite@npm:^1.0.0, caniuse-lite@npm:^1.0.30001646": + version: 1.0.30001651 + resolution: "caniuse-lite@npm:1.0.30001651" + checksum: 10c0/7821278952a6dbd17358e5d08083d258f092e2a530f5bc1840657cb140fbbc5ec44293bc888258c44a18a9570cde149ed05819ac8320b9710cf22f699891e6ad + languageName: node + linkType: hard + +"ccount@npm:^2.0.0": + version: 2.0.1 + resolution: "ccount@npm:2.0.1" + checksum: 10c0/3939b1664390174484322bc3f45b798462e6c07ee6384cb3d645e0aa2f318502d174845198c1561930e1d431087f74cf1fe291ae9a4722821a9f4ba67e574350 + languageName: 
node + linkType: hard + +"chalk@npm:^1.1.3": + version: 1.1.3 + resolution: "chalk@npm:1.1.3" + dependencies: + ansi-styles: "npm:^2.2.1" + escape-string-regexp: "npm:^1.0.2" + has-ansi: "npm:^2.0.0" + strip-ansi: "npm:^3.0.0" + supports-color: "npm:^2.0.0" + checksum: 10c0/28c3e399ec286bb3a7111fd4225ebedb0d7b813aef38a37bca7c498d032459c265ef43404201d5fbb8d888d29090899c95335b4c0cda13e8b126ff15c541cef8 + languageName: node + linkType: hard + +"chalk@npm:^2.4.2": + version: 2.4.2 + resolution: "chalk@npm:2.4.2" + dependencies: + ansi-styles: "npm:^3.2.1" + escape-string-regexp: "npm:^1.0.5" + supports-color: "npm:^5.3.0" + checksum: 10c0/e6543f02ec877732e3a2d1c3c3323ddb4d39fbab687c23f526e25bd4c6a9bf3b83a696e8c769d078e04e5754921648f7821b2a2acfd16c550435fd630026e073 + languageName: node + linkType: hard + +"chalk@npm:^4.0.0, chalk@npm:^4.1.0, chalk@npm:^4.1.2": + version: 4.1.2 + resolution: "chalk@npm:4.1.2" + dependencies: + ansi-styles: "npm:^4.1.0" + supports-color: "npm:^7.1.0" + checksum: 10c0/4a3fef5cc34975c898ffe77141450f679721df9dde00f6c304353fa9c8b571929123b26a0e4617bde5018977eb655b31970c297b91b63ee83bb82aeb04666880 + languageName: node + linkType: hard + +"chalk@npm:^5.0.1, chalk@npm:^5.2.0": + version: 5.3.0 + resolution: "chalk@npm:5.3.0" + checksum: 10c0/8297d436b2c0f95801103ff2ef67268d362021b8210daf8ddbe349695333eb3610a71122172ff3b0272f1ef2cf7cc2c41fdaa4715f52e49ffe04c56340feed09 + languageName: node + linkType: hard + +"char-regex@npm:^1.0.2": + version: 1.0.2 + resolution: "char-regex@npm:1.0.2" + checksum: 10c0/57a09a86371331e0be35d9083ba429e86c4f4648ecbe27455dbfb343037c16ee6fdc7f6b61f433a57cc5ded5561d71c56a150e018f40c2ffb7bc93a26dae341e + languageName: node + linkType: hard + +"character-entities-html4@npm:^2.0.0": + version: 2.1.0 + resolution: "character-entities-html4@npm:2.1.0" + checksum: 10c0/fe61b553f083400c20c0b0fd65095df30a0b445d960f3bbf271536ae6c3ba676f39cb7af0b4bf2755812f08ab9b88f2feed68f9aebb73bb153f7a115fe5c6e40 + languageName: node + linkType: hard + +"character-entities-legacy@npm:^1.0.0": + version: 1.1.4 + resolution: "character-entities-legacy@npm:1.1.4" + checksum: 10c0/ea4ca9c29887335eed86d78fc67a640168342b1274da84c097abb0575a253d1265281a5052f9a863979e952bcc267b4ecaaf4fe233a7e1e0d8a47806c65b96c7 + languageName: node + linkType: hard + +"character-entities-legacy@npm:^3.0.0": + version: 3.0.0 + resolution: "character-entities-legacy@npm:3.0.0" + checksum: 10c0/ec4b430af873661aa754a896a2b55af089b4e938d3d010fad5219299a6b6d32ab175142699ee250640678cd64bdecd6db3c9af0b8759ab7b155d970d84c4c7d1 + languageName: node + linkType: hard + +"character-entities@npm:^1.0.0": + version: 1.2.4 + resolution: "character-entities@npm:1.2.4" + checksum: 10c0/ad015c3d7163563b8a0ee1f587fb0ef305ef344e9fd937f79ca51cccc233786a01d591d989d5bf7b2e66b528ac9efba47f3b1897358324e69932f6d4b25adfe1 + languageName: node + linkType: hard + +"character-entities@npm:^2.0.0": + version: 2.0.2 + resolution: "character-entities@npm:2.0.2" + checksum: 10c0/b0c645a45bcc90ff24f0e0140f4875a8436b8ef13b6bcd31ec02cfb2ca502b680362aa95386f7815bdc04b6464d48cf191210b3840d7c04241a149ede591a308 + languageName: node + linkType: hard + +"character-reference-invalid@npm:^1.0.0": + version: 1.1.4 + resolution: "character-reference-invalid@npm:1.1.4" + checksum: 10c0/29f05081c5817bd1e975b0bf61e77b60a40f62ad371d0f0ce0fdb48ab922278bc744d1fbe33771dced751887a8403f265ff634542675c8d7375f6ff4811efd0e + languageName: node + linkType: hard + +"character-reference-invalid@npm:^2.0.0": + version: 2.0.1 + resolution: 
"character-reference-invalid@npm:2.0.1" + checksum: 10c0/2ae0dec770cd8659d7e8b0ce24392d83b4c2f0eb4a3395c955dce5528edd4cc030a794cfa06600fcdd700b3f2de2f9b8e40e309c0011c4180e3be64a0b42e6a1 + languageName: node + linkType: hard + +"cheerio-select@npm:^2.1.0": + version: 2.1.0 + resolution: "cheerio-select@npm:2.1.0" + dependencies: + boolbase: "npm:^1.0.0" + css-select: "npm:^5.1.0" + css-what: "npm:^6.1.0" + domelementtype: "npm:^2.3.0" + domhandler: "npm:^5.0.3" + domutils: "npm:^3.0.1" + checksum: 10c0/2242097e593919dba4aacb97d7b8275def8b9ec70b00aa1f43335456870cfc9e284eae2080bdc832ed232dabb9eefcf56c722d152da4a154813fb8814a55d282 + languageName: node + linkType: hard + +"cheerio@npm:1.0.0-rc.12": + version: 1.0.0-rc.12 + resolution: "cheerio@npm:1.0.0-rc.12" + dependencies: + cheerio-select: "npm:^2.1.0" + dom-serializer: "npm:^2.0.0" + domhandler: "npm:^5.0.3" + domutils: "npm:^3.0.1" + htmlparser2: "npm:^8.0.1" + parse5: "npm:^7.0.0" + parse5-htmlparser2-tree-adapter: "npm:^7.0.0" + checksum: 10c0/c85d2f2461e3f024345b78e0bb16ad8e41492356210470dd1e7d5a91391da9fcf6c0a7cb48a9ba8820330153f0cedb4d0a60c7af15d96ecdb3092299b9d9c0cc + languageName: node + linkType: hard + +"chokidar@npm:>=3.0.0 <4.0.0, chokidar@npm:^3.4.2, chokidar@npm:^3.5.3": + version: 3.6.0 + resolution: "chokidar@npm:3.6.0" + dependencies: + anymatch: "npm:~3.1.2" + braces: "npm:~3.0.2" + fsevents: "npm:~2.3.2" + glob-parent: "npm:~5.1.2" + is-binary-path: "npm:~2.1.0" + is-glob: "npm:~4.0.1" + normalize-path: "npm:~3.0.0" + readdirp: "npm:~3.6.0" + dependenciesMeta: + fsevents: + optional: true + checksum: 10c0/8361dcd013f2ddbe260eacb1f3cb2f2c6f2b0ad118708a343a5ed8158941a39cb8fb1d272e0f389712e74ee90ce8ba864eece9e0e62b9705cb468a2f6d917462 + languageName: node + linkType: hard + +"chownr@npm:^1.1.1": + version: 1.1.4 + resolution: "chownr@npm:1.1.4" + checksum: 10c0/ed57952a84cc0c802af900cf7136de643d3aba2eecb59d29344bc2f3f9bf703a301b9d84cdc71f82c3ffc9ccde831b0d92f5b45f91727d6c9da62f23aef9d9db + languageName: node + linkType: hard + +"chownr@npm:^2.0.0": + version: 2.0.0 + resolution: "chownr@npm:2.0.0" + checksum: 10c0/594754e1303672171cc04e50f6c398ae16128eb134a88f801bf5354fd96f205320f23536a045d9abd8b51024a149696e51231565891d4efdab8846021ecf88e6 + languageName: node + linkType: hard + +"chrome-trace-event@npm:^1.0.2": + version: 1.0.4 + resolution: "chrome-trace-event@npm:1.0.4" + checksum: 10c0/3058da7a5f4934b87cf6a90ef5fb68ebc5f7d06f143ed5a4650208e5d7acae47bc03ec844b29fbf5ba7e46e8daa6acecc878f7983a4f4bb7271593da91e61ff5 + languageName: node + linkType: hard + +"ci-info@npm:^3.2.0": + version: 3.9.0 + resolution: "ci-info@npm:3.9.0" + checksum: 10c0/6f0109e36e111684291d46123d491bc4e7b7a1934c3a20dea28cba89f1d4a03acd892f5f6a81ed3855c38647e285a150e3c9ba062e38943bef57fee6c1554c3a + languageName: node + linkType: hard + +"ci-info@npm:^4.0.0": + version: 4.0.0 + resolution: "ci-info@npm:4.0.0" + checksum: 10c0/ecc003e5b60580bd081d83dd61d398ddb8607537f916313e40af4667f9c92a1243bd8e8a591a5aa78e418afec245dbe8e90a0e26e39ca0825129a99b978dd3f9 + languageName: node + linkType: hard + +"clean-css@npm:^5.2.2, clean-css@npm:^5.3.2, clean-css@npm:~5.3.2": + version: 5.3.3 + resolution: "clean-css@npm:5.3.3" + dependencies: + source-map: "npm:~0.6.0" + checksum: 10c0/381de7523e23f3762eb180e327dcc0cedafaf8cb1cd8c26b7cc1fc56e0829a92e734729c4f955394d65ed72fb62f82d8baf78af34b33b8a7d41ebad2accdd6fb + languageName: node + linkType: hard + +"clean-stack@npm:^2.0.0": + version: 2.2.0 + resolution: "clean-stack@npm:2.2.0" + checksum: 
10c0/1f90262d5f6230a17e27d0c190b09d47ebe7efdd76a03b5a1127863f7b3c9aec4c3e6c8bb3a7bbf81d553d56a1fd35728f5a8ef4c63f867ac8d690109742a8c1 + languageName: node + linkType: hard + +"cli-boxes@npm:^3.0.0": + version: 3.0.0 + resolution: "cli-boxes@npm:3.0.0" + checksum: 10c0/4db3e8fbfaf1aac4fb3a6cbe5a2d3fa048bee741a45371b906439b9ffc821c6e626b0f108bdcd3ddf126a4a319409aedcf39a0730573ff050fdd7b6731e99fb9 + languageName: node + linkType: hard + +"cli-table3@npm:^0.6.3": + version: 0.6.5 + resolution: "cli-table3@npm:0.6.5" + dependencies: + "@colors/colors": "npm:1.5.0" + string-width: "npm:^4.2.0" + dependenciesMeta: + "@colors/colors": + optional: true + checksum: 10c0/d7cc9ed12212ae68241cc7a3133c52b844113b17856e11f4f81308acc3febcea7cc9fd298e70933e294dd642866b29fd5d113c2c098948701d0c35f09455de78 + languageName: node + linkType: hard + +"clone-deep@npm:^4.0.1": + version: 4.0.1 + resolution: "clone-deep@npm:4.0.1" + dependencies: + is-plain-object: "npm:^2.0.4" + kind-of: "npm:^6.0.2" + shallow-clone: "npm:^3.0.0" + checksum: 10c0/637753615aa24adf0f2d505947a1bb75e63964309034a1cf56ba4b1f30af155201edd38d26ffe26911adaae267a3c138b344a4947d39f5fc1b6d6108125aa758 + languageName: node + linkType: hard + +"clsx@npm:^2.0.0, clsx@npm:^2.1.1": + version: 2.1.1 + resolution: "clsx@npm:2.1.1" + checksum: 10c0/c4c8eb865f8c82baab07e71bfa8897c73454881c4f99d6bc81585aecd7c441746c1399d08363dc096c550cceaf97bd4ce1e8854e1771e9998d9f94c4fe075839 + languageName: node + linkType: hard + +"collapse-white-space@npm:^2.0.0": + version: 2.1.0 + resolution: "collapse-white-space@npm:2.1.0" + checksum: 10c0/b2e2800f4ab261e62eb27a1fbe853378296e3a726d6695117ed033e82d61fb6abeae4ffc1465d5454499e237005de9cfc52c9562dc7ca4ac759b9a222ef14453 + languageName: node + linkType: hard + +"color-convert@npm:^1.9.0": + version: 1.9.3 + resolution: "color-convert@npm:1.9.3" + dependencies: + color-name: "npm:1.1.3" + checksum: 10c0/5ad3c534949a8c68fca8fbc6f09068f435f0ad290ab8b2f76841b9e6af7e0bb57b98cb05b0e19fe33f5d91e5a8611ad457e5f69e0a484caad1f7487fd0e8253c + languageName: node + linkType: hard + +"color-convert@npm:^2.0.1": + version: 2.0.1 + resolution: "color-convert@npm:2.0.1" + dependencies: + color-name: "npm:~1.1.4" + checksum: 10c0/37e1150172f2e311fe1b2df62c6293a342ee7380da7b9cfdba67ea539909afbd74da27033208d01d6d5cfc65ee7868a22e18d7e7648e004425441c0f8a15a7d7 + languageName: node + linkType: hard + +"color-name@npm:1.1.3": + version: 1.1.3 + resolution: "color-name@npm:1.1.3" + checksum: 10c0/566a3d42cca25b9b3cd5528cd7754b8e89c0eb646b7f214e8e2eaddb69994ac5f0557d9c175eb5d8f0ad73531140d9c47525085ee752a91a2ab15ab459caf6d6 + languageName: node + linkType: hard + +"color-name@npm:^1.0.0, color-name@npm:~1.1.4": + version: 1.1.4 + resolution: "color-name@npm:1.1.4" + checksum: 10c0/a1a3f914156960902f46f7f56bc62effc6c94e84b2cae157a526b1c1f74b677a47ec602bf68a61abfa2b42d15b7c5651c6dbe72a43af720bc588dff885b10f95 + languageName: node + linkType: hard + +"color-string@npm:^1.9.0": + version: 1.9.1 + resolution: "color-string@npm:1.9.1" + dependencies: + color-name: "npm:^1.0.0" + simple-swizzle: "npm:^0.2.2" + checksum: 10c0/b0bfd74c03b1f837f543898b512f5ea353f71630ccdd0d66f83028d1f0924a7d4272deb278b9aef376cacf1289b522ac3fb175e99895283645a2dc3a33af2404 + languageName: node + linkType: hard + +"color@npm:^4.2.3": + version: 4.2.3 + resolution: "color@npm:4.2.3" + dependencies: + color-convert: "npm:^2.0.1" + color-string: "npm:^1.9.0" + checksum: 
10c0/7fbe7cfb811054c808349de19fb380252e5e34e61d7d168ec3353e9e9aacb1802674bddc657682e4e9730c2786592a4de6f8283e7e0d3870b829bb0b7b2f6118 + languageName: node + linkType: hard + +"colord@npm:^2.9.3": + version: 2.9.3 + resolution: "colord@npm:2.9.3" + checksum: 10c0/9699e956894d8996b28c686afe8988720785f476f59335c80ce852ded76ab3ebe252703aec53d9bef54f6219aea6b960fb3d9a8300058a1d0c0d4026460cd110 + languageName: node + linkType: hard + +"colorette@npm:^2.0.10": + version: 2.0.20 + resolution: "colorette@npm:2.0.20" + checksum: 10c0/e94116ff33b0ff56f3b83b9ace895e5bf87c2a7a47b3401b8c3f3226e050d5ef76cf4072fb3325f9dc24d1698f9b730baf4e05eeaf861d74a1883073f4c98a40 + languageName: node + linkType: hard + +"combine-promises@npm:^1.1.0": + version: 1.2.0 + resolution: "combine-promises@npm:1.2.0" + checksum: 10c0/906ebf056006eff93c11548df0415053b6756145dae1f5a89579e743cb15fceeb0604555791321db4fba5072aa39bb4de6547e9cdf14589fe949b33d1613422c + languageName: node + linkType: hard + +"comma-separated-tokens@npm:^2.0.0": + version: 2.0.3 + resolution: "comma-separated-tokens@npm:2.0.3" + checksum: 10c0/91f90f1aae320f1755d6957ef0b864fe4f54737f3313bd95e0802686ee2ca38bff1dd381964d00ae5db42912dd1f4ae5c2709644e82706ffc6f6842a813cdd67 + languageName: node + linkType: hard + +"commander@npm:7, commander@npm:^7.2.0": + version: 7.2.0 + resolution: "commander@npm:7.2.0" + checksum: 10c0/8d690ff13b0356df7e0ebbe6c59b4712f754f4b724d4f473d3cc5b3fdcf978e3a5dc3078717858a2ceb50b0f84d0660a7f22a96cdc50fb877d0c9bb31593d23a + languageName: node + linkType: hard + +"commander@npm:^10.0.0": + version: 10.0.1 + resolution: "commander@npm:10.0.1" + checksum: 10c0/53f33d8927758a911094adadda4b2cbac111a5b377d8706700587650fd8f45b0bbe336de4b5c3fe47fd61f420a3d9bd452b6e0e6e5600a7e74d7bf0174f6efe3 + languageName: node + linkType: hard + +"commander@npm:^2.20.0": + version: 2.20.3 + resolution: "commander@npm:2.20.3" + checksum: 10c0/74c781a5248c2402a0a3e966a0a2bba3c054aad144f5c023364be83265e796b20565aa9feff624132ff629aa64e16999fa40a743c10c12f7c61e96a794b99288 + languageName: node + linkType: hard + +"commander@npm:^5.1.0": + version: 5.1.0 + resolution: "commander@npm:5.1.0" + checksum: 10c0/da9d71dbe4ce039faf1fe9eac3771dca8c11d66963341f62602f7b66e36d2a3f8883407af4f9a37b1db1a55c59c0c1325f186425764c2e963dc1d67aec2a4b6d + languageName: node + linkType: hard + +"commander@npm:^8.3.0": + version: 8.3.0 + resolution: "commander@npm:8.3.0" + checksum: 10c0/8b043bb8322ea1c39664a1598a95e0495bfe4ca2fad0d84a92d7d1d8d213e2a155b441d2470c8e08de7c4a28cf2bc6e169211c49e1b21d9f7edc6ae4d9356060 + languageName: node + linkType: hard + +"common-path-prefix@npm:^3.0.0": + version: 3.0.0 + resolution: "common-path-prefix@npm:3.0.0" + checksum: 10c0/c4a74294e1b1570f4a8ab435285d185a03976c323caa16359053e749db4fde44e3e6586c29cd051100335e11895767cbbd27ea389108e327d62f38daf4548fdb + languageName: node + linkType: hard + +"common-tags@npm:^1.4.0": + version: 1.8.2 + resolution: "common-tags@npm:1.8.2" + checksum: 10c0/23efe47ff0a1a7c91489271b3a1e1d2a171c12ec7f9b35b29b2fce51270124aff0ec890087e2bc2182c1cb746e232ab7561aaafe05f1e7452aea733d2bfe3f63 + languageName: node + linkType: hard + +"compressible@npm:~2.0.16": + version: 2.0.18 + resolution: "compressible@npm:2.0.18" + dependencies: + mime-db: "npm:>= 1.43.0 < 2" + checksum: 10c0/8a03712bc9f5b9fe530cc5a79e164e665550d5171a64575d7dcf3e0395d7b4afa2d79ab176c61b5b596e28228b350dd07c1a2a6ead12fd81d1b6cd632af2fef7 + languageName: node + linkType: hard + +"compression@npm:^1.7.4": + version: 1.7.4 + resolution: 
"compression@npm:1.7.4" + dependencies: + accepts: "npm:~1.3.5" + bytes: "npm:3.0.0" + compressible: "npm:~2.0.16" + debug: "npm:2.6.9" + on-headers: "npm:~1.0.2" + safe-buffer: "npm:5.1.2" + vary: "npm:~1.1.2" + checksum: 10c0/138db836202a406d8a14156a5564fb1700632a76b6e7d1546939472895a5304f2b23c80d7a22bf44c767e87a26e070dbc342ea63bb45ee9c863354fa5556bbbc + languageName: node + linkType: hard + +"concat-map@npm:0.0.1": + version: 0.0.1 + resolution: "concat-map@npm:0.0.1" + checksum: 10c0/c996b1cfdf95b6c90fee4dae37e332c8b6eb7d106430c17d538034c0ad9a1630cb194d2ab37293b1bdd4d779494beee7786d586a50bd9376fd6f7bcc2bd4c98f + languageName: node + linkType: hard + +"concat-stream@npm:^2.0.0": + version: 2.0.0 + resolution: "concat-stream@npm:2.0.0" + dependencies: + buffer-from: "npm:^1.0.0" + inherits: "npm:^2.0.3" + readable-stream: "npm:^3.0.2" + typedarray: "npm:^0.0.6" + checksum: 10c0/29565dd9198fe1d8cf57f6cc71527dbc6ad67e12e4ac9401feb389c53042b2dceedf47034cbe702dfc4fd8df3ae7e6bfeeebe732cc4fa2674e484c13f04c219a + languageName: node + linkType: hard + +"config-chain@npm:^1.1.11": + version: 1.1.13 + resolution: "config-chain@npm:1.1.13" + dependencies: + ini: "npm:^1.3.4" + proto-list: "npm:~1.2.1" + checksum: 10c0/39d1df18739d7088736cc75695e98d7087aea43646351b028dfabd5508d79cf6ef4c5bcd90471f52cd87ae470d1c5490c0a8c1a292fbe6ee9ff688061ea0963e + languageName: node + linkType: hard + +"configstore@npm:^6.0.0": + version: 6.0.0 + resolution: "configstore@npm:6.0.0" + dependencies: + dot-prop: "npm:^6.0.1" + graceful-fs: "npm:^4.2.6" + unique-string: "npm:^3.0.0" + write-file-atomic: "npm:^3.0.3" + xdg-basedir: "npm:^5.0.1" + checksum: 10c0/6681a96038ab3e0397cbdf55e6e1624ac3dfa3afe955e219f683df060188a418bda043c9114a59a337e7aec9562b0a0c838ed7db24289e6d0c266bc8313b9580 + languageName: node + linkType: hard + +"connect-history-api-fallback@npm:^2.0.0": + version: 2.0.0 + resolution: "connect-history-api-fallback@npm:2.0.0" + checksum: 10c0/90fa8b16ab76e9531646cc70b010b1dbd078153730c510d3142f6cf07479ae8a812c5a3c0e40a28528dd1681a62395d0cfdef67da9e914c4772ac85d69a3ed87 + languageName: node + linkType: hard + +"consola@npm:^2.15.3": + version: 2.15.3 + resolution: "consola@npm:2.15.3" + checksum: 10c0/34a337e6b4a1349ee4d7b4c568484344418da8fdb829d7d71bfefcd724f608f273987633b6eef465e8de510929907a092e13cb7a28a5d3acb3be446fcc79fd5e + languageName: node + linkType: hard + +"consolidated-events@npm:^1.1.0 || ^2.0.0": + version: 2.0.2 + resolution: "consolidated-events@npm:2.0.2" + checksum: 10c0/d82df47cfd4d43289cdbc5c6d9a924f1445b1c753d36ee1250efa2ee008bca0bc72702ab2e9bda58e1deb8083dc45efdbe3deb363e094fcba7ec6b7b3589df53 + languageName: node + linkType: hard + +"content-disposition@npm:0.5.2": + version: 0.5.2 + resolution: "content-disposition@npm:0.5.2" + checksum: 10c0/49eebaa0da1f9609b192e99d7fec31d1178cb57baa9d01f5b63b29787ac31e9d18b5a1033e854c68c9b6cce790e700a6f7fa60e43f95e2e416404e114a8f2f49 + languageName: node + linkType: hard + +"content-disposition@npm:0.5.4": + version: 0.5.4 + resolution: "content-disposition@npm:0.5.4" + dependencies: + safe-buffer: "npm:5.2.1" + checksum: 10c0/bac0316ebfeacb8f381b38285dc691c9939bf0a78b0b7c2d5758acadad242d04783cee5337ba7d12a565a19075af1b3c11c728e1e4946de73c6ff7ce45f3f1bb + languageName: node + linkType: hard + +"content-type@npm:~1.0.4, content-type@npm:~1.0.5": + version: 1.0.5 + resolution: "content-type@npm:1.0.5" + checksum: 10c0/b76ebed15c000aee4678c3707e0860cb6abd4e680a598c0a26e17f0bfae723ec9cc2802f0ff1bc6e4d80603719010431d2231018373d4dde10f9ccff9dadf5af + 
languageName: node + linkType: hard + +"convert-source-map@npm:^2.0.0": + version: 2.0.0 + resolution: "convert-source-map@npm:2.0.0" + checksum: 10c0/8f2f7a27a1a011cc6cc88cc4da2d7d0cfa5ee0369508baae3d98c260bb3ac520691464e5bbe4ae7cdf09860c1d69ecc6f70c63c6e7c7f7e3f18ec08484dc7d9b + languageName: node + linkType: hard + +"cookie-signature@npm:1.0.6": + version: 1.0.6 + resolution: "cookie-signature@npm:1.0.6" + checksum: 10c0/b36fd0d4e3fef8456915fcf7742e58fbfcc12a17a018e0eb9501c9d5ef6893b596466f03b0564b81af29ff2538fd0aa4b9d54fe5ccbfb4c90ea50ad29fe2d221 + languageName: node + linkType: hard + +"cookie@npm:0.6.0": + version: 0.6.0 + resolution: "cookie@npm:0.6.0" + checksum: 10c0/f2318b31af7a31b4ddb4a678d024514df5e705f9be5909a192d7f116cfb6d45cbacf96a473fa733faa95050e7cff26e7832bb3ef94751592f1387b71c8956686 + languageName: node + linkType: hard + +"copy-text-to-clipboard@npm:^3.2.0": + version: 3.2.0 + resolution: "copy-text-to-clipboard@npm:3.2.0" + checksum: 10c0/d60fdadc59d526e19d56ad23cec2b292d33c771a5091621bd322d138804edd3c10eb2367d46ec71b39f5f7f7116a2910b332281aeb36a5b679199d746a8a5381 + languageName: node + linkType: hard + +"copy-webpack-plugin@npm:^11.0.0": + version: 11.0.0 + resolution: "copy-webpack-plugin@npm:11.0.0" + dependencies: + fast-glob: "npm:^3.2.11" + glob-parent: "npm:^6.0.1" + globby: "npm:^13.1.1" + normalize-path: "npm:^3.0.0" + schema-utils: "npm:^4.0.0" + serialize-javascript: "npm:^6.0.0" + peerDependencies: + webpack: ^5.1.0 + checksum: 10c0/a667dd226b26f148584a35fb705f5af926d872584912cf9fd203c14f2b3a68f473a1f5cf768ec1dd5da23820823b850e5d50458b685c468e4a224b25c12a15b4 + languageName: node + linkType: hard + +"core-js-compat@npm:^3.37.1, core-js-compat@npm:^3.38.0": + version: 3.38.1 + resolution: "core-js-compat@npm:3.38.1" + dependencies: + browserslist: "npm:^4.23.3" + checksum: 10c0/d8bc8a35591fc5fbf3e376d793f298ec41eb452619c7ef9de4ea59b74be06e9fda799e0dcbf9ba59880dae87e3b41fb191d744ffc988315642a1272bb9442b31 + languageName: node + linkType: hard + +"core-js-pure@npm:^3.30.2": + version: 3.38.1 + resolution: "core-js-pure@npm:3.38.1" + checksum: 10c0/466adbc0468b8c2a95b9bc49829492dece2cc6584d757c5b38555a26ed3d71f8364ac1ea3128a0a949e004e0e60206cc535ed84320982c3efb9a40c1785ddcc6 + languageName: node + linkType: hard + +"core-js@npm:^3.31.1": + version: 3.38.1 + resolution: "core-js@npm:3.38.1" + checksum: 10c0/7df063b6f13a54e46515817ac3e235c6c598a4d3de65cd188a061fc250642be313b895fb9fb2f36e1e31890a1bb4ef61d82666a340413f540b7ce3c65689739b + languageName: node + linkType: hard + +"core-util-is@npm:~1.0.0": + version: 1.0.3 + resolution: "core-util-is@npm:1.0.3" + checksum: 10c0/90a0e40abbddfd7618f8ccd63a74d88deea94e77d0e8dbbea059fa7ebebb8fbb4e2909667fe26f3a467073de1a542ebe6ae4c73a73745ac5833786759cd906c9 + languageName: node + linkType: hard + +"cose-base@npm:^1.0.0": + version: 1.0.3 + resolution: "cose-base@npm:1.0.3" + dependencies: + layout-base: "npm:^1.0.0" + checksum: 10c0/a6e400b1d101393d6af0967c1353355777c1106c40417c5acaef6ca8bdda41e2fc9398f466d6c85be30290943ad631f2590569f67b3fd5368a0d8318946bd24f + languageName: node + linkType: hard + +"cosmiconfig@npm:^6.0.0": + version: 6.0.0 + resolution: "cosmiconfig@npm:6.0.0" + dependencies: + "@types/parse-json": "npm:^4.0.0" + import-fresh: "npm:^3.1.0" + parse-json: "npm:^5.0.0" + path-type: "npm:^4.0.0" + yaml: "npm:^1.7.2" + checksum: 10c0/666ed8732d0bf7d7fe6f8516c8ee6041e0622032e8fa26201577b883d2767ad105d03f38b34b93d1f02f26b22a89e7bab4443b9d2e7f931f48d0e944ffa038b5 + languageName: node + linkType: hard + 
+"cosmiconfig@npm:^8.1.3, cosmiconfig@npm:^8.3.5": + version: 8.3.6 + resolution: "cosmiconfig@npm:8.3.6" + dependencies: + import-fresh: "npm:^3.3.0" + js-yaml: "npm:^4.1.0" + parse-json: "npm:^5.2.0" + path-type: "npm:^4.0.0" + peerDependencies: + typescript: ">=4.9.5" + peerDependenciesMeta: + typescript: + optional: true + checksum: 10c0/0382a9ed13208f8bfc22ca2f62b364855207dffdb73dc26e150ade78c3093f1cf56172df2dd460c8caf2afa91c0ed4ec8a88c62f8f9cd1cf423d26506aa8797a + languageName: node + linkType: hard + +"cross-spawn@npm:^7.0.0, cross-spawn@npm:^7.0.2, cross-spawn@npm:^7.0.3": + version: 7.0.3 + resolution: "cross-spawn@npm:7.0.3" + dependencies: + path-key: "npm:^3.1.0" + shebang-command: "npm:^2.0.0" + which: "npm:^2.0.1" + checksum: 10c0/5738c312387081c98d69c98e105b6327b069197f864a60593245d64c8089c8a0a744e16349281210d56835bb9274130d825a78b2ad6853ca13cfbeffc0c31750 + languageName: node + linkType: hard + +"crypto-random-string@npm:^4.0.0": + version: 4.0.0 + resolution: "crypto-random-string@npm:4.0.0" + dependencies: + type-fest: "npm:^1.0.1" + checksum: 10c0/16e11a3c8140398f5408b7fded35a961b9423c5dac39a60cbbd08bd3f0e07d7de130e87262adea7db03ec1a7a4b7551054e0db07ee5408b012bac5400cfc07a5 + languageName: node + linkType: hard + +"css-declaration-sorter@npm:^7.2.0": + version: 7.2.0 + resolution: "css-declaration-sorter@npm:7.2.0" + peerDependencies: + postcss: ^8.0.9 + checksum: 10c0/d8516be94f8f2daa233ef021688b965c08161624cbf830a4d7ee1099429437c0ee124d35c91b1c659cfd891a68e8888aa941726dab12279bc114aaed60a94606 + languageName: node + linkType: hard + +"css-loader@npm:^6.8.1": + version: 6.11.0 + resolution: "css-loader@npm:6.11.0" + dependencies: + icss-utils: "npm:^5.1.0" + postcss: "npm:^8.4.33" + postcss-modules-extract-imports: "npm:^3.1.0" + postcss-modules-local-by-default: "npm:^4.0.5" + postcss-modules-scope: "npm:^3.2.0" + postcss-modules-values: "npm:^4.0.0" + postcss-value-parser: "npm:^4.2.0" + semver: "npm:^7.5.4" + peerDependencies: + "@rspack/core": 0.x || 1.x + webpack: ^5.0.0 + peerDependenciesMeta: + "@rspack/core": + optional: true + webpack: + optional: true + checksum: 10c0/bb52434138085fed06a33e2ffbdae9ee9014ad23bf60f59d6b7ee67f28f26c6b1764024d3030bd19fd884d6ee6ee2224eaed64ad19eb18fbbb23d148d353a965 + languageName: node + linkType: hard + +"css-minimizer-webpack-plugin@npm:^5.0.1": + version: 5.0.1 + resolution: "css-minimizer-webpack-plugin@npm:5.0.1" + dependencies: + "@jridgewell/trace-mapping": "npm:^0.3.18" + cssnano: "npm:^6.0.1" + jest-worker: "npm:^29.4.3" + postcss: "npm:^8.4.24" + schema-utils: "npm:^4.0.1" + serialize-javascript: "npm:^6.0.1" + peerDependencies: + webpack: ^5.0.0 + peerDependenciesMeta: + "@parcel/css": + optional: true + "@swc/css": + optional: true + clean-css: + optional: true + csso: + optional: true + esbuild: + optional: true + lightningcss: + optional: true + checksum: 10c0/1792259e18f7c5ee25b6bbf60b38b64201747add83d1f751c8c654159b46ebacd0d1103d35f17d97197033e21e02d2ba4a4e9aa14c9c0d067b7c7653c721814e + languageName: node + linkType: hard + +"css-select@npm:^4.1.3": + version: 4.3.0 + resolution: "css-select@npm:4.3.0" + dependencies: + boolbase: "npm:^1.0.0" + css-what: "npm:^6.0.1" + domhandler: "npm:^4.3.1" + domutils: "npm:^2.8.0" + nth-check: "npm:^2.0.1" + checksum: 10c0/a489d8e5628e61063d5a8fe0fa1cc7ae2478cb334a388a354e91cf2908154be97eac9fa7ed4dffe87a3e06cf6fcaa6016553115335c4fd3377e13dac7bd5a8e1 + languageName: node + linkType: hard + +"css-select@npm:^5.1.0": + version: 5.1.0 + resolution: "css-select@npm:5.1.0" + 
dependencies: + boolbase: "npm:^1.0.0" + css-what: "npm:^6.1.0" + domhandler: "npm:^5.0.2" + domutils: "npm:^3.0.1" + nth-check: "npm:^2.0.1" + checksum: 10c0/551c60dba5b54054741032c1793b5734f6ba45e23ae9e82761a3c0ed1acbb8cfedfa443aaba3a3c1a54cac12b456d2012a09d2cd5f0e82e430454c1b9d84d500 + languageName: node + linkType: hard + +"css-tree@npm:^2.3.1": + version: 2.3.1 + resolution: "css-tree@npm:2.3.1" + dependencies: + mdn-data: "npm:2.0.30" + source-map-js: "npm:^1.0.1" + checksum: 10c0/6f8c1a11d5e9b14bf02d10717fc0351b66ba12594166f65abfbd8eb8b5b490dd367f5c7721db241a3c792d935fc6751fbc09f7e1598d421477ad9fadc30f4f24 + languageName: node + linkType: hard + +"css-tree@npm:~2.2.0": + version: 2.2.1 + resolution: "css-tree@npm:2.2.1" + dependencies: + mdn-data: "npm:2.0.28" + source-map-js: "npm:^1.0.1" + checksum: 10c0/47e87b0f02f8ac22f57eceb65c58011dd142d2158128882a0bf963cf2eabb81a4ebbc2e3790c8289be7919fa8b83750c7b69272bd66772c708143b772ba3c186 + languageName: node + linkType: hard + +"css-what@npm:^6.0.1, css-what@npm:^6.1.0": + version: 6.1.0 + resolution: "css-what@npm:6.1.0" + checksum: 10c0/a09f5a6b14ba8dcf57ae9a59474722e80f20406c53a61e9aedb0eedc693b135113ffe2983f4efc4b5065ae639442e9ae88df24941ef159c218b231011d733746 + languageName: node + linkType: hard + +"cssesc@npm:^3.0.0": + version: 3.0.0 + resolution: "cssesc@npm:3.0.0" + bin: + cssesc: bin/cssesc + checksum: 10c0/6bcfd898662671be15ae7827120472c5667afb3d7429f1f917737f3bf84c4176003228131b643ae74543f17a394446247df090c597bb9a728cce298606ed0aa7 + languageName: node + linkType: hard + +"cssnano-preset-advanced@npm:^6.1.2": + version: 6.1.2 + resolution: "cssnano-preset-advanced@npm:6.1.2" + dependencies: + autoprefixer: "npm:^10.4.19" + browserslist: "npm:^4.23.0" + cssnano-preset-default: "npm:^6.1.2" + postcss-discard-unused: "npm:^6.0.5" + postcss-merge-idents: "npm:^6.0.3" + postcss-reduce-idents: "npm:^6.0.3" + postcss-zindex: "npm:^6.0.2" + peerDependencies: + postcss: ^8.4.31 + checksum: 10c0/22d3ddab258e6b31e7e2e7c48712f023b60fadb2813929752dace0326e28cd250830b5420a33f81b01df52d2460cb5f999fff5907f58508809efe1a8a739a707 + languageName: node + linkType: hard + +"cssnano-preset-default@npm:^6.1.2": + version: 6.1.2 + resolution: "cssnano-preset-default@npm:6.1.2" + dependencies: + browserslist: "npm:^4.23.0" + css-declaration-sorter: "npm:^7.2.0" + cssnano-utils: "npm:^4.0.2" + postcss-calc: "npm:^9.0.1" + postcss-colormin: "npm:^6.1.0" + postcss-convert-values: "npm:^6.1.0" + postcss-discard-comments: "npm:^6.0.2" + postcss-discard-duplicates: "npm:^6.0.3" + postcss-discard-empty: "npm:^6.0.3" + postcss-discard-overridden: "npm:^6.0.2" + postcss-merge-longhand: "npm:^6.0.5" + postcss-merge-rules: "npm:^6.1.1" + postcss-minify-font-values: "npm:^6.1.0" + postcss-minify-gradients: "npm:^6.0.3" + postcss-minify-params: "npm:^6.1.0" + postcss-minify-selectors: "npm:^6.0.4" + postcss-normalize-charset: "npm:^6.0.2" + postcss-normalize-display-values: "npm:^6.0.2" + postcss-normalize-positions: "npm:^6.0.2" + postcss-normalize-repeat-style: "npm:^6.0.2" + postcss-normalize-string: "npm:^6.0.2" + postcss-normalize-timing-functions: "npm:^6.0.2" + postcss-normalize-unicode: "npm:^6.1.0" + postcss-normalize-url: "npm:^6.0.2" + postcss-normalize-whitespace: "npm:^6.0.2" + postcss-ordered-values: "npm:^6.0.2" + postcss-reduce-initial: "npm:^6.1.0" + postcss-reduce-transforms: "npm:^6.0.2" + postcss-svgo: "npm:^6.0.3" + postcss-unique-selectors: "npm:^6.0.4" + peerDependencies: + postcss: ^8.4.31 + checksum: 
10c0/af99021f936763850f5f35dc9e6a9dfb0da30856dea36e0420b011da2a447099471db2a5f3d1f5f52c0489da186caf9a439d8f048a80f82617077efb018333fa + languageName: node + linkType: hard + +"cssnano-utils@npm:^4.0.2": + version: 4.0.2 + resolution: "cssnano-utils@npm:4.0.2" + peerDependencies: + postcss: ^8.4.31 + checksum: 10c0/260b8c8ffa48b908aa77ef129f9b8648ecd92aed405b20e7fe6b8370779dd603530344fc9d96683d53533246e48b36ac9d2aa5a476b4f81c547bbad86d187f35 + languageName: node + linkType: hard + +"cssnano@npm:^6.0.1, cssnano@npm:^6.1.2": + version: 6.1.2 + resolution: "cssnano@npm:6.1.2" + dependencies: + cssnano-preset-default: "npm:^6.1.2" + lilconfig: "npm:^3.1.1" + peerDependencies: + postcss: ^8.4.31 + checksum: 10c0/4df0dc0389b34b38acb09b7cfb07267b0eda95349c6d5e9b7666acc7200bb33359650869a60168e9d878298b05f4ad2c7f070815c90551720a3f4e1037f79691 + languageName: node + linkType: hard + +"csso@npm:^5.0.5": + version: 5.0.5 + resolution: "csso@npm:5.0.5" + dependencies: + css-tree: "npm:~2.2.0" + checksum: 10c0/ab4beb1e97dd7e207c10e9925405b45f15a6cd1b4880a8686ad573aa6d476aed28b4121a666cffd26c37a26179f7b54741f7c257543003bfb244d06a62ad569b + languageName: node + linkType: hard + +"csstype@npm:^3.0.2": + version: 3.1.3 + resolution: "csstype@npm:3.1.3" + checksum: 10c0/80c089d6f7e0c5b2bd83cf0539ab41474198579584fa10d86d0cafe0642202343cbc119e076a0b1aece191989477081415d66c9fefbf3c957fc2fc4b7009f248 + languageName: node + linkType: hard + +"cytoscape-cose-bilkent@npm:^4.1.0": + version: 4.1.0 + resolution: "cytoscape-cose-bilkent@npm:4.1.0" + dependencies: + cose-base: "npm:^1.0.0" + peerDependencies: + cytoscape: ^3.2.0 + checksum: 10c0/5e2480ddba9da1a68e700ed2c674cbfd51e9efdbd55788f1971a68de4eb30708e3b3a5e808bf5628f7a258680406bbe6586d87a9133e02a9bdc1ab1a92f512f2 + languageName: node + linkType: hard + +"cytoscape@npm:^3.28.1": + version: 3.30.2 + resolution: "cytoscape@npm:3.30.2" + checksum: 10c0/a8b095969900600b58fff823db73d69ec3f22fc9993c10f0739d8551c1dad881d67e1f7771e33b80f72b40f717861e5fa917846ed304f0a31eb3c8aef8dd433f + languageName: node + linkType: hard + +"d3-array@npm:1 - 2": + version: 2.12.1 + resolution: "d3-array@npm:2.12.1" + dependencies: + internmap: "npm:^1.0.0" + checksum: 10c0/7eca10427a9f113a4ca6a0f7301127cab26043fd5e362631ef5a0edd1c4b2dd70c56ed317566700c31e4a6d88b55f3951aaba192291817f243b730cb2352882e + languageName: node + linkType: hard + +"d3-array@npm:2 - 3, d3-array@npm:2.10.0 - 3, d3-array@npm:2.5.0 - 3, d3-array@npm:3, d3-array@npm:^3.2.0": + version: 3.2.4 + resolution: "d3-array@npm:3.2.4" + dependencies: + internmap: "npm:1 - 2" + checksum: 10c0/08b95e91130f98c1375db0e0af718f4371ccacef7d5d257727fe74f79a24383e79aba280b9ffae655483ffbbad4fd1dec4ade0119d88c4749f388641c8bf8c50 + languageName: node + linkType: hard + +"d3-axis@npm:3": + version: 3.0.0 + resolution: "d3-axis@npm:3.0.0" + checksum: 10c0/a271e70ba1966daa5aaf6a7f959ceca3e12997b43297e757c7b945db2e1ead3c6ee226f2abcfa22abbd4e2e28bd2b71a0911794c4e5b911bbba271328a582c78 + languageName: node + linkType: hard + +"d3-brush@npm:3": + version: 3.0.0 + resolution: "d3-brush@npm:3.0.0" + dependencies: + d3-dispatch: "npm:1 - 3" + d3-drag: "npm:2 - 3" + d3-interpolate: "npm:1 - 3" + d3-selection: "npm:3" + d3-transition: "npm:3" + checksum: 10c0/07baf00334c576da2f68a91fc0da5732c3a5fa19bd3d7aed7fd24d1d674a773f71a93e9687c154176f7246946194d77c48c2d8fed757f5dcb1a4740067ec50a8 + languageName: node + linkType: hard + +"d3-chord@npm:3": + version: 3.0.1 + resolution: "d3-chord@npm:3.0.1" + dependencies: + d3-path: "npm:1 - 3" + checksum: 
10c0/baa6013914af3f4fe1521f0d16de31a38eb8a71d08ff1dec4741f6f45a828661e5cd3935e39bd14e3032bdc78206c283ca37411da21d46ec3cfc520be6e7a7ce + languageName: node + linkType: hard + +"d3-color@npm:1 - 3, d3-color@npm:3": + version: 3.1.0 + resolution: "d3-color@npm:3.1.0" + checksum: 10c0/a4e20e1115fa696fce041fbe13fbc80dc4c19150fa72027a7c128ade980bc0eeeba4bcf28c9e21f0bce0e0dbfe7ca5869ef67746541dcfda053e4802ad19783c + languageName: node + linkType: hard + +"d3-contour@npm:4": + version: 4.0.2 + resolution: "d3-contour@npm:4.0.2" + dependencies: + d3-array: "npm:^3.2.0" + checksum: 10c0/98bc5fbed6009e08707434a952076f39f1cd6ed8b9288253cc3e6a3286e4e80c63c62d84954b20e64bf6e4ededcc69add54d3db25e990784a59c04edd3449032 + languageName: node + linkType: hard + +"d3-delaunay@npm:6": + version: 6.0.4 + resolution: "d3-delaunay@npm:6.0.4" + dependencies: + delaunator: "npm:5" + checksum: 10c0/57c3aecd2525664b07c4c292aa11cf49b2752c0cf3f5257f752999399fe3c592de2d418644d79df1f255471eec8057a9cc0c3062ed7128cb3348c45f69597754 + languageName: node + linkType: hard + +"d3-dispatch@npm:1 - 3, d3-dispatch@npm:3": + version: 3.0.1 + resolution: "d3-dispatch@npm:3.0.1" + checksum: 10c0/6eca77008ce2dc33380e45d4410c67d150941df7ab45b91d116dbe6d0a3092c0f6ac184dd4602c796dc9e790222bad3ff7142025f5fd22694efe088d1d941753 + languageName: node + linkType: hard + +"d3-drag@npm:2 - 3, d3-drag@npm:3": + version: 3.0.0 + resolution: "d3-drag@npm:3.0.0" + dependencies: + d3-dispatch: "npm:1 - 3" + d3-selection: "npm:3" + checksum: 10c0/d2556e8dc720741a443b595a30af403dd60642dfd938d44d6e9bfc4c71a962142f9a028c56b61f8b4790b65a34acad177d1263d66f103c3c527767b0926ef5aa + languageName: node + linkType: hard + +"d3-dsv@npm:1 - 3, d3-dsv@npm:3": + version: 3.0.1 + resolution: "d3-dsv@npm:3.0.1" + dependencies: + commander: "npm:7" + iconv-lite: "npm:0.6" + rw: "npm:1" + bin: + csv2json: bin/dsv2json.js + csv2tsv: bin/dsv2dsv.js + dsv2dsv: bin/dsv2dsv.js + dsv2json: bin/dsv2json.js + json2csv: bin/json2dsv.js + json2dsv: bin/json2dsv.js + json2tsv: bin/json2dsv.js + tsv2csv: bin/dsv2dsv.js + tsv2json: bin/dsv2json.js + checksum: 10c0/10e6af9e331950ed258f34ab49ac1b7060128ef81dcf32afc790bd1f7e8c3cc2aac7f5f875250a83f21f39bb5925fbd0872bb209f8aca32b3b77d32bab8a65ab + languageName: node + linkType: hard + +"d3-ease@npm:1 - 3, d3-ease@npm:3": + version: 3.0.1 + resolution: "d3-ease@npm:3.0.1" + checksum: 10c0/fec8ef826c0cc35cda3092c6841e07672868b1839fcaf556e19266a3a37e6bc7977d8298c0fcb9885e7799bfdcef7db1baaba9cd4dcf4bc5e952cf78574a88b0 + languageName: node + linkType: hard + +"d3-fetch@npm:3": + version: 3.0.1 + resolution: "d3-fetch@npm:3.0.1" + dependencies: + d3-dsv: "npm:1 - 3" + checksum: 10c0/4f467a79bf290395ac0cbb5f7562483f6a18668adc4c8eb84c9d3eff048b6f6d3b6f55079ba1ebf1908dabe000c941d46be447f8d78453b2dad5fb59fb6aa93b + languageName: node + linkType: hard + +"d3-force@npm:3": + version: 3.0.0 + resolution: "d3-force@npm:3.0.0" + dependencies: + d3-dispatch: "npm:1 - 3" + d3-quadtree: "npm:1 - 3" + d3-timer: "npm:1 - 3" + checksum: 10c0/220a16a1a1ac62ba56df61028896e4b52be89c81040d20229c876efc8852191482c233f8a52bb5a4e0875c321b8e5cb6413ef3dfa4d8fe79eeb7d52c587f52cf + languageName: node + linkType: hard + +"d3-format@npm:1 - 3, d3-format@npm:3": + version: 3.1.0 + resolution: "d3-format@npm:3.1.0" + checksum: 10c0/049f5c0871ebce9859fc5e2f07f336b3c5bfff52a2540e0bac7e703fce567cd9346f4ad1079dd18d6f1e0eaa0599941c1810898926f10ac21a31fd0a34b4aa75 + languageName: node + linkType: hard + +"d3-geo@npm:3": + version: 3.1.1 + resolution: "d3-geo@npm:3.1.1" + 
dependencies: + d3-array: "npm:2.5.0 - 3" + checksum: 10c0/d32270dd2dc8ac3ea63e8805d63239c4c8ec6c0d339d73b5e5a30a87f8f54db22a78fb434369799465eae169503b25f9a107c642c8a16c32a3285bc0e6d8e8c1 + languageName: node + linkType: hard + +"d3-hierarchy@npm:3": + version: 3.1.2 + resolution: "d3-hierarchy@npm:3.1.2" + checksum: 10c0/6dcdb480539644aa7fc0d72dfc7b03f99dfbcdf02714044e8c708577e0d5981deb9d3e99bbbb2d26422b55bcc342ac89a0fa2ea6c9d7302e2fc0951dd96f89cf + languageName: node + linkType: hard + +"d3-interpolate@npm:1 - 3, d3-interpolate@npm:1.2.0 - 3, d3-interpolate@npm:3": + version: 3.0.1 + resolution: "d3-interpolate@npm:3.0.1" + dependencies: + d3-color: "npm:1 - 3" + checksum: 10c0/19f4b4daa8d733906671afff7767c19488f51a43d251f8b7f484d5d3cfc36c663f0a66c38fe91eee30f40327443d799be17169f55a293a3ba949e84e57a33e6a + languageName: node + linkType: hard + +"d3-path@npm:1": + version: 1.0.9 + resolution: "d3-path@npm:1.0.9" + checksum: 10c0/e35e84df5abc18091f585725b8235e1fa97efc287571585427d3a3597301e6c506dea56b11dfb3c06ca5858b3eb7f02c1bf4f6a716aa9eade01c41b92d497eb5 + languageName: node + linkType: hard + +"d3-path@npm:1 - 3, d3-path@npm:3, d3-path@npm:^3.1.0": + version: 3.1.0 + resolution: "d3-path@npm:3.1.0" + checksum: 10c0/dc1d58ec87fa8319bd240cf7689995111a124b141428354e9637aa83059eb12e681f77187e0ada5dedfce346f7e3d1f903467ceb41b379bfd01cd8e31721f5da + languageName: node + linkType: hard + +"d3-polygon@npm:3": + version: 3.0.1 + resolution: "d3-polygon@npm:3.0.1" + checksum: 10c0/e236aa7f33efa9a4072907af7dc119f85b150a0716759d4fe5f12f62573018264a6cbde8617fbfa6944a7ae48c1c0c8d3f39ae72e11f66dd471e9b5e668385df + languageName: node + linkType: hard + +"d3-quadtree@npm:1 - 3, d3-quadtree@npm:3": + version: 3.0.1 + resolution: "d3-quadtree@npm:3.0.1" + checksum: 10c0/18302d2548bfecaef788152397edec95a76400fd97d9d7f42a089ceb68d910f685c96579d74e3712d57477ed042b056881b47cd836a521de683c66f47ce89090 + languageName: node + linkType: hard + +"d3-random@npm:3": + version: 3.0.1 + resolution: "d3-random@npm:3.0.1" + checksum: 10c0/987a1a1bcbf26e6cf01fd89d5a265b463b2cea93560fc17d9b1c45e8ed6ff2db5924601bcceb808de24c94133f000039eb7fa1c469a7a844ccbf1170cbb25b41 + languageName: node + linkType: hard + +"d3-sankey@npm:^0.12.3": + version: 0.12.3 + resolution: "d3-sankey@npm:0.12.3" + dependencies: + d3-array: "npm:1 - 2" + d3-shape: "npm:^1.2.0" + checksum: 10c0/261debb01a13269f6fc53b9ebaef174a015d5ad646242c23995bf514498829ab8b8f920a7873724a7494288b46bea3ce7ebc5a920b745bc8ae4caa5885cf5204 + languageName: node + linkType: hard + +"d3-scale-chromatic@npm:3": + version: 3.1.0 + resolution: "d3-scale-chromatic@npm:3.1.0" + dependencies: + d3-color: "npm:1 - 3" + d3-interpolate: "npm:1 - 3" + checksum: 10c0/9a3f4671ab0b971f4a411b42180d7cf92bfe8e8584e637ce7e698d705e18d6d38efbd20ec64f60cc0dfe966c20d40fc172565bc28aaa2990c0a006360eed91af + languageName: node + linkType: hard + +"d3-scale@npm:4": + version: 4.0.2 + resolution: "d3-scale@npm:4.0.2" + dependencies: + d3-array: "npm:2.10.0 - 3" + d3-format: "npm:1 - 3" + d3-interpolate: "npm:1.2.0 - 3" + d3-time: "npm:2.1.1 - 3" + d3-time-format: "npm:2 - 4" + checksum: 10c0/65d9ad8c2641aec30ed5673a7410feb187a224d6ca8d1a520d68a7d6eac9d04caedbff4713d1e8545be33eb7fec5739983a7ab1d22d4e5ad35368c6729d362f1 + languageName: node + linkType: hard + +"d3-selection@npm:2 - 3, d3-selection@npm:3": + version: 3.0.0 + resolution: "d3-selection@npm:3.0.0" + checksum: 
10c0/e59096bbe8f0cb0daa1001d9bdd6dbc93a688019abc97d1d8b37f85cd3c286a6875b22adea0931b0c88410d025563e1643019161a883c516acf50c190a11b56b + languageName: node + linkType: hard + +"d3-shape@npm:3": + version: 3.2.0 + resolution: "d3-shape@npm:3.2.0" + dependencies: + d3-path: "npm:^3.1.0" + checksum: 10c0/f1c9d1f09926daaf6f6193ae3b4c4b5521e81da7d8902d24b38694517c7f527ce3c9a77a9d3a5722ad1e3ff355860b014557b450023d66a944eabf8cfde37132 + languageName: node + linkType: hard + +"d3-shape@npm:^1.2.0": + version: 1.3.7 + resolution: "d3-shape@npm:1.3.7" + dependencies: + d3-path: "npm:1" + checksum: 10c0/548057ce59959815decb449f15632b08e2a1bdce208f9a37b5f98ec7629dda986c2356bc7582308405ce68aedae7d47b324df41507404df42afaf352907577ae + languageName: node + linkType: hard + +"d3-time-format@npm:2 - 4, d3-time-format@npm:4": + version: 4.1.0 + resolution: "d3-time-format@npm:4.1.0" + dependencies: + d3-time: "npm:1 - 3" + checksum: 10c0/735e00fb25a7fd5d418fac350018713ae394eefddb0d745fab12bbff0517f9cdb5f807c7bbe87bb6eeb06249662f8ea84fec075f7d0cd68609735b2ceb29d206 + languageName: node + linkType: hard + +"d3-time@npm:1 - 3, d3-time@npm:2.1.1 - 3, d3-time@npm:3": + version: 3.1.0 + resolution: "d3-time@npm:3.1.0" + dependencies: + d3-array: "npm:2 - 3" + checksum: 10c0/a984f77e1aaeaa182679b46fbf57eceb6ebdb5f67d7578d6f68ef933f8eeb63737c0949991618a8d29472dbf43736c7d7f17c452b2770f8c1271191cba724ca1 + languageName: node + linkType: hard + +"d3-timer@npm:1 - 3, d3-timer@npm:3": + version: 3.0.1 + resolution: "d3-timer@npm:3.0.1" + checksum: 10c0/d4c63cb4bb5461d7038aac561b097cd1c5673969b27cbdd0e87fa48d9300a538b9e6f39b4a7f0e3592ef4f963d858c8a9f0e92754db73116770856f2fc04561a + languageName: node + linkType: hard + +"d3-transition@npm:2 - 3, d3-transition@npm:3": + version: 3.0.1 + resolution: "d3-transition@npm:3.0.1" + dependencies: + d3-color: "npm:1 - 3" + d3-dispatch: "npm:1 - 3" + d3-ease: "npm:1 - 3" + d3-interpolate: "npm:1 - 3" + d3-timer: "npm:1 - 3" + peerDependencies: + d3-selection: 2 - 3 + checksum: 10c0/4e74535dda7024aa43e141635b7522bb70cf9d3dfefed975eb643b36b864762eca67f88fafc2ca798174f83ca7c8a65e892624f824b3f65b8145c6a1a88dbbad + languageName: node + linkType: hard + +"d3-zoom@npm:3": + version: 3.0.0 + resolution: "d3-zoom@npm:3.0.0" + dependencies: + d3-dispatch: "npm:1 - 3" + d3-drag: "npm:2 - 3" + d3-interpolate: "npm:1 - 3" + d3-selection: "npm:2 - 3" + d3-transition: "npm:2 - 3" + checksum: 10c0/ee2036479049e70d8c783d594c444fe00e398246048e3f11a59755cd0e21de62ece3126181b0d7a31bf37bcf32fd726f83ae7dea4495ff86ec7736ce5ad36fd3 + languageName: node + linkType: hard + +"d3@npm:^7.4.0, d3@npm:^7.8.2": + version: 7.9.0 + resolution: "d3@npm:7.9.0" + dependencies: + d3-array: "npm:3" + d3-axis: "npm:3" + d3-brush: "npm:3" + d3-chord: "npm:3" + d3-color: "npm:3" + d3-contour: "npm:4" + d3-delaunay: "npm:6" + d3-dispatch: "npm:3" + d3-drag: "npm:3" + d3-dsv: "npm:3" + d3-ease: "npm:3" + d3-fetch: "npm:3" + d3-force: "npm:3" + d3-format: "npm:3" + d3-geo: "npm:3" + d3-hierarchy: "npm:3" + d3-interpolate: "npm:3" + d3-path: "npm:3" + d3-polygon: "npm:3" + d3-quadtree: "npm:3" + d3-random: "npm:3" + d3-scale: "npm:4" + d3-scale-chromatic: "npm:3" + d3-selection: "npm:3" + d3-shape: "npm:3" + d3-time: "npm:3" + d3-time-format: "npm:4" + d3-timer: "npm:3" + d3-transition: "npm:3" + d3-zoom: "npm:3" + checksum: 10c0/3dd9c08c73cfaa69c70c49e603c85e049c3904664d9c79a1a52a0f52795828a1ff23592dc9a7b2257e711d68a615472a13103c212032f38e016d609796e087e8 + languageName: node + linkType: hard + +"dagre-d3-es@npm:7.0.10": + 
version: 7.0.10 + resolution: "dagre-d3-es@npm:7.0.10" + dependencies: + d3: "npm:^7.8.2" + lodash-es: "npm:^4.17.21" + checksum: 10c0/3e1bb6efe9a78cea3fe6ff265eb330692f057bf84c99d6a1d67db379231c37a1a1ca2e1ccc25a732ddf924cd5566062c033d88defd230debec324dc9256c6775 + languageName: node + linkType: hard + +"dagster-docs-beta@workspace:.": + version: 0.0.0-use.local + resolution: "dagster-docs-beta@workspace:." + dependencies: + "@babel/core": "npm:^7.25.2" + "@babel/eslint-parser": "npm:^7.25.1" + "@docusaurus/core": "npm:3.5.2" + "@docusaurus/eslint-plugin": "npm:^3.5.2" + "@docusaurus/module-type-aliases": "npm:3.5.2" + "@docusaurus/plugin-ideal-image": "npm:^3.5.2" + "@docusaurus/preset-classic": "npm:3.5.2" + "@docusaurus/theme-classic": "npm:^3.5.2" + "@docusaurus/theme-mermaid": "npm:^3.5.2" + "@docusaurus/tsconfig": "npm:3.5.2" + "@docusaurus/types": "npm:3.5.2" + "@eslint/js": "npm:^9.10.0" + "@mdx-js/react": "npm:^3.0.1" + "@types/babel__core": "npm:^7.20.5" + "@types/node": "npm:^22.5.4" + "@types/react": "npm:^18.3.5" + "@typescript-eslint/parser": "npm:^8.4.0" + clsx: "npm:^2.1.1" + docusaurus-plugin-image-zoom: "npm:^2.0.0" + docusaurus-plugin-sass: "npm:^0.2.5" + eslint: "npm:^8.57.0" + eslint-config-prettier: "npm:^9.1.0" + eslint-plugin-mdx: "npm:^3.1.5" + eslint-plugin-prettier: "npm:^5.2.1" + eslint-plugin-react: "npm:^7.35.2" + globals: "npm:^15.9.0" + modern-normalize: "npm:^3.0.1" + prettier: "npm:^3.3.3" + prettier-eslint: "npm:^16.3.0" + prism-react-renderer: "npm:^2.4.0" + raw-loader: "npm:^4.0.2" + react: "npm:^18.3.1" + react-dom: "npm:^18.3.1" + remark-frontmatter: "npm:^5.0.0" + remark-mdx: "npm:^3.0.1" + sass: "npm:^1.78.0" + typescript: "npm:~5.5.4" + typescript-eslint: "npm:^8.4.0" + webpack: "npm:^5.94.0" + languageName: unknown + linkType: soft + +"data-view-buffer@npm:^1.0.1": + version: 1.0.1 + resolution: "data-view-buffer@npm:1.0.1" + dependencies: + call-bind: "npm:^1.0.6" + es-errors: "npm:^1.3.0" + is-data-view: "npm:^1.0.1" + checksum: 10c0/8984119e59dbed906a11fcfb417d7d861936f16697a0e7216fe2c6c810f6b5e8f4a5281e73f2c28e8e9259027190ac4a33e2a65fdd7fa86ac06b76e838918583 + languageName: node + linkType: hard + +"data-view-byte-length@npm:^1.0.1": + version: 1.0.1 + resolution: "data-view-byte-length@npm:1.0.1" + dependencies: + call-bind: "npm:^1.0.7" + es-errors: "npm:^1.3.0" + is-data-view: "npm:^1.0.1" + checksum: 10c0/b7d9e48a0cf5aefed9ab7d123559917b2d7e0d65531f43b2fd95b9d3a6b46042dd3fca597c42bba384e66b70d7ad66ff23932f8367b241f53d93af42cfe04ec2 + languageName: node + linkType: hard + +"data-view-byte-offset@npm:^1.0.0": + version: 1.0.0 + resolution: "data-view-byte-offset@npm:1.0.0" + dependencies: + call-bind: "npm:^1.0.6" + es-errors: "npm:^1.3.0" + is-data-view: "npm:^1.0.1" + checksum: 10c0/21b0d2e53fd6e20cc4257c873bf6d36d77bd6185624b84076c0a1ddaa757b49aaf076254006341d35568e89f52eecd1ccb1a502cfb620f2beca04f48a6a62a8f + languageName: node + linkType: hard + +"dayjs@npm:^1.11.7": + version: 1.11.13 + resolution: "dayjs@npm:1.11.13" + checksum: 10c0/a3caf6ac8363c7dade9d1ee797848ddcf25c1ace68d9fe8678ecf8ba0675825430de5d793672ec87b24a69bf04a1544b176547b2539982275d5542a7955f35b7 + languageName: node + linkType: hard + +"debounce@npm:^1.2.1": + version: 1.2.1 + resolution: "debounce@npm:1.2.1" + checksum: 10c0/6c9320aa0973fc42050814621a7a8a78146c1975799b5b3cc1becf1f77ba9a5aa583987884230da0842a03f385def452fad5d60db97c3d1c8b824e38a8edf500 + languageName: node + linkType: hard + +"debug@npm:2.6.9, debug@npm:^2.6.0": + version: 2.6.9 + resolution: 
"debug@npm:2.6.9" + dependencies: + ms: "npm:2.0.0" + checksum: 10c0/121908fb839f7801180b69a7e218a40b5a0b718813b886b7d6bdb82001b931c938e2941d1e4450f33a1b1df1da653f5f7a0440c197f29fbf8a6e9d45ff6ef589 + languageName: node + linkType: hard + +"debug@npm:4, debug@npm:^4.0.0, debug@npm:^4.1.0, debug@npm:^4.1.1, debug@npm:^4.3.1, debug@npm:^4.3.2, debug@npm:^4.3.4": + version: 4.3.6 + resolution: "debug@npm:4.3.6" + dependencies: + ms: "npm:2.1.2" + peerDependenciesMeta: + supports-color: + optional: true + checksum: 10c0/3293416bff072389c101697d4611c402a6bacd1900ac20c0492f61a9cdd6b3b29750fc7f5e299f8058469ef60ff8fb79b86395a30374fbd2490113c1c7112285 + languageName: node + linkType: hard + +"decode-named-character-reference@npm:^1.0.0": + version: 1.0.2 + resolution: "decode-named-character-reference@npm:1.0.2" + dependencies: + character-entities: "npm:^2.0.0" + checksum: 10c0/66a9fc5d9b5385a2b3675c69ba0d8e893393d64057f7dbbb585265bb4fc05ec513d76943b8e5aac7d8016d20eea4499322cbf4cd6d54b466976b78f3a7587a4c + languageName: node + linkType: hard + +"decompress-response@npm:^6.0.0": + version: 6.0.0 + resolution: "decompress-response@npm:6.0.0" + dependencies: + mimic-response: "npm:^3.1.0" + checksum: 10c0/bd89d23141b96d80577e70c54fb226b2f40e74a6817652b80a116d7befb8758261ad073a8895648a29cc0a5947021ab66705cb542fa9c143c82022b27c5b175e + languageName: node + linkType: hard + +"deep-extend@npm:^0.6.0": + version: 0.6.0 + resolution: "deep-extend@npm:0.6.0" + checksum: 10c0/1c6b0abcdb901e13a44c7d699116d3d4279fdb261983122a3783e7273844d5f2537dc2e1c454a23fcf645917f93fbf8d07101c1d03c015a87faa662755212566 + languageName: node + linkType: hard + +"deep-is@npm:^0.1.3": + version: 0.1.4 + resolution: "deep-is@npm:0.1.4" + checksum: 10c0/7f0ee496e0dff14a573dc6127f14c95061b448b87b995fc96c017ce0a1e66af1675e73f1d6064407975bc4ea6ab679497a29fff7b5b9c4e99cb10797c1ad0b4c + languageName: node + linkType: hard + +"deepmerge@npm:^4.2.2, deepmerge@npm:^4.3.1": + version: 4.3.1 + resolution: "deepmerge@npm:4.3.1" + checksum: 10c0/e53481aaf1aa2c4082b5342be6b6d8ad9dfe387bc92ce197a66dea08bd4265904a087e75e464f14d1347cf2ac8afe1e4c16b266e0561cc5df29382d3c5f80044 + languageName: node + linkType: hard + +"default-gateway@npm:^6.0.3": + version: 6.0.3 + resolution: "default-gateway@npm:6.0.3" + dependencies: + execa: "npm:^5.0.0" + checksum: 10c0/5184f9e6e105d24fb44ade9e8741efa54bb75e84625c1ea78c4ef8b81dff09ca52d6dbdd1185cf0dc655bb6b282a64fffaf7ed2dd561b8d9ad6f322b1f039aba + languageName: node + linkType: hard + +"defer-to-connect@npm:^2.0.1": + version: 2.0.1 + resolution: "defer-to-connect@npm:2.0.1" + checksum: 10c0/625ce28e1b5ad10cf77057b9a6a727bf84780c17660f6644dab61dd34c23de3001f03cedc401f7d30a4ed9965c2e8a7336e220a329146f2cf85d4eddea429782 + languageName: node + linkType: hard + +"define-data-property@npm:^1.0.1, define-data-property@npm:^1.1.4": + version: 1.1.4 + resolution: "define-data-property@npm:1.1.4" + dependencies: + es-define-property: "npm:^1.0.0" + es-errors: "npm:^1.3.0" + gopd: "npm:^1.0.1" + checksum: 10c0/dea0606d1483eb9db8d930d4eac62ca0fa16738b0b3e07046cddfacf7d8c868bbe13fa0cb263eb91c7d0d527960dc3f2f2471a69ed7816210307f6744fe62e37 + languageName: node + linkType: hard + +"define-lazy-prop@npm:^2.0.0": + version: 2.0.0 + resolution: "define-lazy-prop@npm:2.0.0" + checksum: 10c0/db6c63864a9d3b7dc9def55d52764968a5af296de87c1b2cc71d8be8142e445208071953649e0386a8cc37cfcf9a2067a47207f1eb9ff250c2a269658fdae422 + languageName: node + linkType: hard + +"define-properties@npm:^1.1.3, define-properties@npm:^1.2.0, 
define-properties@npm:^1.2.1": + version: 1.2.1 + resolution: "define-properties@npm:1.2.1" + dependencies: + define-data-property: "npm:^1.0.1" + has-property-descriptors: "npm:^1.0.0" + object-keys: "npm:^1.1.1" + checksum: 10c0/88a152319ffe1396ccc6ded510a3896e77efac7a1bfbaa174a7b00414a1747377e0bb525d303794a47cf30e805c2ec84e575758512c6e44a993076d29fd4e6c3 + languageName: node + linkType: hard + +"del@npm:^6.1.1": + version: 6.1.1 + resolution: "del@npm:6.1.1" + dependencies: + globby: "npm:^11.0.1" + graceful-fs: "npm:^4.2.4" + is-glob: "npm:^4.0.1" + is-path-cwd: "npm:^2.2.0" + is-path-inside: "npm:^3.0.2" + p-map: "npm:^4.0.0" + rimraf: "npm:^3.0.2" + slash: "npm:^3.0.0" + checksum: 10c0/8a095c5ccade42c867a60252914ae485ec90da243d735d1f63ec1e64c1cfbc2b8810ad69a29ab6326d159d4fddaa2f5bad067808c42072351ec458efff86708f + languageName: node + linkType: hard + +"delaunator@npm:5": + version: 5.0.1 + resolution: "delaunator@npm:5.0.1" + dependencies: + robust-predicates: "npm:^3.0.2" + checksum: 10c0/3d7ea4d964731c5849af33fec0a271bc6753487b331fd7d43ccb17d77834706e1c383e6ab8fda0032da955e7576d1083b9603cdaf9cbdfd6b3ebd1fb8bb675a5 + languageName: node + linkType: hard + +"depd@npm:2.0.0": + version: 2.0.0 + resolution: "depd@npm:2.0.0" + checksum: 10c0/58bd06ec20e19529b06f7ad07ddab60e504d9e0faca4bd23079fac2d279c3594334d736508dc350e06e510aba5e22e4594483b3a6562ce7c17dd797f4cc4ad2c + languageName: node + linkType: hard + +"depd@npm:~1.1.2": + version: 1.1.2 + resolution: "depd@npm:1.1.2" + checksum: 10c0/acb24aaf936ef9a227b6be6d495f0d2eb20108a9a6ad40585c5bda1a897031512fef6484e4fdbb80bd249fdaa82841fa1039f416ece03188e677ba11bcfda249 + languageName: node + linkType: hard + +"dequal@npm:^2.0.0": + version: 2.0.3 + resolution: "dequal@npm:2.0.3" + checksum: 10c0/f98860cdf58b64991ae10205137c0e97d384c3a4edc7f807603887b7c4b850af1224a33d88012009f150861cbee4fa2d322c4cc04b9313bee312e47f6ecaa888 + languageName: node + linkType: hard + +"destroy@npm:1.2.0": + version: 1.2.0 + resolution: "destroy@npm:1.2.0" + checksum: 10c0/bd7633942f57418f5a3b80d5cb53898127bcf53e24cdf5d5f4396be471417671f0fee48a4ebe9a1e9defbde2a31280011af58a57e090ff822f589b443ed4e643 + languageName: node + linkType: hard + +"detect-libc@npm:^2.0.0, detect-libc@npm:^2.0.2": + version: 2.0.3 + resolution: "detect-libc@npm:2.0.3" + checksum: 10c0/88095bda8f90220c95f162bf92cad70bd0e424913e655c20578600e35b91edc261af27531cf160a331e185c0ced93944bc7e09939143225f56312d7fd800fdb7 + languageName: node + linkType: hard + +"detect-node@npm:^2.0.4": + version: 2.1.0 + resolution: "detect-node@npm:2.1.0" + checksum: 10c0/f039f601790f2e9d4654e499913259a798b1f5246ae24f86ab5e8bd4aaf3bce50484234c494f11fb00aecb0c6e2733aa7b1cf3f530865640b65fbbd65b2c4e09 + languageName: node + linkType: hard + +"detect-port-alt@npm:^1.1.6": + version: 1.1.6 + resolution: "detect-port-alt@npm:1.1.6" + dependencies: + address: "npm:^1.0.1" + debug: "npm:^2.6.0" + bin: + detect: ./bin/detect-port + detect-port: ./bin/detect-port + checksum: 10c0/7269e6aef7b782d98c77505c07a7a0f5e2ee98a9607dc791035fc0192fc58aa03cc833fae605e10eaf239a2a5a55cd938e0bb141dea764ac6180ca082fd62b23 + languageName: node + linkType: hard + +"detect-port@npm:^1.5.1": + version: 1.6.1 + resolution: "detect-port@npm:1.6.1" + dependencies: + address: "npm:^1.0.1" + debug: "npm:4" + bin: + detect: bin/detect-port.js + detect-port: bin/detect-port.js + checksum: 10c0/4ea9eb46a637cb21220dd0a62b6074792894fc77b2cacbc9de533d1908b2eedafa7bfd7547baaa2ac1e9c7ba7c289b34b17db896dca6da142f4fc6e2060eee17 + languageName: node + 
linkType: hard + +"devlop@npm:^1.0.0, devlop@npm:^1.1.0": + version: 1.1.0 + resolution: "devlop@npm:1.1.0" + dependencies: + dequal: "npm:^2.0.0" + checksum: 10c0/e0928ab8f94c59417a2b8389c45c55ce0a02d9ac7fd74ef62d01ba48060129e1d594501b77de01f3eeafc7cb00773819b0df74d96251cf20b31c5b3071f45c0e + languageName: node + linkType: hard + +"diff@npm:^5.0.0": + version: 5.2.0 + resolution: "diff@npm:5.2.0" + checksum: 10c0/aed0941f206fe261ecb258dc8d0ceea8abbde3ace5827518ff8d302f0fc9cc81ce116c4d8f379151171336caf0516b79e01abdc1ed1201b6440d895a66689eb4 + languageName: node + linkType: hard + +"dir-glob@npm:^3.0.1": + version: 3.0.1 + resolution: "dir-glob@npm:3.0.1" + dependencies: + path-type: "npm:^4.0.0" + checksum: 10c0/dcac00920a4d503e38bb64001acb19df4efc14536ada475725e12f52c16777afdee4db827f55f13a908ee7efc0cb282e2e3dbaeeb98c0993dd93d1802d3bf00c + languageName: node + linkType: hard + +"dlv@npm:^1.1.0": + version: 1.1.3 + resolution: "dlv@npm:1.1.3" + checksum: 10c0/03eb4e769f19a027fd5b43b59e8a05e3fd2100ac239ebb0bf9a745de35d449e2f25cfaf3aa3934664551d72856f4ae8b7822016ce5c42c2d27c18ae79429ec42 + languageName: node + linkType: hard + +"dns-packet@npm:^5.2.2": + version: 5.6.1 + resolution: "dns-packet@npm:5.6.1" + dependencies: + "@leichtgewicht/ip-codec": "npm:^2.0.1" + checksum: 10c0/8948d3d03063fb68e04a1e386875f8c3bcc398fc375f535f2b438fad8f41bf1afa6f5e70893ba44f4ae884c089247e0a31045722fa6ff0f01d228da103f1811d + languageName: node + linkType: hard + +"doctrine@npm:^2.1.0": + version: 2.1.0 + resolution: "doctrine@npm:2.1.0" + dependencies: + esutils: "npm:^2.0.2" + checksum: 10c0/b6416aaff1f380bf56c3b552f31fdf7a69b45689368deca72d28636f41c16bb28ec3ebc40ace97db4c1afc0ceeb8120e8492fe0046841c94c2933b2e30a7d5ac + languageName: node + linkType: hard + +"doctrine@npm:^3.0.0": + version: 3.0.0 + resolution: "doctrine@npm:3.0.0" + dependencies: + esutils: "npm:^2.0.2" + checksum: 10c0/c96bdccabe9d62ab6fea9399fdff04a66e6563c1d6fb3a3a063e8d53c3bb136ba63e84250bbf63d00086a769ad53aef92d2bd483f03f837fc97b71cbee6b2520 + languageName: node + linkType: hard + +"docusaurus-plugin-image-zoom@npm:^2.0.0": + version: 2.0.0 + resolution: "docusaurus-plugin-image-zoom@npm:2.0.0" + dependencies: + medium-zoom: "npm:^1.0.8" + validate-peer-dependencies: "npm:^2.2.0" + peerDependencies: + "@docusaurus/theme-classic": ">=3.0.0" + checksum: 10c0/882d603772f8d41cd217e2d5d40873f045f9734bc0af0d9d3b05cf3b6c99338e56172c71488f13f747c438e999e8a0b8412497c7ee431fa31fd1d9181014d0c6 + languageName: node + linkType: hard + +"docusaurus-plugin-sass@npm:^0.2.5": + version: 0.2.5 + resolution: "docusaurus-plugin-sass@npm:0.2.5" + dependencies: + sass-loader: "npm:^10.1.1" + peerDependencies: + "@docusaurus/core": ^2.0.0-beta || ^3.0.0-alpha + sass: ^1.30.0 + checksum: 10c0/1ed75a0e9ca52405ba474ad55add5318d786a18a551974520f1a9b6a4e5707cd4b20cd4fd4c60770e7cd683d7483372dd5f175ba13608048a1082847d3750bb7 + languageName: node + linkType: hard + +"dom-converter@npm:^0.2.0": + version: 0.2.0 + resolution: "dom-converter@npm:0.2.0" + dependencies: + utila: "npm:~0.4" + checksum: 10c0/e96aa63bd8c6ee3cd9ce19c3aecfc2c42e50a460e8087114794d4f5ecf3a4f052b34ea3bf2d73b5d80b4da619073b49905e6d7d788ceb7814ca4c29be5354a11 + languageName: node + linkType: hard + +"dom-serializer@npm:^1.0.1": + version: 1.4.1 + resolution: "dom-serializer@npm:1.4.1" + dependencies: + domelementtype: "npm:^2.0.1" + domhandler: "npm:^4.2.0" + entities: "npm:^2.0.0" + checksum: 
10c0/67d775fa1ea3de52035c98168ddcd59418356943b5eccb80e3c8b3da53adb8e37edb2cc2f885802b7b1765bf5022aec21dfc32910d7f9e6de4c3148f095ab5e0 + languageName: node + linkType: hard + +"dom-serializer@npm:^2.0.0": + version: 2.0.0 + resolution: "dom-serializer@npm:2.0.0" + dependencies: + domelementtype: "npm:^2.3.0" + domhandler: "npm:^5.0.2" + entities: "npm:^4.2.0" + checksum: 10c0/d5ae2b7110ca3746b3643d3ef60ef823f5f078667baf530cec096433f1627ec4b6fa8c072f09d079d7cda915fd2c7bc1b7b935681e9b09e591e1e15f4040b8e2 + languageName: node + linkType: hard + +"domelementtype@npm:^2.0.1, domelementtype@npm:^2.2.0, domelementtype@npm:^2.3.0": + version: 2.3.0 + resolution: "domelementtype@npm:2.3.0" + checksum: 10c0/686f5a9ef0fff078c1412c05db73a0dce096190036f33e400a07e2a4518e9f56b1e324f5c576a0a747ef0e75b5d985c040b0d51945ce780c0dd3c625a18cd8c9 + languageName: node + linkType: hard + +"domhandler@npm:^4.0.0, domhandler@npm:^4.2.0, domhandler@npm:^4.3.1": + version: 4.3.1 + resolution: "domhandler@npm:4.3.1" + dependencies: + domelementtype: "npm:^2.2.0" + checksum: 10c0/5c199c7468cb052a8b5ab80b13528f0db3d794c64fc050ba793b574e158e67c93f8336e87fd81e9d5ee43b0e04aea4d8b93ed7be4899cb726a1601b3ba18538b + languageName: node + linkType: hard + +"domhandler@npm:^5.0.2, domhandler@npm:^5.0.3": + version: 5.0.3 + resolution: "domhandler@npm:5.0.3" + dependencies: + domelementtype: "npm:^2.3.0" + checksum: 10c0/bba1e5932b3e196ad6862286d76adc89a0dbf0c773e5ced1eb01f9af930c50093a084eff14b8de5ea60b895c56a04d5de8bbc4930c5543d029091916770b2d2a + languageName: node + linkType: hard + +"dompurify@npm:^3.0.5": + version: 3.1.6 + resolution: "dompurify@npm:3.1.6" + checksum: 10c0/3de1cca187c78d3d8cb4134fc2985b644d6a81f6b4e024c77cfb04c1c2f38544ccf7b0ea37a48ce22fcca64594170ed7c22252574c75b801c44345cdd7b06c64 + languageName: node + linkType: hard + +"domutils@npm:^2.5.2, domutils@npm:^2.8.0": + version: 2.8.0 + resolution: "domutils@npm:2.8.0" + dependencies: + dom-serializer: "npm:^1.0.1" + domelementtype: "npm:^2.2.0" + domhandler: "npm:^4.2.0" + checksum: 10c0/d58e2ae01922f0dd55894e61d18119924d88091837887bf1438f2327f32c65eb76426bd9384f81e7d6dcfb048e0f83c19b222ad7101176ad68cdc9c695b563db + languageName: node + linkType: hard + +"domutils@npm:^3.0.1": + version: 3.1.0 + resolution: "domutils@npm:3.1.0" + dependencies: + dom-serializer: "npm:^2.0.0" + domelementtype: "npm:^2.3.0" + domhandler: "npm:^5.0.3" + checksum: 10c0/342d64cf4d07b8a0573fb51e0a6312a88fb520c7fefd751870bf72fa5fc0f2e0cb9a3958a573610b1d608c6e2a69b8e9b4b40f0bfb8f87a71bce4f180cca1887 + languageName: node + linkType: hard + +"dot-case@npm:^3.0.4": + version: 3.0.4 + resolution: "dot-case@npm:3.0.4" + dependencies: + no-case: "npm:^3.0.4" + tslib: "npm:^2.0.3" + checksum: 10c0/5b859ea65097a7ea870e2c91b5768b72ddf7fa947223fd29e167bcdff58fe731d941c48e47a38ec8aa8e43044c8fbd15cd8fa21689a526bc34b6548197cd5b05 + languageName: node + linkType: hard + +"dot-prop@npm:^6.0.1": + version: 6.0.1 + resolution: "dot-prop@npm:6.0.1" + dependencies: + is-obj: "npm:^2.0.0" + checksum: 10c0/30e51ec6408978a6951b21e7bc4938aad01a86f2fdf779efe52330205c6bb8a8ea12f35925c2029d6dc9d1df22f916f32f828ce1e9b259b1371c580541c22b5a + languageName: node + linkType: hard + +"duplexer@npm:^0.1.2": + version: 0.1.2 + resolution: "duplexer@npm:0.1.2" + checksum: 10c0/c57bcd4bdf7e623abab2df43a7b5b23d18152154529d166c1e0da6bee341d84c432d157d7e97b32fecb1bf3a8b8857dd85ed81a915789f550637ed25b8e64fc2 + languageName: node + linkType: hard + +"eastasianwidth@npm:^0.2.0": + version: 0.2.0 + resolution: 
"eastasianwidth@npm:0.2.0" + checksum: 10c0/26f364ebcdb6395f95124fda411f63137a4bfb5d3a06453f7f23dfe52502905bd84e0488172e0f9ec295fdc45f05c23d5d91baf16bd26f0fe9acd777a188dc39 + languageName: node + linkType: hard + +"ee-first@npm:1.1.1": + version: 1.1.1 + resolution: "ee-first@npm:1.1.1" + checksum: 10c0/b5bb125ee93161bc16bfe6e56c6b04de5ad2aa44234d8f644813cc95d861a6910903132b05093706de2b706599367c4130eb6d170f6b46895686b95f87d017b7 + languageName: node + linkType: hard + +"electron-to-chromium@npm:^1.5.4": + version: 1.5.13 + resolution: "electron-to-chromium@npm:1.5.13" + checksum: 10c0/1d88ac39447e1d718c4296f92fe89836df4688daf2d362d6c49108136795f05a56dd9c950f1c6715e0395fa037c3b5f5ea686c543fdc90e6d74a005877c45022 + languageName: node + linkType: hard + +"elkjs@npm:^0.9.0": + version: 0.9.3 + resolution: "elkjs@npm:0.9.3" + checksum: 10c0/caf544ff4fce8442d1d3dd6dface176c9b2fe26fc1e34f56122828e6eef7d2d7fe70d3202f9f3ecf0feb6287d4c8430949f483e63e450a7454bb39ccffab3808 + languageName: node + linkType: hard + +"emoji-regex@npm:^10.2.1": + version: 10.3.0 + resolution: "emoji-regex@npm:10.3.0" + checksum: 10c0/b4838e8dcdceb44cf47f59abe352c25ff4fe7857acaf5fb51097c427f6f75b44d052eb907a7a3b86f86bc4eae3a93f5c2b7460abe79c407307e6212d65c91163 + languageName: node + linkType: hard + +"emoji-regex@npm:^8.0.0": + version: 8.0.0 + resolution: "emoji-regex@npm:8.0.0" + checksum: 10c0/b6053ad39951c4cf338f9092d7bfba448cdfd46fe6a2a034700b149ac9ffbc137e361cbd3c442297f86bed2e5f7576c1b54cc0a6bf8ef5106cc62f496af35010 + languageName: node + linkType: hard + +"emoji-regex@npm:^9.2.2": + version: 9.2.2 + resolution: "emoji-regex@npm:9.2.2" + checksum: 10c0/af014e759a72064cf66e6e694a7fc6b0ed3d8db680427b021a89727689671cefe9d04151b2cad51dbaf85d5ba790d061cd167f1cf32eb7b281f6368b3c181639 + languageName: node + linkType: hard + +"emojilib@npm:^2.4.0": + version: 2.4.0 + resolution: "emojilib@npm:2.4.0" + checksum: 10c0/6e66ba8921175842193f974e18af448bb6adb0cf7aeea75e08b9d4ea8e9baba0e4a5347b46ed901491dcaba277485891c33a8d70b0560ca5cc9672a94c21ab8f + languageName: node + linkType: hard + +"emojis-list@npm:^3.0.0": + version: 3.0.0 + resolution: "emojis-list@npm:3.0.0" + checksum: 10c0/7dc4394b7b910444910ad64b812392159a21e1a7ecc637c775a440227dcb4f80eff7fe61f4453a7d7603fa23d23d30cc93fe9e4b5ed985b88d6441cd4a35117b + languageName: node + linkType: hard + +"emoticon@npm:^4.0.1": + version: 4.1.0 + resolution: "emoticon@npm:4.1.0" + checksum: 10c0/b3bc0a9b370445ac1e980ccba7baea614b4648199cc6fa0a51696a6d2393733e8f985edc4f1af381a1903f625789483dd155de427ec9fa2ea415fac116adc06d + languageName: node + linkType: hard + +"encodeurl@npm:~1.0.2": + version: 1.0.2 + resolution: "encodeurl@npm:1.0.2" + checksum: 10c0/f6c2387379a9e7c1156c1c3d4f9cb7bb11cf16dd4c1682e1f6746512564b053df5781029b6061296832b59fb22f459dbe250386d217c2f6e203601abb2ee0bec + languageName: node + linkType: hard + +"encodeurl@npm:~2.0.0": + version: 2.0.0 + resolution: "encodeurl@npm:2.0.0" + checksum: 10c0/5d317306acb13e6590e28e27924c754163946a2480de11865c991a3a7eed4315cd3fba378b543ca145829569eefe9b899f3d84bb09870f675ae60bc924b01ceb + languageName: node + linkType: hard + +"encoding@npm:^0.1.13": + version: 0.1.13 + resolution: "encoding@npm:0.1.13" + dependencies: + iconv-lite: "npm:^0.6.2" + checksum: 10c0/36d938712ff00fe1f4bac88b43bcffb5930c1efa57bbcdca9d67e1d9d6c57cfb1200fb01efe0f3109b2ce99b231f90779532814a81370a1bd3274a0f58585039 + languageName: node + linkType: hard + +"end-of-stream@npm:^1.1.0, end-of-stream@npm:^1.4.1": + version: 1.4.4 + resolution: 
"end-of-stream@npm:1.4.4" + dependencies: + once: "npm:^1.4.0" + checksum: 10c0/870b423afb2d54bb8d243c63e07c170409d41e20b47eeef0727547aea5740bd6717aca45597a9f2745525667a6b804c1e7bede41f856818faee5806dd9ff3975 + languageName: node + linkType: hard + +"enhanced-resolve@npm:^5.17.1": + version: 5.17.1 + resolution: "enhanced-resolve@npm:5.17.1" + dependencies: + graceful-fs: "npm:^4.2.4" + tapable: "npm:^2.2.0" + checksum: 10c0/81a0515675eca17efdba2cf5bad87abc91a528fc1191aad50e275e74f045b41506167d420099022da7181c8d787170ea41e4a11a0b10b7a16f6237daecb15370 + languageName: node + linkType: hard + +"entities@npm:^2.0.0": + version: 2.2.0 + resolution: "entities@npm:2.2.0" + checksum: 10c0/7fba6af1f116300d2ba1c5673fc218af1961b20908638391b4e1e6d5850314ee2ac3ec22d741b3a8060479911c99305164aed19b6254bde75e7e6b1b2c3f3aa3 + languageName: node + linkType: hard + +"entities@npm:^4.2.0, entities@npm:^4.4.0": + version: 4.5.0 + resolution: "entities@npm:4.5.0" + checksum: 10c0/5b039739f7621f5d1ad996715e53d964035f75ad3b9a4d38c6b3804bb226e282ffeae2443624d8fdd9c47d8e926ae9ac009c54671243f0c3294c26af7cc85250 + languageName: node + linkType: hard + +"env-paths@npm:^2.2.0": + version: 2.2.1 + resolution: "env-paths@npm:2.2.1" + checksum: 10c0/285325677bf00e30845e330eec32894f5105529db97496ee3f598478e50f008c5352a41a30e5e72ec9de8a542b5a570b85699cd63bd2bc646dbcb9f311d83bc4 + languageName: node + linkType: hard + +"err-code@npm:^2.0.2": + version: 2.0.3 + resolution: "err-code@npm:2.0.3" + checksum: 10c0/b642f7b4dd4a376e954947550a3065a9ece6733ab8e51ad80db727aaae0817c2e99b02a97a3d6cecc648a97848305e728289cf312d09af395403a90c9d4d8a66 + languageName: node + linkType: hard + +"error-ex@npm:^1.3.1, error-ex@npm:^1.3.2": + version: 1.3.2 + resolution: "error-ex@npm:1.3.2" + dependencies: + is-arrayish: "npm:^0.2.1" + checksum: 10c0/ba827f89369b4c93382cfca5a264d059dfefdaa56ecc5e338ffa58a6471f5ed93b71a20add1d52290a4873d92381174382658c885ac1a2305f7baca363ce9cce + languageName: node + linkType: hard + +"es-abstract@npm:^1.17.5, es-abstract@npm:^1.22.1, es-abstract@npm:^1.22.3, es-abstract@npm:^1.23.0, es-abstract@npm:^1.23.1, es-abstract@npm:^1.23.2, es-abstract@npm:^1.23.3": + version: 1.23.3 + resolution: "es-abstract@npm:1.23.3" + dependencies: + array-buffer-byte-length: "npm:^1.0.1" + arraybuffer.prototype.slice: "npm:^1.0.3" + available-typed-arrays: "npm:^1.0.7" + call-bind: "npm:^1.0.7" + data-view-buffer: "npm:^1.0.1" + data-view-byte-length: "npm:^1.0.1" + data-view-byte-offset: "npm:^1.0.0" + es-define-property: "npm:^1.0.0" + es-errors: "npm:^1.3.0" + es-object-atoms: "npm:^1.0.0" + es-set-tostringtag: "npm:^2.0.3" + es-to-primitive: "npm:^1.2.1" + function.prototype.name: "npm:^1.1.6" + get-intrinsic: "npm:^1.2.4" + get-symbol-description: "npm:^1.0.2" + globalthis: "npm:^1.0.3" + gopd: "npm:^1.0.1" + has-property-descriptors: "npm:^1.0.2" + has-proto: "npm:^1.0.3" + has-symbols: "npm:^1.0.3" + hasown: "npm:^2.0.2" + internal-slot: "npm:^1.0.7" + is-array-buffer: "npm:^3.0.4" + is-callable: "npm:^1.2.7" + is-data-view: "npm:^1.0.1" + is-negative-zero: "npm:^2.0.3" + is-regex: "npm:^1.1.4" + is-shared-array-buffer: "npm:^1.0.3" + is-string: "npm:^1.0.7" + is-typed-array: "npm:^1.1.13" + is-weakref: "npm:^1.0.2" + object-inspect: "npm:^1.13.1" + object-keys: "npm:^1.1.1" + object.assign: "npm:^4.1.5" + regexp.prototype.flags: "npm:^1.5.2" + safe-array-concat: "npm:^1.1.2" + safe-regex-test: "npm:^1.0.3" + string.prototype.trim: "npm:^1.2.9" + string.prototype.trimend: "npm:^1.0.8" + string.prototype.trimstart: 
"npm:^1.0.8" + typed-array-buffer: "npm:^1.0.2" + typed-array-byte-length: "npm:^1.0.1" + typed-array-byte-offset: "npm:^1.0.2" + typed-array-length: "npm:^1.0.6" + unbox-primitive: "npm:^1.0.2" + which-typed-array: "npm:^1.1.15" + checksum: 10c0/d27e9afafb225c6924bee9971a7f25f20c314f2d6cb93a63cada4ac11dcf42040896a6c22e5fb8f2a10767055ed4ddf400be3b1eb12297d281726de470b75666 + languageName: node + linkType: hard + +"es-define-property@npm:^1.0.0": + version: 1.0.0 + resolution: "es-define-property@npm:1.0.0" + dependencies: + get-intrinsic: "npm:^1.2.4" + checksum: 10c0/6bf3191feb7ea2ebda48b577f69bdfac7a2b3c9bcf97307f55fd6ef1bbca0b49f0c219a935aca506c993d8c5d8bddd937766cb760cd5e5a1071351f2df9f9aa4 + languageName: node + linkType: hard + +"es-errors@npm:^1.2.1, es-errors@npm:^1.3.0": + version: 1.3.0 + resolution: "es-errors@npm:1.3.0" + checksum: 10c0/0a61325670072f98d8ae3b914edab3559b6caa980f08054a3b872052640d91da01d38df55df797fcc916389d77fc92b8d5906cf028f4db46d7e3003abecbca85 + languageName: node + linkType: hard + +"es-iterator-helpers@npm:^1.0.19": + version: 1.0.19 + resolution: "es-iterator-helpers@npm:1.0.19" + dependencies: + call-bind: "npm:^1.0.7" + define-properties: "npm:^1.2.1" + es-abstract: "npm:^1.23.3" + es-errors: "npm:^1.3.0" + es-set-tostringtag: "npm:^2.0.3" + function-bind: "npm:^1.1.2" + get-intrinsic: "npm:^1.2.4" + globalthis: "npm:^1.0.3" + has-property-descriptors: "npm:^1.0.2" + has-proto: "npm:^1.0.3" + has-symbols: "npm:^1.0.3" + internal-slot: "npm:^1.0.7" + iterator.prototype: "npm:^1.1.2" + safe-array-concat: "npm:^1.1.2" + checksum: 10c0/ae8f0241e383b3d197383b9842c48def7fce0255fb6ed049311b686ce295595d9e389b466f6a1b7d4e7bb92d82f5e716d6fae55e20c1040249bf976743b038c5 + languageName: node + linkType: hard + +"es-module-lexer@npm:^1.2.1": + version: 1.5.4 + resolution: "es-module-lexer@npm:1.5.4" + checksum: 10c0/300a469488c2f22081df1e4c8398c78db92358496e639b0df7f89ac6455462aaf5d8893939087c1a1cbcbf20eed4610c70e0bcb8f3e4b0d80a5d2611c539408c + languageName: node + linkType: hard + +"es-object-atoms@npm:^1.0.0": + version: 1.0.0 + resolution: "es-object-atoms@npm:1.0.0" + dependencies: + es-errors: "npm:^1.3.0" + checksum: 10c0/1fed3d102eb27ab8d983337bb7c8b159dd2a1e63ff833ec54eea1311c96d5b08223b433060ba240541ca8adba9eee6b0a60cdbf2f80634b784febc9cc8b687b4 + languageName: node + linkType: hard + +"es-set-tostringtag@npm:^2.0.3": + version: 2.0.3 + resolution: "es-set-tostringtag@npm:2.0.3" + dependencies: + get-intrinsic: "npm:^1.2.4" + has-tostringtag: "npm:^1.0.2" + hasown: "npm:^2.0.1" + checksum: 10c0/f22aff1585eb33569c326323f0b0d175844a1f11618b86e193b386f8be0ea9474cfbe46df39c45d959f7aa8f6c06985dc51dd6bce5401645ec5a74c4ceaa836a + languageName: node + linkType: hard + +"es-shim-unscopables@npm:^1.0.0, es-shim-unscopables@npm:^1.0.2": + version: 1.0.2 + resolution: "es-shim-unscopables@npm:1.0.2" + dependencies: + hasown: "npm:^2.0.0" + checksum: 10c0/f495af7b4b7601a4c0cfb893581c352636e5c08654d129590386a33a0432cf13a7bdc7b6493801cadd990d838e2839b9013d1de3b880440cb537825e834fe783 + languageName: node + linkType: hard + +"es-to-primitive@npm:^1.2.1": + version: 1.2.1 + resolution: "es-to-primitive@npm:1.2.1" + dependencies: + is-callable: "npm:^1.1.4" + is-date-object: "npm:^1.0.1" + is-symbol: "npm:^1.0.2" + checksum: 10c0/0886572b8dc075cb10e50c0af62a03d03a68e1e69c388bd4f10c0649ee41b1fbb24840a1b7e590b393011b5cdbe0144b776da316762653685432df37d6de60f1 + languageName: node + linkType: hard + +"escalade@npm:^3.1.1, escalade@npm:^3.1.2": + version: 3.1.2 + resolution: 
"escalade@npm:3.1.2" + checksum: 10c0/6b4adafecd0682f3aa1cd1106b8fff30e492c7015b178bc81b2d2f75106dabea6c6d6e8508fc491bd58e597c74abb0e8e2368f943ecb9393d4162e3c2f3cf287 + languageName: node + linkType: hard + +"escape-goat@npm:^4.0.0": + version: 4.0.0 + resolution: "escape-goat@npm:4.0.0" + checksum: 10c0/9d2a8314e2370f2dd9436d177f6b3b1773525df8f895c8f3e1acb716f5fd6b10b336cb1cd9862d4709b36eb207dbe33664838deca9c6d55b8371be4eebb972f6 + languageName: node + linkType: hard + +"escape-html@npm:^1.0.3, escape-html@npm:~1.0.3": + version: 1.0.3 + resolution: "escape-html@npm:1.0.3" + checksum: 10c0/524c739d776b36c3d29fa08a22e03e8824e3b2fd57500e5e44ecf3cc4707c34c60f9ca0781c0e33d191f2991161504c295e98f68c78fe7baa6e57081ec6ac0a3 + languageName: node + linkType: hard + +"escape-string-regexp@npm:^1.0.2, escape-string-regexp@npm:^1.0.5": + version: 1.0.5 + resolution: "escape-string-regexp@npm:1.0.5" + checksum: 10c0/a968ad453dd0c2724e14a4f20e177aaf32bb384ab41b674a8454afe9a41c5e6fe8903323e0a1052f56289d04bd600f81278edf140b0fcc02f5cac98d0f5b5371 + languageName: node + linkType: hard + +"escape-string-regexp@npm:^4.0.0": + version: 4.0.0 + resolution: "escape-string-regexp@npm:4.0.0" + checksum: 10c0/9497d4dd307d845bd7f75180d8188bb17ea8c151c1edbf6b6717c100e104d629dc2dfb687686181b0f4b7d732c7dfdc4d5e7a8ff72de1b0ca283a75bbb3a9cd9 + languageName: node + linkType: hard + +"escape-string-regexp@npm:^5.0.0": + version: 5.0.0 + resolution: "escape-string-regexp@npm:5.0.0" + checksum: 10c0/6366f474c6f37a802800a435232395e04e9885919873e382b157ab7e8f0feb8fed71497f84a6f6a81a49aab41815522f5839112bd38026d203aea0c91622df95 + languageName: node + linkType: hard + +"eslint-config-prettier@npm:^9.1.0": + version: 9.1.0 + resolution: "eslint-config-prettier@npm:9.1.0" + peerDependencies: + eslint: ">=7.0.0" + bin: + eslint-config-prettier: bin/cli.js + checksum: 10c0/6d332694b36bc9ac6fdb18d3ca2f6ac42afa2ad61f0493e89226950a7091e38981b66bac2b47ba39d15b73fff2cd32c78b850a9cf9eed9ca9a96bfb2f3a2f10d + languageName: node + linkType: hard + +"eslint-mdx@npm:^3.1.5": + version: 3.1.5 + resolution: "eslint-mdx@npm:3.1.5" + dependencies: + acorn: "npm:^8.11.3" + acorn-jsx: "npm:^5.3.2" + espree: "npm:^9.6.1" + estree-util-visit: "npm:^2.0.0" + remark-mdx: "npm:^3.0.0" + remark-parse: "npm:^11.0.0" + remark-stringify: "npm:^11.0.0" + synckit: "npm:^0.9.0" + tslib: "npm:^2.6.2" + unified: "npm:^11.0.4" + unified-engine: "npm:^11.2.0" + unist-util-visit: "npm:^5.0.0" + uvu: "npm:^0.5.6" + vfile: "npm:^6.0.1" + peerDependencies: + eslint: ">=8.0.0" + checksum: 10c0/3a9e22ba5ead1d2f811adefd0c3aa54ed85c01329c2aaab25514d9da6a39c8a5bc44d568145f082cffab7d9368ca2730ab314e4f8d2b281ac47f86199d2014d3 + languageName: node + linkType: hard + +"eslint-plugin-markdown@npm:^3.0.1": + version: 3.0.1 + resolution: "eslint-plugin-markdown@npm:3.0.1" + dependencies: + mdast-util-from-markdown: "npm:^0.8.5" + peerDependencies: + eslint: ^6.0.0 || ^7.0.0 || ^8.0.0 + checksum: 10c0/ba27a0f2115b85294591f3cf4e64c66b60cd508915cc3394869dda38c9e1f5ef230158f180cc21b5431085d4e4daac9f3f173078c00b54e659272318d0e6600d + languageName: node + linkType: hard + +"eslint-plugin-mdx@npm:^3.1.5": + version: 3.1.5 + resolution: "eslint-plugin-mdx@npm:3.1.5" + dependencies: + eslint-mdx: "npm:^3.1.5" + eslint-plugin-markdown: "npm:^3.0.1" + remark-mdx: "npm:^3.0.0" + remark-parse: "npm:^11.0.0" + remark-stringify: "npm:^11.0.0" + tslib: "npm:^2.6.2" + unified: "npm:^11.0.4" + vfile: "npm:^6.0.1" + peerDependencies: + eslint: ">=8.0.0" + checksum: 
10c0/261e3ffee01bae7839b1357a7fb00ab23438d3b6fe6ad65b97dd06fbf2501571b95313914b0e41bf489ffd26d250acc7dfefc2f492247e6c2c343560a93693ce + languageName: node + linkType: hard + +"eslint-plugin-prettier@npm:^5.2.1": + version: 5.2.1 + resolution: "eslint-plugin-prettier@npm:5.2.1" + dependencies: + prettier-linter-helpers: "npm:^1.0.0" + synckit: "npm:^0.9.1" + peerDependencies: + "@types/eslint": ">=8.0.0" + eslint: ">=8.0.0" + eslint-config-prettier: "*" + prettier: ">=3.0.0" + peerDependenciesMeta: + "@types/eslint": + optional: true + eslint-config-prettier: + optional: true + checksum: 10c0/4bc8bbaf5bb556c9c501dcdff369137763c49ccaf544f9fa91400360ed5e3a3f1234ab59690e06beca5b1b7e6f6356978cdd3b02af6aba3edea2ffe69ca6e8b2 + languageName: node + linkType: hard + +"eslint-plugin-react@npm:^7.35.2": + version: 7.35.2 + resolution: "eslint-plugin-react@npm:7.35.2" + dependencies: + array-includes: "npm:^3.1.8" + array.prototype.findlast: "npm:^1.2.5" + array.prototype.flatmap: "npm:^1.3.2" + array.prototype.tosorted: "npm:^1.1.4" + doctrine: "npm:^2.1.0" + es-iterator-helpers: "npm:^1.0.19" + estraverse: "npm:^5.3.0" + hasown: "npm:^2.0.2" + jsx-ast-utils: "npm:^2.4.1 || ^3.0.0" + minimatch: "npm:^3.1.2" + object.entries: "npm:^1.1.8" + object.fromentries: "npm:^2.0.8" + object.values: "npm:^1.2.0" + prop-types: "npm:^15.8.1" + resolve: "npm:^2.0.0-next.5" + semver: "npm:^6.3.1" + string.prototype.matchall: "npm:^4.0.11" + string.prototype.repeat: "npm:^1.0.0" + peerDependencies: + eslint: ^3 || ^4 || ^5 || ^6 || ^7 || ^8 || ^9.7 + checksum: 10c0/5f891f5a77e902a0ca8d10b23d0b800e90a09400187febe5986c5078d6277baa4b974d6acdbba25baae065dbcf12eb9241b5f5782527d0780314c2ee5006a8af + languageName: node + linkType: hard + +"eslint-scope@npm:5.1.1, eslint-scope@npm:^5.1.1": + version: 5.1.1 + resolution: "eslint-scope@npm:5.1.1" + dependencies: + esrecurse: "npm:^4.3.0" + estraverse: "npm:^4.1.1" + checksum: 10c0/d30ef9dc1c1cbdece34db1539a4933fe3f9b14e1ffb27ecc85987902ee663ad7c9473bbd49a9a03195a373741e62e2f807c4938992e019b511993d163450e70a + languageName: node + linkType: hard + +"eslint-scope@npm:^7.1.1, eslint-scope@npm:^7.2.2": + version: 7.2.2 + resolution: "eslint-scope@npm:7.2.2" + dependencies: + esrecurse: "npm:^4.3.0" + estraverse: "npm:^5.2.0" + checksum: 10c0/613c267aea34b5a6d6c00514e8545ef1f1433108097e857225fed40d397dd6b1809dffd11c2fde23b37ca53d7bf935fe04d2a18e6fc932b31837b6ad67e1c116 + languageName: node + linkType: hard + +"eslint-visitor-keys@npm:^2.1.0": + version: 2.1.0 + resolution: "eslint-visitor-keys@npm:2.1.0" + checksum: 10c0/9f0e3a2db751d84067d15977ac4b4472efd6b303e369e6ff241a99feac04da758f46d5add022c33d06b53596038dbae4b4aceb27c7e68b8dfc1055b35e495787 + languageName: node + linkType: hard + +"eslint-visitor-keys@npm:^3.3.0, eslint-visitor-keys@npm:^3.4.1, eslint-visitor-keys@npm:^3.4.3": + version: 3.4.3 + resolution: "eslint-visitor-keys@npm:3.4.3" + checksum: 10c0/92708e882c0a5ffd88c23c0b404ac1628cf20104a108c745f240a13c332a11aac54f49a22d5762efbffc18ecbc9a580d1b7ad034bf5f3cc3307e5cbff2ec9820 + languageName: node + linkType: hard + +"eslint@npm:^8.57.0, eslint@npm:^8.7.0": + version: 8.57.0 + resolution: "eslint@npm:8.57.0" + dependencies: + "@eslint-community/eslint-utils": "npm:^4.2.0" + "@eslint-community/regexpp": "npm:^4.6.1" + "@eslint/eslintrc": "npm:^2.1.4" + "@eslint/js": "npm:8.57.0" + "@humanwhocodes/config-array": "npm:^0.11.14" + "@humanwhocodes/module-importer": "npm:^1.0.1" + "@nodelib/fs.walk": "npm:^1.2.8" + "@ungap/structured-clone": "npm:^1.2.0" + ajv: "npm:^6.12.4" 
+ chalk: "npm:^4.0.0" + cross-spawn: "npm:^7.0.2" + debug: "npm:^4.3.2" + doctrine: "npm:^3.0.0" + escape-string-regexp: "npm:^4.0.0" + eslint-scope: "npm:^7.2.2" + eslint-visitor-keys: "npm:^3.4.3" + espree: "npm:^9.6.1" + esquery: "npm:^1.4.2" + esutils: "npm:^2.0.2" + fast-deep-equal: "npm:^3.1.3" + file-entry-cache: "npm:^6.0.1" + find-up: "npm:^5.0.0" + glob-parent: "npm:^6.0.2" + globals: "npm:^13.19.0" + graphemer: "npm:^1.4.0" + ignore: "npm:^5.2.0" + imurmurhash: "npm:^0.1.4" + is-glob: "npm:^4.0.0" + is-path-inside: "npm:^3.0.3" + js-yaml: "npm:^4.1.0" + json-stable-stringify-without-jsonify: "npm:^1.0.1" + levn: "npm:^0.4.1" + lodash.merge: "npm:^4.6.2" + minimatch: "npm:^3.1.2" + natural-compare: "npm:^1.4.0" + optionator: "npm:^0.9.3" + strip-ansi: "npm:^6.0.1" + text-table: "npm:^0.2.0" + bin: + eslint: bin/eslint.js + checksum: 10c0/00bb96fd2471039a312435a6776fe1fd557c056755eaa2b96093ef3a8508c92c8775d5f754768be6b1dddd09fdd3379ddb231eeb9b6c579ee17ea7d68000a529 + languageName: node + linkType: hard + +"espree@npm:^9.3.1, espree@npm:^9.6.0, espree@npm:^9.6.1": + version: 9.6.1 + resolution: "espree@npm:9.6.1" + dependencies: + acorn: "npm:^8.9.0" + acorn-jsx: "npm:^5.3.2" + eslint-visitor-keys: "npm:^3.4.1" + checksum: 10c0/1a2e9b4699b715347f62330bcc76aee224390c28bb02b31a3752e9d07549c473f5f986720483c6469cf3cfb3c9d05df612ffc69eb1ee94b54b739e67de9bb460 + languageName: node + linkType: hard + +"esprima@npm:^4.0.0": + version: 4.0.1 + resolution: "esprima@npm:4.0.1" + bin: + esparse: ./bin/esparse.js + esvalidate: ./bin/esvalidate.js + checksum: 10c0/ad4bab9ead0808cf56501750fd9d3fb276f6b105f987707d059005d57e182d18a7c9ec7f3a01794ebddcca676773e42ca48a32d67a250c9d35e009ca613caba3 + languageName: node + linkType: hard + +"esquery@npm:^1.4.0, esquery@npm:^1.4.2": + version: 1.6.0 + resolution: "esquery@npm:1.6.0" + dependencies: + estraverse: "npm:^5.1.0" + checksum: 10c0/cb9065ec605f9da7a76ca6dadb0619dfb611e37a81e318732977d90fab50a256b95fee2d925fba7c2f3f0523aa16f91587246693bc09bc34d5a59575fe6e93d2 + languageName: node + linkType: hard + +"esrecurse@npm:^4.3.0": + version: 4.3.0 + resolution: "esrecurse@npm:4.3.0" + dependencies: + estraverse: "npm:^5.2.0" + checksum: 10c0/81a37116d1408ded88ada45b9fb16dbd26fba3aadc369ce50fcaf82a0bac12772ebd7b24cd7b91fc66786bf2c1ac7b5f196bc990a473efff972f5cb338877cf5 + languageName: node + linkType: hard + +"estraverse@npm:^4.1.1": + version: 4.3.0 + resolution: "estraverse@npm:4.3.0" + checksum: 10c0/9cb46463ef8a8a4905d3708a652d60122a0c20bb58dec7e0e12ab0e7235123d74214fc0141d743c381813e1b992767e2708194f6f6e0f9fd00c1b4e0887b8b6d + languageName: node + linkType: hard + +"estraverse@npm:^5.1.0, estraverse@npm:^5.2.0, estraverse@npm:^5.3.0": + version: 5.3.0 + resolution: "estraverse@npm:5.3.0" + checksum: 10c0/1ff9447b96263dec95d6d67431c5e0771eb9776427421260a3e2f0fdd5d6bd4f8e37a7338f5ad2880c9f143450c9b1e4fc2069060724570a49cf9cf0312bd107 + languageName: node + linkType: hard + +"estree-util-attach-comments@npm:^3.0.0": + version: 3.0.0 + resolution: "estree-util-attach-comments@npm:3.0.0" + dependencies: + "@types/estree": "npm:^1.0.0" + checksum: 10c0/ee69bb5c45e2ad074725b90ed181c1c934b29d81bce4b0c7761431e83c4c6ab1b223a6a3d6a4fbeb92128bc5d5ee201d5dd36cf1770aa5e16a40b0cf36e8a1f1 + languageName: node + linkType: hard + +"estree-util-build-jsx@npm:^3.0.0": + version: 3.0.1 + resolution: "estree-util-build-jsx@npm:3.0.1" + dependencies: + "@types/estree-jsx": "npm:^1.0.0" + devlop: "npm:^1.0.0" + estree-util-is-identifier-name: "npm:^3.0.0" + estree-walker: 
"npm:^3.0.0" + checksum: 10c0/274c119817b8e7caa14a9778f1e497fea56cdd2b01df1a1ed037f843178992d3afe85e0d364d485e1e2e239255763553d1b647b15e4a7ba50851bcb43dc6bf80 + languageName: node + linkType: hard + +"estree-util-is-identifier-name@npm:^3.0.0": + version: 3.0.0 + resolution: "estree-util-is-identifier-name@npm:3.0.0" + checksum: 10c0/d1881c6ed14bd588ebd508fc90bf2a541811dbb9ca04dec2f39d27dcaa635f85b5ed9bbbe7fc6fb1ddfca68744a5f7c70456b4b7108b6c4c52780631cc787c5b + languageName: node + linkType: hard + +"estree-util-to-js@npm:^2.0.0": + version: 2.0.0 + resolution: "estree-util-to-js@npm:2.0.0" + dependencies: + "@types/estree-jsx": "npm:^1.0.0" + astring: "npm:^1.8.0" + source-map: "npm:^0.7.0" + checksum: 10c0/ac88cb831401ef99e365f92f4af903755d56ae1ce0e0f0fb8ff66e678141f3d529194f0fb15f6c78cd7554c16fda36854df851d58f9e05cfab15bddf7a97cea0 + languageName: node + linkType: hard + +"estree-util-value-to-estree@npm:^3.0.1": + version: 3.1.2 + resolution: "estree-util-value-to-estree@npm:3.1.2" + dependencies: + "@types/estree": "npm:^1.0.0" + checksum: 10c0/fb0fa42f44488eeb2357b60dc3fd5581422b0a36144fd90639fd3963c7396f225e7d7efeee0144b0a7293ea00e4ec9647b8302d057d48f894e8d5775c3c72eb7 + languageName: node + linkType: hard + +"estree-util-visit@npm:^2.0.0": + version: 2.0.0 + resolution: "estree-util-visit@npm:2.0.0" + dependencies: + "@types/estree-jsx": "npm:^1.0.0" + "@types/unist": "npm:^3.0.0" + checksum: 10c0/acda8b03cc8f890d79c7c7361f6c95331ba84b7ccc0c32b49f447fc30206b20002b37ffdfc97b6ad16e6fe065c63ecbae1622492e2b6b4775c15966606217f39 + languageName: node + linkType: hard + +"estree-walker@npm:^3.0.0": + version: 3.0.3 + resolution: "estree-walker@npm:3.0.3" + dependencies: + "@types/estree": "npm:^1.0.0" + checksum: 10c0/c12e3c2b2642d2bcae7d5aa495c60fa2f299160946535763969a1c83fc74518ffa9c2cd3a8b69ac56aea547df6a8aac25f729a342992ef0bbac5f1c73e78995d + languageName: node + linkType: hard + +"esutils@npm:^2.0.2": + version: 2.0.3 + resolution: "esutils@npm:2.0.3" + checksum: 10c0/9a2fe69a41bfdade834ba7c42de4723c97ec776e40656919c62cbd13607c45e127a003f05f724a1ea55e5029a4cf2de444b13009f2af71271e42d93a637137c7 + languageName: node + linkType: hard + +"eta@npm:^2.2.0": + version: 2.2.0 + resolution: "eta@npm:2.2.0" + checksum: 10c0/643b54d9539d2761bf6c5f4f48df1a5ea2d46c7f5a5fdc47a7d4802a8aa2b6262d4d61f724452e226c18cf82db02d48e65293fcc548f26a3f9d75a5ba7c3b859 + languageName: node + linkType: hard + +"etag@npm:~1.8.1": + version: 1.8.1 + resolution: "etag@npm:1.8.1" + checksum: 10c0/12be11ef62fb9817314d790089a0a49fae4e1b50594135dcb8076312b7d7e470884b5100d249b28c18581b7fd52f8b485689ffae22a11ed9ec17377a33a08f84 + languageName: node + linkType: hard + +"eval@npm:^0.1.8": + version: 0.1.8 + resolution: "eval@npm:0.1.8" + dependencies: + "@types/node": "npm:*" + require-like: "npm:>= 0.1.1" + checksum: 10c0/258e700bff09e3ce3344273d5b6691b8ec5b043538d84f738f14d8b0aded33d64c00c15b380de725b1401b15f428ab35a9e7ca19a7d25f162c4f877c71586be9 + languageName: node + linkType: hard + +"eventemitter3@npm:^4.0.0": + version: 4.0.7 + resolution: "eventemitter3@npm:4.0.7" + checksum: 10c0/5f6d97cbcbac47be798e6355e3a7639a84ee1f7d9b199a07017f1d2f1e2fe236004d14fa5dfaeba661f94ea57805385e326236a6debbc7145c8877fbc0297c6b + languageName: node + linkType: hard + +"events@npm:^3.2.0": + version: 3.3.0 + resolution: "events@npm:3.3.0" + checksum: 10c0/d6b6f2adbccbcda74ddbab52ed07db727ef52e31a61ed26db9feb7dc62af7fc8e060defa65e5f8af9449b86b52cc1a1f6a79f2eafcf4e62add2b7a1fa4a432f6 + languageName: node + linkType: hard + 
+"execa@npm:^5.0.0": + version: 5.1.1 + resolution: "execa@npm:5.1.1" + dependencies: + cross-spawn: "npm:^7.0.3" + get-stream: "npm:^6.0.0" + human-signals: "npm:^2.1.0" + is-stream: "npm:^2.0.0" + merge-stream: "npm:^2.0.0" + npm-run-path: "npm:^4.0.1" + onetime: "npm:^5.1.2" + signal-exit: "npm:^3.0.3" + strip-final-newline: "npm:^2.0.0" + checksum: 10c0/c8e615235e8de4c5addf2fa4c3da3e3aa59ce975a3e83533b4f6a71750fb816a2e79610dc5f1799b6e28976c9ae86747a36a606655bf8cb414a74d8d507b304f + languageName: node + linkType: hard + +"expand-template@npm:^2.0.3": + version: 2.0.3 + resolution: "expand-template@npm:2.0.3" + checksum: 10c0/1c9e7afe9acadf9d373301d27f6a47b34e89b3391b1ef38b7471d381812537ef2457e620ae7f819d2642ce9c43b189b3583813ec395e2938319abe356a9b2f51 + languageName: node + linkType: hard + +"exponential-backoff@npm:^3.1.1": + version: 3.1.1 + resolution: "exponential-backoff@npm:3.1.1" + checksum: 10c0/160456d2d647e6019640bd07111634d8c353038d9fa40176afb7cd49b0548bdae83b56d05e907c2cce2300b81cae35d800ef92fefb9d0208e190fa3b7d6bb579 + languageName: node + linkType: hard + +"express@npm:^4.17.3": + version: 4.21.0 + resolution: "express@npm:4.21.0" + dependencies: + accepts: "npm:~1.3.8" + array-flatten: "npm:1.1.1" + body-parser: "npm:1.20.3" + content-disposition: "npm:0.5.4" + content-type: "npm:~1.0.4" + cookie: "npm:0.6.0" + cookie-signature: "npm:1.0.6" + debug: "npm:2.6.9" + depd: "npm:2.0.0" + encodeurl: "npm:~2.0.0" + escape-html: "npm:~1.0.3" + etag: "npm:~1.8.1" + finalhandler: "npm:1.3.1" + fresh: "npm:0.5.2" + http-errors: "npm:2.0.0" + merge-descriptors: "npm:1.0.3" + methods: "npm:~1.1.2" + on-finished: "npm:2.4.1" + parseurl: "npm:~1.3.3" + path-to-regexp: "npm:0.1.10" + proxy-addr: "npm:~2.0.7" + qs: "npm:6.13.0" + range-parser: "npm:~1.2.1" + safe-buffer: "npm:5.2.1" + send: "npm:0.19.0" + serve-static: "npm:1.16.2" + setprototypeof: "npm:1.2.0" + statuses: "npm:2.0.1" + type-is: "npm:~1.6.18" + utils-merge: "npm:1.0.1" + vary: "npm:~1.1.2" + checksum: 10c0/4cf7ca328f3fdeb720f30ccb2ea7708bfa7d345f9cc460b64a82bf1b2c91e5b5852ba15a9a11b2a165d6089acf83457fc477dc904d59cd71ed34c7a91762c6cc + languageName: node + linkType: hard + +"extend-shallow@npm:^2.0.1": + version: 2.0.1 + resolution: "extend-shallow@npm:2.0.1" + dependencies: + is-extendable: "npm:^0.1.0" + checksum: 10c0/ee1cb0a18c9faddb42d791b2d64867bd6cfd0f3affb711782eb6e894dd193e2934a7f529426aac7c8ddb31ac5d38000a00aa2caf08aa3dfc3e1c8ff6ba340bd9 + languageName: node + linkType: hard + +"extend@npm:^3.0.0": + version: 3.0.2 + resolution: "extend@npm:3.0.2" + checksum: 10c0/73bf6e27406e80aa3e85b0d1c4fd987261e628064e170ca781125c0b635a3dabad5e05adbf07595ea0cf1e6c5396cacb214af933da7cbaf24fe75ff14818e8f9 + languageName: node + linkType: hard + +"fast-deep-equal@npm:^3.1.1, fast-deep-equal@npm:^3.1.3": + version: 3.1.3 + resolution: "fast-deep-equal@npm:3.1.3" + checksum: 10c0/40dedc862eb8992c54579c66d914635afbec43350afbbe991235fdcb4e3a8d5af1b23ae7e79bef7d4882d0ecee06c3197488026998fb19f72dc95acff1d1b1d0 + languageName: node + linkType: hard + +"fast-diff@npm:^1.1.2": + version: 1.3.0 + resolution: "fast-diff@npm:1.3.0" + checksum: 10c0/5c19af237edb5d5effda008c891a18a585f74bf12953be57923f17a3a4d0979565fc64dbc73b9e20926b9d895f5b690c618cbb969af0cf022e3222471220ad29 + languageName: node + linkType: hard + +"fast-fifo@npm:^1.2.0, fast-fifo@npm:^1.3.2": + version: 1.3.2 + resolution: "fast-fifo@npm:1.3.2" + checksum: 
10c0/d53f6f786875e8b0529f784b59b4b05d4b5c31c651710496440006a398389a579c8dbcd2081311478b5bf77f4b0b21de69109c5a4eabea9d8e8783d1eb864e4c + languageName: node + linkType: hard + +"fast-glob@npm:^3.2.11, fast-glob@npm:^3.2.9, fast-glob@npm:^3.3.0, fast-glob@npm:^3.3.2": + version: 3.3.2 + resolution: "fast-glob@npm:3.3.2" + dependencies: + "@nodelib/fs.stat": "npm:^2.0.2" + "@nodelib/fs.walk": "npm:^1.2.3" + glob-parent: "npm:^5.1.2" + merge2: "npm:^1.3.0" + micromatch: "npm:^4.0.4" + checksum: 10c0/42baad7b9cd40b63e42039132bde27ca2cb3a4950d0a0f9abe4639ea1aa9d3e3b40f98b1fe31cbc0cc17b664c9ea7447d911a152fa34ec5b72977b125a6fc845 + languageName: node + linkType: hard + +"fast-json-stable-stringify@npm:^2.0.0": + version: 2.1.0 + resolution: "fast-json-stable-stringify@npm:2.1.0" + checksum: 10c0/7f081eb0b8a64e0057b3bb03f974b3ef00135fbf36c1c710895cd9300f13c94ba809bb3a81cf4e1b03f6e5285610a61abbd7602d0652de423144dfee5a389c9b + languageName: node + linkType: hard + +"fast-levenshtein@npm:^2.0.6": + version: 2.0.6 + resolution: "fast-levenshtein@npm:2.0.6" + checksum: 10c0/111972b37338bcb88f7d9e2c5907862c280ebf4234433b95bc611e518d192ccb2d38119c4ac86e26b668d75f7f3894f4ff5c4982899afced7ca78633b08287c4 + languageName: node + linkType: hard + +"fast-uri@npm:^3.0.1": + version: 3.0.1 + resolution: "fast-uri@npm:3.0.1" + checksum: 10c0/3cd46d6006083b14ca61ffe9a05b8eef75ef87e9574b6f68f2e17ecf4daa7aaadeff44e3f0f7a0ef4e0f7e7c20fc07beec49ff14dc72d0b500f00386592f2d10 + languageName: node + linkType: hard + +"fast-url-parser@npm:1.1.3": + version: 1.1.3 + resolution: "fast-url-parser@npm:1.1.3" + dependencies: + punycode: "npm:^1.3.2" + checksum: 10c0/d85c5c409cf0215417380f98a2d29c23a95004d93ff0d8bdf1af5f1a9d1fc608ac89ac6ffe863783d2c73efb3850dd35390feb1de3296f49877bfee0392eb5d3 + languageName: node + linkType: hard + +"fastq@npm:^1.6.0": + version: 1.17.1 + resolution: "fastq@npm:1.17.1" + dependencies: + reusify: "npm:^1.0.4" + checksum: 10c0/1095f16cea45fb3beff558bb3afa74ca7a9250f5a670b65db7ed585f92b4b48381445cd328b3d87323da81e43232b5d5978a8201bde84e0cd514310f1ea6da34 + languageName: node + linkType: hard + +"fault@npm:^2.0.0": + version: 2.0.1 + resolution: "fault@npm:2.0.1" + dependencies: + format: "npm:^0.2.0" + checksum: 10c0/b80fbf1019b9ce8b08ee09ce86e02b028563e13a32ac3be34e42bfac00a97b96d8dee6d31e26578ffc16224eb6729e01ff1f97ddfeee00494f4f56c0aeed4bdd + languageName: node + linkType: hard + +"faye-websocket@npm:^0.11.3": + version: 0.11.4 + resolution: "faye-websocket@npm:0.11.4" + dependencies: + websocket-driver: "npm:>=0.5.1" + checksum: 10c0/c6052a0bb322778ce9f89af92890f6f4ce00d5ec92418a35e5f4c6864a4fe736fec0bcebd47eac7c0f0e979b01530746b1c85c83cb04bae789271abf19737420 + languageName: node + linkType: hard + +"feed@npm:^4.2.2": + version: 4.2.2 + resolution: "feed@npm:4.2.2" + dependencies: + xml-js: "npm:^1.6.11" + checksum: 10c0/c0849bde569da94493224525db00614fd1855a5d7c2e990f6e8637bd0298e85c3d329efe476cba77e711e438c3fb48af60cd5ef0c409da5bcd1f479790b0a372 + languageName: node + linkType: hard + +"file-entry-cache@npm:^6.0.1": + version: 6.0.1 + resolution: "file-entry-cache@npm:6.0.1" + dependencies: + flat-cache: "npm:^3.0.4" + checksum: 10c0/58473e8a82794d01b38e5e435f6feaf648e3f36fdb3a56e98f417f4efae71ad1c0d4ebd8a9a7c50c3ad085820a93fc7494ad721e0e4ebc1da3573f4e1c3c7cdd + languageName: node + linkType: hard + +"file-loader@npm:^6.2.0": + version: 6.2.0 + resolution: "file-loader@npm:6.2.0" + dependencies: + loader-utils: "npm:^2.0.0" + schema-utils: "npm:^3.0.0" + peerDependencies: + webpack: ^4.0.0 || 
^5.0.0 + checksum: 10c0/e176a57c2037ab0f78e5755dbf293a6b7f0f8392350a120bd03cc2ce2525bea017458ba28fea14ca535ff1848055e86d1a3a216bdb2561ef33395b27260a1dd3 + languageName: node + linkType: hard + +"filesize@npm:^8.0.6": + version: 8.0.7 + resolution: "filesize@npm:8.0.7" + checksum: 10c0/82072d94816484df5365d4d5acbb2327a65dc49704c64e403e8c40d8acb7364de1cf1e65cb512c77a15d353870f73e4fed46dad5c6153d0618d9ce7a64d09cfc + languageName: node + linkType: hard + +"fill-range@npm:^7.1.1": + version: 7.1.1 + resolution: "fill-range@npm:7.1.1" + dependencies: + to-regex-range: "npm:^5.0.1" + checksum: 10c0/b75b691bbe065472f38824f694c2f7449d7f5004aa950426a2c28f0306c60db9b880c0b0e4ed819997ffb882d1da02cfcfc819bddc94d71627f5269682edf018 + languageName: node + linkType: hard + +"finalhandler@npm:1.3.1": + version: 1.3.1 + resolution: "finalhandler@npm:1.3.1" + dependencies: + debug: "npm:2.6.9" + encodeurl: "npm:~2.0.0" + escape-html: "npm:~1.0.3" + on-finished: "npm:2.4.1" + parseurl: "npm:~1.3.3" + statuses: "npm:2.0.1" + unpipe: "npm:~1.0.0" + checksum: 10c0/d38035831865a49b5610206a3a9a9aae4e8523cbbcd01175d0480ffbf1278c47f11d89be3ca7f617ae6d94f29cf797546a4619cd84dd109009ef33f12f69019f + languageName: node + linkType: hard + +"find-cache-dir@npm:^4.0.0": + version: 4.0.0 + resolution: "find-cache-dir@npm:4.0.0" + dependencies: + common-path-prefix: "npm:^3.0.0" + pkg-dir: "npm:^7.0.0" + checksum: 10c0/0faa7956974726c8769671de696d24c643ca1e5b8f7a2401283caa9e07a5da093293e0a0f4bd18c920ec981d2ef945c7f5b946cde268dfc9077d833ad0293cff + languageName: node + linkType: hard + +"find-up@npm:^3.0.0": + version: 3.0.0 + resolution: "find-up@npm:3.0.0" + dependencies: + locate-path: "npm:^3.0.0" + checksum: 10c0/2c2e7d0a26db858e2f624f39038c74739e38306dee42b45f404f770db357947be9d0d587f1cac72d20c114deb38aa57316e879eb0a78b17b46da7dab0a3bd6e3 + languageName: node + linkType: hard + +"find-up@npm:^5.0.0": + version: 5.0.0 + resolution: "find-up@npm:5.0.0" + dependencies: + locate-path: "npm:^6.0.0" + path-exists: "npm:^4.0.0" + checksum: 10c0/062c5a83a9c02f53cdd6d175a37ecf8f87ea5bbff1fdfb828f04bfa021441bc7583e8ebc0872a4c1baab96221fb8a8a275a19809fb93fbc40bd69ec35634069a + languageName: node + linkType: hard + +"find-up@npm:^6.3.0": + version: 6.3.0 + resolution: "find-up@npm:6.3.0" + dependencies: + locate-path: "npm:^7.1.0" + path-exists: "npm:^5.0.0" + checksum: 10c0/07e0314362d316b2b13f7f11ea4692d5191e718ca3f7264110127520f3347996349bf9e16805abae3e196805814bc66ef4bff2b8904dc4a6476085fc9b0eba07 + languageName: node + linkType: hard + +"flat-cache@npm:^3.0.4": + version: 3.2.0 + resolution: "flat-cache@npm:3.2.0" + dependencies: + flatted: "npm:^3.2.9" + keyv: "npm:^4.5.3" + rimraf: "npm:^3.0.2" + checksum: 10c0/b76f611bd5f5d68f7ae632e3ae503e678d205cf97a17c6ab5b12f6ca61188b5f1f7464503efae6dc18683ed8f0b41460beb48ac4b9ac63fe6201296a91ba2f75 + languageName: node + linkType: hard + +"flat@npm:^5.0.2": + version: 5.0.2 + resolution: "flat@npm:5.0.2" + bin: + flat: cli.js + checksum: 10c0/f178b13482f0cd80c7fede05f4d10585b1f2fdebf26e12edc138e32d3150c6ea6482b7f12813a1091143bad52bb6d3596bca51a162257a21163c0ff438baa5fe + languageName: node + linkType: hard + +"flatted@npm:^3.2.9": + version: 3.3.1 + resolution: "flatted@npm:3.3.1" + checksum: 10c0/324166b125ee07d4ca9bcf3a5f98d915d5db4f39d711fba640a3178b959919aae1f7cfd8aabcfef5826ed8aa8a2aa14cc85b2d7d18ff638ddf4ae3df39573eaf + languageName: node + linkType: hard + +"follow-redirects@npm:^1.0.0": + version: 1.15.6 + resolution: "follow-redirects@npm:1.15.6" + peerDependenciesMeta: + 
debug: + optional: true + checksum: 10c0/9ff767f0d7be6aa6870c82ac79cf0368cd73e01bbc00e9eb1c2a16fbb198ec105e3c9b6628bb98e9f3ac66fe29a957b9645bcb9a490bb7aa0d35f908b6b85071 + languageName: node + linkType: hard + +"for-each@npm:^0.3.3": + version: 0.3.3 + resolution: "for-each@npm:0.3.3" + dependencies: + is-callable: "npm:^1.1.3" + checksum: 10c0/22330d8a2db728dbf003ec9182c2d421fbcd2969b02b4f97ec288721cda63eb28f2c08585ddccd0f77cb2930af8d958005c9e72f47141dc51816127a118f39aa + languageName: node + linkType: hard + +"foreground-child@npm:^3.1.0": + version: 3.3.0 + resolution: "foreground-child@npm:3.3.0" + dependencies: + cross-spawn: "npm:^7.0.0" + signal-exit: "npm:^4.0.1" + checksum: 10c0/028f1d41000553fcfa6c4bb5c372963bf3d9bf0b1f25a87d1a6253014343fb69dfb1b42d9625d7cf44c8ba429940f3d0ff718b62105d4d4a4f6ef8ca0a53faa2 + languageName: node + linkType: hard + +"fork-ts-checker-webpack-plugin@npm:^6.5.0": + version: 6.5.3 + resolution: "fork-ts-checker-webpack-plugin@npm:6.5.3" + dependencies: + "@babel/code-frame": "npm:^7.8.3" + "@types/json-schema": "npm:^7.0.5" + chalk: "npm:^4.1.0" + chokidar: "npm:^3.4.2" + cosmiconfig: "npm:^6.0.0" + deepmerge: "npm:^4.2.2" + fs-extra: "npm:^9.0.0" + glob: "npm:^7.1.6" + memfs: "npm:^3.1.2" + minimatch: "npm:^3.0.4" + schema-utils: "npm:2.7.0" + semver: "npm:^7.3.2" + tapable: "npm:^1.0.0" + peerDependencies: + eslint: ">= 6" + typescript: ">= 2.7" + vue-template-compiler: "*" + webpack: ">= 4" + peerDependenciesMeta: + eslint: + optional: true + vue-template-compiler: + optional: true + checksum: 10c0/0885ea75474de011d4068ca3e2d3ca6e4cd318f5cfa018e28ff8fef23ef3a1f1c130160ef192d3e5d31ef7b6fe9f8fb1d920eab5e9e449fb30ce5cc96647245c + languageName: node + linkType: hard + +"form-data-encoder@npm:^2.1.2": + version: 2.1.4 + resolution: "form-data-encoder@npm:2.1.4" + checksum: 10c0/4c06ae2b79ad693a59938dc49ebd020ecb58e4584860a90a230f80a68b026483b022ba5e4143cff06ae5ac8fd446a0b500fabc87bbac3d1f62f2757f8dabcaf7 + languageName: node + linkType: hard + +"format@npm:^0.2.0": + version: 0.2.2 + resolution: "format@npm:0.2.2" + checksum: 10c0/6032ba747541a43abf3e37b402b2f72ee08ebcb58bf84d816443dd228959837f1cddf1e8775b29fa27ff133f4bd146d041bfca5f9cf27f048edf3d493cf8fee6 + languageName: node + linkType: hard + +"forwarded@npm:0.2.0": + version: 0.2.0 + resolution: "forwarded@npm:0.2.0" + checksum: 10c0/9b67c3fac86acdbc9ae47ba1ddd5f2f81526fa4c8226863ede5600a3f7c7416ef451f6f1e240a3cc32d0fd79fcfe6beb08fd0da454f360032bde70bf80afbb33 + languageName: node + linkType: hard + +"fraction.js@npm:^4.3.7": + version: 4.3.7 + resolution: "fraction.js@npm:4.3.7" + checksum: 10c0/df291391beea9ab4c263487ffd9d17fed162dbb736982dee1379b2a8cc94e4e24e46ed508c6d278aded9080ba51872f1bc5f3a5fd8d7c74e5f105b508ac28711 + languageName: node + linkType: hard + +"fresh@npm:0.5.2": + version: 0.5.2 + resolution: "fresh@npm:0.5.2" + checksum: 10c0/c6d27f3ed86cc5b601404822f31c900dd165ba63fff8152a3ef714e2012e7535027063bc67ded4cb5b3a49fa596495d46cacd9f47d6328459cf570f08b7d9e5a + languageName: node + linkType: hard + +"fs-constants@npm:^1.0.0": + version: 1.0.0 + resolution: "fs-constants@npm:1.0.0" + checksum: 10c0/a0cde99085f0872f4d244e83e03a46aa387b74f5a5af750896c6b05e9077fac00e9932fdf5aef84f2f16634cd473c63037d7a512576da7d5c2b9163d1909f3a8 + languageName: node + linkType: hard + +"fs-extra@npm:^11.1.1, fs-extra@npm:^11.2.0": + version: 11.2.0 + resolution: "fs-extra@npm:11.2.0" + dependencies: + graceful-fs: "npm:^4.2.0" + jsonfile: "npm:^6.0.1" + universalify: "npm:^2.0.0" + checksum: 
10c0/d77a9a9efe60532d2e790e938c81a02c1b24904ef7a3efb3990b835514465ba720e99a6ea56fd5e2db53b4695319b644d76d5a0e9988a2beef80aa7b1da63398 + languageName: node + linkType: hard + +"fs-extra@npm:^9.0.0": + version: 9.1.0 + resolution: "fs-extra@npm:9.1.0" + dependencies: + at-least-node: "npm:^1.0.0" + graceful-fs: "npm:^4.2.0" + jsonfile: "npm:^6.0.1" + universalify: "npm:^2.0.0" + checksum: 10c0/9b808bd884beff5cb940773018179a6b94a966381d005479f00adda6b44e5e3d4abf765135773d849cc27efe68c349e4a7b86acd7d3306d5932c14f3a4b17a92 + languageName: node + linkType: hard + +"fs-minipass@npm:^2.0.0": + version: 2.1.0 + resolution: "fs-minipass@npm:2.1.0" + dependencies: + minipass: "npm:^3.0.0" + checksum: 10c0/703d16522b8282d7299337539c3ed6edddd1afe82435e4f5b76e34a79cd74e488a8a0e26a636afc2440e1a23b03878e2122e3a2cfe375a5cf63c37d92b86a004 + languageName: node + linkType: hard + +"fs-minipass@npm:^3.0.0": + version: 3.0.3 + resolution: "fs-minipass@npm:3.0.3" + dependencies: + minipass: "npm:^7.0.3" + checksum: 10c0/63e80da2ff9b621e2cb1596abcb9207f1cf82b968b116ccd7b959e3323144cce7fb141462200971c38bbf2ecca51695069db45265705bed09a7cd93ae5b89f94 + languageName: node + linkType: hard + +"fs-monkey@npm:^1.0.4": + version: 1.0.6 + resolution: "fs-monkey@npm:1.0.6" + checksum: 10c0/6f2508e792a47e37b7eabd5afc79459c1ea72bce2a46007d2b7ed0bfc3a4d64af38975c6eb7e93edb69ac98bbb907c13ff1b1579b2cf52d3d02dbc0303fca79f + languageName: node + linkType: hard + +"fs.realpath@npm:^1.0.0": + version: 1.0.0 + resolution: "fs.realpath@npm:1.0.0" + checksum: 10c0/444cf1291d997165dfd4c0d58b69f0e4782bfd9149fd72faa4fe299e68e0e93d6db941660b37dd29153bf7186672ececa3b50b7e7249477b03fdf850f287c948 + languageName: node + linkType: hard + +"fsevents@npm:~2.3.2": + version: 2.3.3 + resolution: "fsevents@npm:2.3.3" + dependencies: + node-gyp: "npm:latest" + checksum: 10c0/a1f0c44595123ed717febbc478aa952e47adfc28e2092be66b8ab1635147254ca6cfe1df792a8997f22716d4cbafc73309899ff7bfac2ac3ad8cf2e4ecc3ec60 + conditions: os=darwin + languageName: node + linkType: hard + +"fsevents@patch:fsevents@npm%3A~2.3.2#optional!builtin": + version: 2.3.3 + resolution: "fsevents@patch:fsevents@npm%3A2.3.3#optional!builtin::version=2.3.3&hash=df0bf1" + dependencies: + node-gyp: "npm:latest" + conditions: os=darwin + languageName: node + linkType: hard + +"function-bind@npm:^1.1.2": + version: 1.1.2 + resolution: "function-bind@npm:1.1.2" + checksum: 10c0/d8680ee1e5fcd4c197e4ac33b2b4dce03c71f4d91717292785703db200f5c21f977c568d28061226f9b5900cbcd2c84463646134fd5337e7925e0942bc3f46d5 + languageName: node + linkType: hard + +"function.prototype.name@npm:^1.1.6": + version: 1.1.6 + resolution: "function.prototype.name@npm:1.1.6" + dependencies: + call-bind: "npm:^1.0.2" + define-properties: "npm:^1.2.0" + es-abstract: "npm:^1.22.1" + functions-have-names: "npm:^1.2.3" + checksum: 10c0/9eae11294905b62cb16874adb4fc687927cda3162285e0ad9612e6a1d04934005d46907362ea9cdb7428edce05a2f2c3dabc3b2d21e9fd343e9bb278230ad94b + languageName: node + linkType: hard + +"functions-have-names@npm:^1.2.3": + version: 1.2.3 + resolution: "functions-have-names@npm:1.2.3" + checksum: 10c0/33e77fd29bddc2d9bb78ab3eb854c165909201f88c75faa8272e35899e2d35a8a642a15e7420ef945e1f64a9670d6aa3ec744106b2aa42be68ca5114025954ca + languageName: node + linkType: hard + +"gensync@npm:^1.0.0-beta.2": + version: 1.0.0-beta.2 + resolution: "gensync@npm:1.0.0-beta.2" + checksum: 10c0/782aba6cba65b1bb5af3b095d96249d20edbe8df32dbf4696fd49be2583faf676173bf4809386588828e4dd76a3354fcbeb577bab1c833ccd9fc4577f26103f8 + 
languageName: node + linkType: hard + +"get-intrinsic@npm:^1.1.3, get-intrinsic@npm:^1.2.1, get-intrinsic@npm:^1.2.3, get-intrinsic@npm:^1.2.4": + version: 1.2.4 + resolution: "get-intrinsic@npm:1.2.4" + dependencies: + es-errors: "npm:^1.3.0" + function-bind: "npm:^1.1.2" + has-proto: "npm:^1.0.1" + has-symbols: "npm:^1.0.3" + hasown: "npm:^2.0.0" + checksum: 10c0/0a9b82c16696ed6da5e39b1267104475c47e3a9bdbe8b509dfe1710946e38a87be70d759f4bb3cda042d76a41ef47fe769660f3b7c0d1f68750299344ffb15b7 + languageName: node + linkType: hard + +"get-own-enumerable-property-symbols@npm:^3.0.0": + version: 3.0.2 + resolution: "get-own-enumerable-property-symbols@npm:3.0.2" + checksum: 10c0/103999855f3d1718c631472437161d76962cbddcd95cc642a34c07bfb661ed41b6c09a9c669ccdff89ee965beb7126b80eec7b2101e20e31e9cc6c4725305e10 + languageName: node + linkType: hard + +"get-stream@npm:^6.0.0, get-stream@npm:^6.0.1": + version: 6.0.1 + resolution: "get-stream@npm:6.0.1" + checksum: 10c0/49825d57d3fd6964228e6200a58169464b8e8970489b3acdc24906c782fb7f01f9f56f8e6653c4a50713771d6658f7cfe051e5eb8c12e334138c9c918b296341 + languageName: node + linkType: hard + +"get-symbol-description@npm:^1.0.2": + version: 1.0.2 + resolution: "get-symbol-description@npm:1.0.2" + dependencies: + call-bind: "npm:^1.0.5" + es-errors: "npm:^1.3.0" + get-intrinsic: "npm:^1.2.4" + checksum: 10c0/867be6d63f5e0eb026cb3b0ef695ec9ecf9310febb041072d2e142f260bd91ced9eeb426b3af98791d1064e324e653424afa6fd1af17dee373bea48ae03162bc + languageName: node + linkType: hard + +"github-from-package@npm:0.0.0": + version: 0.0.0 + resolution: "github-from-package@npm:0.0.0" + checksum: 10c0/737ee3f52d0a27e26332cde85b533c21fcdc0b09fb716c3f8e522cfaa9c600d4a631dec9fcde179ec9d47cca89017b7848ed4d6ae6b6b78f936c06825b1fcc12 + languageName: node + linkType: hard + +"github-slugger@npm:^1.5.0": + version: 1.5.0 + resolution: "github-slugger@npm:1.5.0" + checksum: 10c0/116f99732925f939cbfd6f2e57db1aa7e111a460db0d103e3b3f2fce6909d44311663d4542350706cad806345b9892358cc3b153674f88eeae77f43380b3bfca + languageName: node + linkType: hard + +"glob-parent@npm:^5.1.2, glob-parent@npm:~5.1.2": + version: 5.1.2 + resolution: "glob-parent@npm:5.1.2" + dependencies: + is-glob: "npm:^4.0.1" + checksum: 10c0/cab87638e2112bee3f839ef5f6e0765057163d39c66be8ec1602f3823da4692297ad4e972de876ea17c44d652978638d2fd583c6713d0eb6591706825020c9ee + languageName: node + linkType: hard + +"glob-parent@npm:^6.0.1, glob-parent@npm:^6.0.2": + version: 6.0.2 + resolution: "glob-parent@npm:6.0.2" + dependencies: + is-glob: "npm:^4.0.3" + checksum: 10c0/317034d88654730230b3f43bb7ad4f7c90257a426e872ea0bf157473ac61c99bf5d205fad8f0185f989be8d2fa6d3c7dce1645d99d545b6ea9089c39f838e7f8 + languageName: node + linkType: hard + +"glob-to-regexp@npm:^0.4.1": + version: 0.4.1 + resolution: "glob-to-regexp@npm:0.4.1" + checksum: 10c0/0486925072d7a916f052842772b61c3e86247f0a80cc0deb9b5a3e8a1a9faad5b04fb6f58986a09f34d3e96cd2a22a24b7e9882fb1cf904c31e9a310de96c429 + languageName: node + linkType: hard + +"glob@npm:^10.0.0, glob@npm:^10.2.2, glob@npm:^10.3.10": + version: 10.4.5 + resolution: "glob@npm:10.4.5" + dependencies: + foreground-child: "npm:^3.1.0" + jackspeak: "npm:^3.1.2" + minimatch: "npm:^9.0.4" + minipass: "npm:^7.1.2" + package-json-from-dist: "npm:^1.0.0" + path-scurry: "npm:^1.11.1" + bin: + glob: dist/esm/bin.mjs + checksum: 10c0/19a9759ea77b8e3ca0a43c2f07ecddc2ad46216b786bb8f993c445aee80d345925a21e5280c7b7c6c59e860a0154b84e4b2b60321fea92cd3c56b4a7489f160e + languageName: node + linkType: hard + 
+"glob@npm:^7.0.0, glob@npm:^7.1.3, glob@npm:^7.1.6": + version: 7.2.3 + resolution: "glob@npm:7.2.3" + dependencies: + fs.realpath: "npm:^1.0.0" + inflight: "npm:^1.0.4" + inherits: "npm:2" + minimatch: "npm:^3.1.1" + once: "npm:^1.3.0" + path-is-absolute: "npm:^1.0.0" + checksum: 10c0/65676153e2b0c9095100fe7f25a778bf45608eeb32c6048cf307f579649bcc30353277b3b898a3792602c65764e5baa4f643714dfbdfd64ea271d210c7a425fe + languageName: node + linkType: hard + +"global-dirs@npm:^3.0.0": + version: 3.0.1 + resolution: "global-dirs@npm:3.0.1" + dependencies: + ini: "npm:2.0.0" + checksum: 10c0/ef65e2241a47ff978f7006a641302bc7f4c03dfb98783d42bf7224c136e3a06df046e70ee3a010cf30214114755e46c9eb5eb1513838812fbbe0d92b14c25080 + languageName: node + linkType: hard + +"global-modules@npm:^2.0.0": + version: 2.0.0 + resolution: "global-modules@npm:2.0.0" + dependencies: + global-prefix: "npm:^3.0.0" + checksum: 10c0/43b770fe24aa6028f4b9770ea583a47f39750be15cf6e2578f851e4ccc9e4fa674b8541928c0b09c21461ca0763f0d36e4068cec86c914b07fd6e388e66ba5b9 + languageName: node + linkType: hard + +"global-prefix@npm:^3.0.0": + version: 3.0.0 + resolution: "global-prefix@npm:3.0.0" + dependencies: + ini: "npm:^1.3.5" + kind-of: "npm:^6.0.2" + which: "npm:^1.3.1" + checksum: 10c0/510f489fb68d1cc7060f276541709a0ee6d41356ef852de48f7906c648ac223082a1cc8fce86725ca6c0e032bcdc1189ae77b4744a624b29c34a9d0ece498269 + languageName: node + linkType: hard + +"globals@npm:^11.1.0": + version: 11.12.0 + resolution: "globals@npm:11.12.0" + checksum: 10c0/758f9f258e7b19226bd8d4af5d3b0dcf7038780fb23d82e6f98932c44e239f884847f1766e8fa9cc5635ccb3204f7fa7314d4408dd4002a5e8ea827b4018f0a1 + languageName: node + linkType: hard + +"globals@npm:^13.19.0": + version: 13.24.0 + resolution: "globals@npm:13.24.0" + dependencies: + type-fest: "npm:^0.20.2" + checksum: 10c0/d3c11aeea898eb83d5ec7a99508600fbe8f83d2cf00cbb77f873dbf2bcb39428eff1b538e4915c993d8a3b3473fa71eeebfe22c9bb3a3003d1e26b1f2c8a42cd + languageName: node + linkType: hard + +"globals@npm:^15.9.0": + version: 15.9.0 + resolution: "globals@npm:15.9.0" + checksum: 10c0/de4b553e412e7e830998578d51b605c492256fb2a9273eaeec6ec9ee519f1c5aa50de57e3979911607fd7593a4066420e01d8c3d551e7a6a236e96c521aee36c + languageName: node + linkType: hard + +"globalthis@npm:^1.0.3": + version: 1.0.4 + resolution: "globalthis@npm:1.0.4" + dependencies: + define-properties: "npm:^1.2.1" + gopd: "npm:^1.0.1" + checksum: 10c0/9d156f313af79d80b1566b93e19285f481c591ad6d0d319b4be5e03750d004dde40a39a0f26f7e635f9007a3600802f53ecd85a759b86f109e80a5f705e01846 + languageName: node + linkType: hard + +"globby@npm:^11.0.1, globby@npm:^11.0.4, globby@npm:^11.1.0": + version: 11.1.0 + resolution: "globby@npm:11.1.0" + dependencies: + array-union: "npm:^2.1.0" + dir-glob: "npm:^3.0.1" + fast-glob: "npm:^3.2.9" + ignore: "npm:^5.2.0" + merge2: "npm:^1.4.1" + slash: "npm:^3.0.0" + checksum: 10c0/b39511b4afe4bd8a7aead3a27c4ade2b9968649abab0a6c28b1a90141b96ca68ca5db1302f7c7bd29eab66bf51e13916b8e0a3d0ac08f75e1e84a39b35691189 + languageName: node + linkType: hard + +"globby@npm:^13.1.1": + version: 13.2.2 + resolution: "globby@npm:13.2.2" + dependencies: + dir-glob: "npm:^3.0.1" + fast-glob: "npm:^3.3.0" + ignore: "npm:^5.2.4" + merge2: "npm:^1.4.1" + slash: "npm:^4.0.0" + checksum: 10c0/a8d7cc7cbe5e1b2d0f81d467bbc5bc2eac35f74eaded3a6c85fc26d7acc8e6de22d396159db8a2fc340b8a342e74cac58de8f4aee74146d3d146921a76062664 + languageName: node + linkType: hard + +"gopd@npm:^1.0.1": + version: 1.0.1 + resolution: "gopd@npm:1.0.1" + dependencies: + 
get-intrinsic: "npm:^1.1.3" + checksum: 10c0/505c05487f7944c552cee72087bf1567debb470d4355b1335f2c262d218ebbff805cd3715448fe29b4b380bae6912561d0467233e4165830efd28da241418c63 + languageName: node + linkType: hard + +"got@npm:^12.1.0": + version: 12.6.1 + resolution: "got@npm:12.6.1" + dependencies: + "@sindresorhus/is": "npm:^5.2.0" + "@szmarczak/http-timer": "npm:^5.0.1" + cacheable-lookup: "npm:^7.0.0" + cacheable-request: "npm:^10.2.8" + decompress-response: "npm:^6.0.0" + form-data-encoder: "npm:^2.1.2" + get-stream: "npm:^6.0.1" + http2-wrapper: "npm:^2.1.10" + lowercase-keys: "npm:^3.0.0" + p-cancelable: "npm:^3.0.0" + responselike: "npm:^3.0.0" + checksum: 10c0/2fe97fcbd7a9ffc7c2d0ecf59aca0a0562e73a7749cadada9770eeb18efbdca3086262625fb65590594edc220a1eca58fab0d26b0c93c2f9a008234da71ca66b + languageName: node + linkType: hard + +"graceful-fs@npm:4.2.10": + version: 4.2.10 + resolution: "graceful-fs@npm:4.2.10" + checksum: 10c0/4223a833e38e1d0d2aea630c2433cfb94ddc07dfc11d511dbd6be1d16688c5be848acc31f9a5d0d0ddbfb56d2ee5a6ae0278aceeb0ca6a13f27e06b9956fb952 + languageName: node + linkType: hard + +"graceful-fs@npm:^4.1.2, graceful-fs@npm:^4.1.6, graceful-fs@npm:^4.2.0, graceful-fs@npm:^4.2.11, graceful-fs@npm:^4.2.4, graceful-fs@npm:^4.2.6, graceful-fs@npm:^4.2.9": + version: 4.2.11 + resolution: "graceful-fs@npm:4.2.11" + checksum: 10c0/386d011a553e02bc594ac2ca0bd6d9e4c22d7fa8cfbfc448a6d148c59ea881b092db9dbe3547ae4b88e55f1b01f7c4a2ecc53b310c042793e63aa44cf6c257f2 + languageName: node + linkType: hard + +"graphemer@npm:^1.4.0": + version: 1.4.0 + resolution: "graphemer@npm:1.4.0" + checksum: 10c0/e951259d8cd2e0d196c72ec711add7115d42eb9a8146c8eeda5b8d3ac91e5dd816b9cd68920726d9fd4490368e7ed86e9c423f40db87e2d8dfafa00fa17c3a31 + languageName: node + linkType: hard + +"gray-matter@npm:^4.0.3": + version: 4.0.3 + resolution: "gray-matter@npm:4.0.3" + dependencies: + js-yaml: "npm:^3.13.1" + kind-of: "npm:^6.0.2" + section-matter: "npm:^1.0.0" + strip-bom-string: "npm:^1.0.0" + checksum: 10c0/e38489906dad4f162ca01e0dcbdbed96d1a53740cef446b9bf76d80bec66fa799af07776a18077aee642346c5e1365ed95e4c91854a12bf40ba0d4fb43a625a6 + languageName: node + linkType: hard + +"gzip-size@npm:^6.0.0": + version: 6.0.0 + resolution: "gzip-size@npm:6.0.0" + dependencies: + duplexer: "npm:^0.1.2" + checksum: 10c0/4ccb924626c82125897a997d1c84f2377846a6ef57fbee38f7c0e6b41387fba4d00422274440747b58008b5d60114bac2349c2908e9aba55188345281af40a3f + languageName: node + linkType: hard + +"handle-thing@npm:^2.0.0": + version: 2.0.1 + resolution: "handle-thing@npm:2.0.1" + checksum: 10c0/7ae34ba286a3434f1993ebd1cc9c9e6b6d8ea672182db28b1afc0a7119229552fa7031e3e5f3cd32a76430ece4e94b7da6f12af2eb39d6239a7693e4bd63a998 + languageName: node + linkType: hard + +"has-ansi@npm:^2.0.0": + version: 2.0.0 + resolution: "has-ansi@npm:2.0.0" + dependencies: + ansi-regex: "npm:^2.0.0" + checksum: 10c0/f54e4887b9f8f3c4bfefd649c48825b3c093987c92c27880ee9898539e6f01aed261e82e73153c3f920fde0db5bf6ebd58deb498ed1debabcb4bc40113ccdf05 + languageName: node + linkType: hard + +"has-bigints@npm:^1.0.1, has-bigints@npm:^1.0.2": + version: 1.0.2 + resolution: "has-bigints@npm:1.0.2" + checksum: 10c0/724eb1485bfa3cdff6f18d95130aa190561f00b3fcf9f19dc640baf8176b5917c143b81ec2123f8cddb6c05164a198c94b13e1377c497705ccc8e1a80306e83b + languageName: node + linkType: hard + +"has-flag@npm:^3.0.0": + version: 3.0.0 + resolution: "has-flag@npm:3.0.0" + checksum: 
10c0/1c6c83b14b8b1b3c25b0727b8ba3e3b647f99e9e6e13eb7322107261de07a4c1be56fc0d45678fc376e09772a3a1642ccdaf8fc69bdf123b6c086598397ce473 + languageName: node + linkType: hard + +"has-flag@npm:^4.0.0": + version: 4.0.0 + resolution: "has-flag@npm:4.0.0" + checksum: 10c0/2e789c61b7888d66993e14e8331449e525ef42aac53c627cc53d1c3334e768bcb6abdc4f5f0de1478a25beec6f0bd62c7549058b7ac53e924040d4f301f02fd1 + languageName: node + linkType: hard + +"has-property-descriptors@npm:^1.0.0, has-property-descriptors@npm:^1.0.2": + version: 1.0.2 + resolution: "has-property-descriptors@npm:1.0.2" + dependencies: + es-define-property: "npm:^1.0.0" + checksum: 10c0/253c1f59e80bb476cf0dde8ff5284505d90c3bdb762983c3514d36414290475fe3fd6f574929d84de2a8eec00d35cf07cb6776205ff32efd7c50719125f00236 + languageName: node + linkType: hard + +"has-proto@npm:^1.0.1, has-proto@npm:^1.0.3": + version: 1.0.3 + resolution: "has-proto@npm:1.0.3" + checksum: 10c0/35a6989f81e9f8022c2f4027f8b48a552de714938765d019dbea6bb547bd49ce5010a3c7c32ec6ddac6e48fc546166a3583b128f5a7add8b058a6d8b4afec205 + languageName: node + linkType: hard + +"has-symbols@npm:^1.0.2, has-symbols@npm:^1.0.3": + version: 1.0.3 + resolution: "has-symbols@npm:1.0.3" + checksum: 10c0/e6922b4345a3f37069cdfe8600febbca791c94988c01af3394d86ca3360b4b93928bbf395859158f88099cb10b19d98e3bbab7c9ff2c1bd09cf665ee90afa2c3 + languageName: node + linkType: hard + +"has-tostringtag@npm:^1.0.0, has-tostringtag@npm:^1.0.2": + version: 1.0.2 + resolution: "has-tostringtag@npm:1.0.2" + dependencies: + has-symbols: "npm:^1.0.3" + checksum: 10c0/a8b166462192bafe3d9b6e420a1d581d93dd867adb61be223a17a8d6dad147aa77a8be32c961bb2f27b3ef893cae8d36f564ab651f5e9b7938ae86f74027c48c + languageName: node + linkType: hard + +"has-yarn@npm:^3.0.0": + version: 3.0.0 + resolution: "has-yarn@npm:3.0.0" + checksum: 10c0/38c76618cb764e4a98ea114a3938e0bed6ceafb6bacab2ffb32e7c7d1e18b5e09cd03387d507ee87072388e1f20b1f80947fee62c41fc450edfbbdc02a665787 + languageName: node + linkType: hard + +"hasown@npm:^2.0.0, hasown@npm:^2.0.1, hasown@npm:^2.0.2": + version: 2.0.2 + resolution: "hasown@npm:2.0.2" + dependencies: + function-bind: "npm:^1.1.2" + checksum: 10c0/3769d434703b8ac66b209a4cca0737519925bbdb61dd887f93a16372b14694c63ff4e797686d87c90f08168e81082248b9b028bad60d4da9e0d1148766f56eb9 + languageName: node + linkType: hard + +"hast-util-from-parse5@npm:^8.0.0": + version: 8.0.1 + resolution: "hast-util-from-parse5@npm:8.0.1" + dependencies: + "@types/hast": "npm:^3.0.0" + "@types/unist": "npm:^3.0.0" + devlop: "npm:^1.0.0" + hastscript: "npm:^8.0.0" + property-information: "npm:^6.0.0" + vfile: "npm:^6.0.0" + vfile-location: "npm:^5.0.0" + web-namespaces: "npm:^2.0.0" + checksum: 10c0/4a30bb885cff1f0e023c429ae3ece73fe4b03386f07234bf23f5555ca087c2573ff4e551035b417ed7615bde559f394cdaf1db2b91c3b7f0575f3563cd238969 + languageName: node + linkType: hard + +"hast-util-parse-selector@npm:^4.0.0": + version: 4.0.0 + resolution: "hast-util-parse-selector@npm:4.0.0" + dependencies: + "@types/hast": "npm:^3.0.0" + checksum: 10c0/5e98168cb44470dc274aabf1a28317e4feb09b1eaf7a48bbaa8c1de1b43a89cd195cb1284e535698e658e3ec26ad91bc5e52c9563c36feb75abbc68aaf68fb9f + languageName: node + linkType: hard + +"hast-util-raw@npm:^9.0.0": + version: 9.0.4 + resolution: "hast-util-raw@npm:9.0.4" + dependencies: + "@types/hast": "npm:^3.0.0" + "@types/unist": "npm:^3.0.0" + "@ungap/structured-clone": "npm:^1.0.0" + hast-util-from-parse5: "npm:^8.0.0" + hast-util-to-parse5: "npm:^8.0.0" + html-void-elements: "npm:^3.0.0" + 
mdast-util-to-hast: "npm:^13.0.0" + parse5: "npm:^7.0.0" + unist-util-position: "npm:^5.0.0" + unist-util-visit: "npm:^5.0.0" + vfile: "npm:^6.0.0" + web-namespaces: "npm:^2.0.0" + zwitch: "npm:^2.0.0" + checksum: 10c0/03d0fe7ba8bd75c9ce81f829650b19b78917bbe31db70d36bf6f136842496c3474e3bb1841f2d30dafe1f6b561a89a524185492b9a93d40b131000743c0d7998 + languageName: node + linkType: hard + +"hast-util-to-estree@npm:^3.0.0": + version: 3.1.0 + resolution: "hast-util-to-estree@npm:3.1.0" + dependencies: + "@types/estree": "npm:^1.0.0" + "@types/estree-jsx": "npm:^1.0.0" + "@types/hast": "npm:^3.0.0" + comma-separated-tokens: "npm:^2.0.0" + devlop: "npm:^1.0.0" + estree-util-attach-comments: "npm:^3.0.0" + estree-util-is-identifier-name: "npm:^3.0.0" + hast-util-whitespace: "npm:^3.0.0" + mdast-util-mdx-expression: "npm:^2.0.0" + mdast-util-mdx-jsx: "npm:^3.0.0" + mdast-util-mdxjs-esm: "npm:^2.0.0" + property-information: "npm:^6.0.0" + space-separated-tokens: "npm:^2.0.0" + style-to-object: "npm:^0.4.0" + unist-util-position: "npm:^5.0.0" + zwitch: "npm:^2.0.0" + checksum: 10c0/9003a8bac26a4580d5fc9f2a271d17330dd653266425e9f5539feecd2f7538868d6630a18f70698b8b804bf14c306418a3f4ab3119bb4692aca78b0c08b1291e + languageName: node + linkType: hard + +"hast-util-to-jsx-runtime@npm:^2.0.0": + version: 2.3.0 + resolution: "hast-util-to-jsx-runtime@npm:2.3.0" + dependencies: + "@types/estree": "npm:^1.0.0" + "@types/hast": "npm:^3.0.0" + "@types/unist": "npm:^3.0.0" + comma-separated-tokens: "npm:^2.0.0" + devlop: "npm:^1.0.0" + estree-util-is-identifier-name: "npm:^3.0.0" + hast-util-whitespace: "npm:^3.0.0" + mdast-util-mdx-expression: "npm:^2.0.0" + mdast-util-mdx-jsx: "npm:^3.0.0" + mdast-util-mdxjs-esm: "npm:^2.0.0" + property-information: "npm:^6.0.0" + space-separated-tokens: "npm:^2.0.0" + style-to-object: "npm:^1.0.0" + unist-util-position: "npm:^5.0.0" + vfile-message: "npm:^4.0.0" + checksum: 10c0/df7a36dcc792df7667a54438f044b721753d5e09692606d23bf7336bf4651670111fe7728eebbf9f0e4f96ab3346a05bb23037fa1b1d115482b3bc5bde8b6912 + languageName: node + linkType: hard + +"hast-util-to-parse5@npm:^8.0.0": + version: 8.0.0 + resolution: "hast-util-to-parse5@npm:8.0.0" + dependencies: + "@types/hast": "npm:^3.0.0" + comma-separated-tokens: "npm:^2.0.0" + devlop: "npm:^1.0.0" + property-information: "npm:^6.0.0" + space-separated-tokens: "npm:^2.0.0" + web-namespaces: "npm:^2.0.0" + zwitch: "npm:^2.0.0" + checksum: 10c0/3c0c7fba026e0c4be4675daf7277f9ff22ae6da801435f1b7104f7740de5422576f1c025023c7b3df1d0a161e13a04c6ab8f98ada96eb50adb287b537849a2bd + languageName: node + linkType: hard + +"hast-util-whitespace@npm:^3.0.0": + version: 3.0.0 + resolution: "hast-util-whitespace@npm:3.0.0" + dependencies: + "@types/hast": "npm:^3.0.0" + checksum: 10c0/b898bc9fe27884b272580d15260b6bbdabe239973a147e97fa98c45fa0ffec967a481aaa42291ec34fb56530dc2d484d473d7e2bae79f39c83f3762307edfea8 + languageName: node + linkType: hard + +"hastscript@npm:^8.0.0": + version: 8.0.0 + resolution: "hastscript@npm:8.0.0" + dependencies: + "@types/hast": "npm:^3.0.0" + comma-separated-tokens: "npm:^2.0.0" + hast-util-parse-selector: "npm:^4.0.0" + property-information: "npm:^6.0.0" + space-separated-tokens: "npm:^2.0.0" + checksum: 10c0/f0b54bbdd710854b71c0f044612db0fe1b5e4d74fa2001633dc8c535c26033269f04f536f9fd5b03f234de1111808f9e230e9d19493bf919432bb24d541719e0 + languageName: node + linkType: hard + +"he@npm:^1.2.0": + version: 1.2.0 + resolution: "he@npm:1.2.0" + bin: + he: bin/he + checksum: 
10c0/a27d478befe3c8192f006cdd0639a66798979dfa6e2125c6ac582a19a5ebfec62ad83e8382e6036170d873f46e4536a7e795bf8b95bf7c247f4cc0825ccc8c17 + languageName: node + linkType: hard + +"history@npm:^4.9.0": + version: 4.10.1 + resolution: "history@npm:4.10.1" + dependencies: + "@babel/runtime": "npm:^7.1.2" + loose-envify: "npm:^1.2.0" + resolve-pathname: "npm:^3.0.0" + tiny-invariant: "npm:^1.0.2" + tiny-warning: "npm:^1.0.0" + value-equal: "npm:^1.0.1" + checksum: 10c0/35377694e4f10f2cf056a9cb1a8ee083e04e4b4717a63baeee4afd565658a62c7e73700bf9e82aa53dbe1ec94e0a25a83c080d63bad8ee6b274a98d2fbc5ed4c + languageName: node + linkType: hard + +"hoist-non-react-statics@npm:^3.1.0": + version: 3.3.2 + resolution: "hoist-non-react-statics@npm:3.3.2" + dependencies: + react-is: "npm:^16.7.0" + checksum: 10c0/fe0889169e845d738b59b64badf5e55fa3cf20454f9203d1eb088df322d49d4318df774828e789898dcb280e8a5521bb59b3203385662ca5e9218a6ca5820e74 + languageName: node + linkType: hard + +"hosted-git-info@npm:^7.0.0": + version: 7.0.2 + resolution: "hosted-git-info@npm:7.0.2" + dependencies: + lru-cache: "npm:^10.0.1" + checksum: 10c0/b19dbd92d3c0b4b0f1513cf79b0fc189f54d6af2129eeb201de2e9baaa711f1936929c848b866d9c8667a0f956f34bf4f07418c12be1ee9ca74fd9246335ca1f + languageName: node + linkType: hard + +"hpack.js@npm:^2.1.6": + version: 2.1.6 + resolution: "hpack.js@npm:2.1.6" + dependencies: + inherits: "npm:^2.0.1" + obuf: "npm:^1.0.0" + readable-stream: "npm:^2.0.1" + wbuf: "npm:^1.1.0" + checksum: 10c0/55b9e824430bab82a19d079cb6e33042d7d0640325678c9917fcc020c61d8a08ca671b6c942c7f0aae9bb6e4b67ffb50734a72f9e21d66407c3138c1983b70f0 + languageName: node + linkType: hard + +"html-entities@npm:^2.3.2": + version: 2.5.2 + resolution: "html-entities@npm:2.5.2" + checksum: 10c0/f20ffb4326606245c439c231de40a7c560607f639bf40ffbfb36b4c70729fd95d7964209045f1a4e62fe17f2364cef3d6e49b02ea09016f207fde51c2211e481 + languageName: node + linkType: hard + +"html-escaper@npm:^2.0.2": + version: 2.0.2 + resolution: "html-escaper@npm:2.0.2" + checksum: 10c0/208e8a12de1a6569edbb14544f4567e6ce8ecc30b9394fcaa4e7bb1e60c12a7c9a1ed27e31290817157e8626f3a4f29e76c8747030822eb84a6abb15c255f0a0 + languageName: node + linkType: hard + +"html-minifier-terser@npm:^6.0.2": + version: 6.1.0 + resolution: "html-minifier-terser@npm:6.1.0" + dependencies: + camel-case: "npm:^4.1.2" + clean-css: "npm:^5.2.2" + commander: "npm:^8.3.0" + he: "npm:^1.2.0" + param-case: "npm:^3.0.4" + relateurl: "npm:^0.2.7" + terser: "npm:^5.10.0" + bin: + html-minifier-terser: cli.js + checksum: 10c0/1aa4e4f01cf7149e3ac5ea84fb7a1adab86da40d38d77a6fff42852b5ee3daccb78b615df97264e3a6a5c33e57f0c77f471d607ca1e1debd1dab9b58286f4b5a + languageName: node + linkType: hard + +"html-minifier-terser@npm:^7.2.0": + version: 7.2.0 + resolution: "html-minifier-terser@npm:7.2.0" + dependencies: + camel-case: "npm:^4.1.2" + clean-css: "npm:~5.3.2" + commander: "npm:^10.0.0" + entities: "npm:^4.4.0" + param-case: "npm:^3.0.4" + relateurl: "npm:^0.2.7" + terser: "npm:^5.15.1" + bin: + html-minifier-terser: cli.js + checksum: 10c0/ffc97c17299d9ec30e17269781b816ea2fc411a9206fc9e768be8f2decb1ea1470892809babb23bb4e3ab1f64d606d97e1803bf526ae3af71edc0fd3070b94b9 + languageName: node + linkType: hard + +"html-tags@npm:^3.3.1": + version: 3.3.1 + resolution: "html-tags@npm:3.3.1" + checksum: 10c0/680165e12baa51bad7397452d247dbcc5a5c29dac0e6754b1187eee3bf26f514bc1907a431dd2f7eb56207611ae595ee76a0acc8eaa0d931e72c791dd6463d79 + languageName: node + linkType: hard + +"html-void-elements@npm:^3.0.0": + version: 
3.0.0 + resolution: "html-void-elements@npm:3.0.0" + checksum: 10c0/a8b9ec5db23b7c8053876dad73a0336183e6162bf6d2677376d8b38d654fdc59ba74fdd12f8812688f7db6fad451210c91b300e472afc0909224e0a44c8610d2 + languageName: node + linkType: hard + +"html-webpack-plugin@npm:^5.5.3": + version: 5.6.0 + resolution: "html-webpack-plugin@npm:5.6.0" + dependencies: + "@types/html-minifier-terser": "npm:^6.0.0" + html-minifier-terser: "npm:^6.0.2" + lodash: "npm:^4.17.21" + pretty-error: "npm:^4.0.0" + tapable: "npm:^2.0.0" + peerDependencies: + "@rspack/core": 0.x || 1.x + webpack: ^5.20.0 + peerDependenciesMeta: + "@rspack/core": + optional: true + webpack: + optional: true + checksum: 10c0/50d1a0f90d512463ea8d798985d91a7ccc9d5e461713dedb240125b2ff0671f58135dd9355f7969af341ff4725e73b2defbc0984cfdce930887a48506d970002 + languageName: node + linkType: hard + +"htmlparser2@npm:^6.1.0": + version: 6.1.0 + resolution: "htmlparser2@npm:6.1.0" + dependencies: + domelementtype: "npm:^2.0.1" + domhandler: "npm:^4.0.0" + domutils: "npm:^2.5.2" + entities: "npm:^2.0.0" + checksum: 10c0/3058499c95634f04dc66be8c2e0927cd86799413b2d6989d8ae542ca4dbf5fa948695d02c27d573acf44843af977aec6d9a7bdd0f6faa6b2d99e2a729b2a31b6 + languageName: node + linkType: hard + +"htmlparser2@npm:^8.0.1": + version: 8.0.2 + resolution: "htmlparser2@npm:8.0.2" + dependencies: + domelementtype: "npm:^2.3.0" + domhandler: "npm:^5.0.3" + domutils: "npm:^3.0.1" + entities: "npm:^4.4.0" + checksum: 10c0/609cca85886d0bf2c9a5db8c6926a89f3764596877492e2caa7a25a789af4065bc6ee2cdc81807fe6b1d03a87bf8a373b5a754528a4cc05146b713c20575aab4 + languageName: node + linkType: hard + +"http-cache-semantics@npm:^4.1.1": + version: 4.1.1 + resolution: "http-cache-semantics@npm:4.1.1" + checksum: 10c0/ce1319b8a382eb3cbb4a37c19f6bfe14e5bb5be3d09079e885e8c513ab2d3cd9214902f8a31c9dc4e37022633ceabfc2d697405deeaf1b8f3552bb4ed996fdfc + languageName: node + linkType: hard + +"http-deceiver@npm:^1.2.7": + version: 1.2.7 + resolution: "http-deceiver@npm:1.2.7" + checksum: 10c0/8bb9b716f5fc55f54a451da7f49b9c695c3e45498a789634daec26b61e4add7c85613a4a9e53726c39d09de7a163891ecd6eb5809adb64500a840fd86fe81d03 + languageName: node + linkType: hard + +"http-errors@npm:2.0.0": + version: 2.0.0 + resolution: "http-errors@npm:2.0.0" + dependencies: + depd: "npm:2.0.0" + inherits: "npm:2.0.4" + setprototypeof: "npm:1.2.0" + statuses: "npm:2.0.1" + toidentifier: "npm:1.0.1" + checksum: 10c0/fc6f2715fe188d091274b5ffc8b3657bd85c63e969daa68ccb77afb05b071a4b62841acb7a21e417b5539014dff2ebf9550f0b14a9ff126f2734a7c1387f8e19 + languageName: node + linkType: hard + +"http-errors@npm:~1.6.2": + version: 1.6.3 + resolution: "http-errors@npm:1.6.3" + dependencies: + depd: "npm:~1.1.2" + inherits: "npm:2.0.3" + setprototypeof: "npm:1.1.0" + statuses: "npm:>= 1.4.0 < 2" + checksum: 10c0/17ec4046ee974477778bfdd525936c254b872054703ec2caa4d6f099566b8adade636ae6aeeacb39302c5cd6e28fb407ebd937f500f5010d0b6850750414ff78 + languageName: node + linkType: hard + +"http-parser-js@npm:>=0.5.1": + version: 0.5.8 + resolution: "http-parser-js@npm:0.5.8" + checksum: 10c0/4ed89f812c44f84c4ae5d43dd3a0c47942b875b63be0ed2ccecbe6b0018af867d806495fc6e12474aff868721163699c49246585bddea4f0ecc6d2b02e19faf1 + languageName: node + linkType: hard + +"http-proxy-agent@npm:^7.0.0": + version: 7.0.2 + resolution: "http-proxy-agent@npm:7.0.2" + dependencies: + agent-base: "npm:^7.1.0" + debug: "npm:^4.3.4" + checksum: 
10c0/4207b06a4580fb85dd6dff521f0abf6db517489e70863dca1a0291daa7f2d3d2d6015a57bd702af068ea5cf9f1f6ff72314f5f5b4228d299c0904135d2aef921 + languageName: node + linkType: hard + +"http-proxy-middleware@npm:^2.0.3": + version: 2.0.6 + resolution: "http-proxy-middleware@npm:2.0.6" + dependencies: + "@types/http-proxy": "npm:^1.17.8" + http-proxy: "npm:^1.18.1" + is-glob: "npm:^4.0.1" + is-plain-obj: "npm:^3.0.0" + micromatch: "npm:^4.0.2" + peerDependencies: + "@types/express": ^4.17.13 + peerDependenciesMeta: + "@types/express": + optional: true + checksum: 10c0/25a0e550dd1900ee5048a692e0e9b2b6339d06d487a705d90c47e359e9c6561d648cd7862d001d090e651c9efffa1b6e5160fcf1f299b5fa4935f76e9754eb11 + languageName: node + linkType: hard + +"http-proxy@npm:^1.18.1": + version: 1.18.1 + resolution: "http-proxy@npm:1.18.1" + dependencies: + eventemitter3: "npm:^4.0.0" + follow-redirects: "npm:^1.0.0" + requires-port: "npm:^1.0.0" + checksum: 10c0/148dfa700a03fb421e383aaaf88ac1d94521dfc34072f6c59770528c65250983c2e4ec996f2f03aa9f3fe46cd1270a593126068319311e3e8d9e610a37533e94 + languageName: node + linkType: hard + +"http2-wrapper@npm:^2.1.10": + version: 2.2.1 + resolution: "http2-wrapper@npm:2.2.1" + dependencies: + quick-lru: "npm:^5.1.1" + resolve-alpn: "npm:^1.2.0" + checksum: 10c0/7207201d3c6e53e72e510c9b8912e4f3e468d3ecc0cf3bf52682f2aac9cd99358b896d1da4467380adc151cf97c412bedc59dc13dae90c523f42053a7449eedb + languageName: node + linkType: hard + +"https-proxy-agent@npm:^7.0.1": + version: 7.0.5 + resolution: "https-proxy-agent@npm:7.0.5" + dependencies: + agent-base: "npm:^7.0.2" + debug: "npm:4" + checksum: 10c0/2490e3acec397abeb88807db52cac59102d5ed758feee6df6112ab3ccd8325e8a1ce8bce6f4b66e5470eca102d31e425ace904242e4fa28dbe0c59c4bafa7b2c + languageName: node + linkType: hard + +"human-signals@npm:^2.1.0": + version: 2.1.0 + resolution: "human-signals@npm:2.1.0" + checksum: 10c0/695edb3edfcfe9c8b52a76926cd31b36978782062c0ed9b1192b36bebc75c4c87c82e178dfcb0ed0fc27ca59d434198aac0bd0be18f5781ded775604db22304a + languageName: node + linkType: hard + +"iconv-lite@npm:0.4.24": + version: 0.4.24 + resolution: "iconv-lite@npm:0.4.24" + dependencies: + safer-buffer: "npm:>= 2.1.2 < 3" + checksum: 10c0/c6886a24cc00f2a059767440ec1bc00d334a89f250db8e0f7feb4961c8727118457e27c495ba94d082e51d3baca378726cd110aaf7ded8b9bbfd6a44760cf1d4 + languageName: node + linkType: hard + +"iconv-lite@npm:0.6, iconv-lite@npm:^0.6.2": + version: 0.6.3 + resolution: "iconv-lite@npm:0.6.3" + dependencies: + safer-buffer: "npm:>= 2.1.2 < 3.0.0" + checksum: 10c0/98102bc66b33fcf5ac044099d1257ba0b7ad5e3ccd3221f34dd508ab4070edff183276221684e1e0555b145fce0850c9f7d2b60a9fcac50fbb4ea0d6e845a3b1 + languageName: node + linkType: hard + +"icss-utils@npm:^5.0.0, icss-utils@npm:^5.1.0": + version: 5.1.0 + resolution: "icss-utils@npm:5.1.0" + peerDependencies: + postcss: ^8.1.0 + checksum: 10c0/39c92936fabd23169c8611d2b5cc39e39d10b19b0d223352f20a7579f75b39d5f786114a6b8fc62bee8c5fed59ba9e0d38f7219a4db383e324fb3061664b043d + languageName: node + linkType: hard + +"ieee754@npm:^1.1.13": + version: 1.2.1 + resolution: "ieee754@npm:1.2.1" + checksum: 10c0/b0782ef5e0935b9f12883a2e2aa37baa75da6e66ce6515c168697b42160807d9330de9a32ec1ed73149aea02e0d822e572bca6f1e22bdcbd2149e13b050b17bb + languageName: node + linkType: hard + +"ignore@npm:^5.0.0, ignore@npm:^5.2.0, ignore@npm:^5.2.4, ignore@npm:^5.3.1": + version: 5.3.2 + resolution: "ignore@npm:5.3.2" + checksum: 
10c0/f9f652c957983634ded1e7f02da3b559a0d4cc210fca3792cb67f1b153623c9c42efdc1c4121af171e295444459fc4a9201101fb041b1104a3c000bccb188337 + languageName: node + linkType: hard + +"image-size@npm:^1.0.2": + version: 1.1.1 + resolution: "image-size@npm:1.1.1" + dependencies: + queue: "npm:6.0.2" + bin: + image-size: bin/image-size.js + checksum: 10c0/2660470096d12be82195f7e80fe03274689fbd14184afb78eaf66ade7cd06352518325814f88af4bde4b26647889fe49e573129f6e7ba8f5ff5b85cc7f559000 + languageName: node + linkType: hard + +"immer@npm:^9.0.7": + version: 9.0.21 + resolution: "immer@npm:9.0.21" + checksum: 10c0/03ea3ed5d4d72e8bd428df4a38ad7e483ea8308e9a113d3b42e0ea2cc0cc38340eb0a6aca69592abbbf047c685dbda04e3d34bf2ff438ab57339ed0a34cc0a05 + languageName: node + linkType: hard + +"immutable@npm:^4.0.0": + version: 4.3.7 + resolution: "immutable@npm:4.3.7" + checksum: 10c0/9b099197081b22f6433003e34929da8ecddbbdc1474cdc8aa3b7669dee4adda349c06143de22def36016d1b6de5322b043eccd7a11db1dad2ca85dad4fff5435 + languageName: node + linkType: hard + +"import-fresh@npm:^3.1.0, import-fresh@npm:^3.2.1, import-fresh@npm:^3.3.0": + version: 3.3.0 + resolution: "import-fresh@npm:3.3.0" + dependencies: + parent-module: "npm:^1.0.0" + resolve-from: "npm:^4.0.0" + checksum: 10c0/7f882953aa6b740d1f0e384d0547158bc86efbf2eea0f1483b8900a6f65c5a5123c2cf09b0d542cc419d0b98a759ecaeb394237e97ea427f2da221dc3cd80cc3 + languageName: node + linkType: hard + +"import-lazy@npm:^4.0.0": + version: 4.0.0 + resolution: "import-lazy@npm:4.0.0" + checksum: 10c0/a3520313e2c31f25c0b06aa66d167f329832b68a4f957d7c9daf6e0fa41822b6e84948191648b9b9d8ca82f94740cdf15eecf2401a5b42cd1c33fd84f2225cca + languageName: node + linkType: hard + +"import-meta-resolve@npm:^4.0.0": + version: 4.1.0 + resolution: "import-meta-resolve@npm:4.1.0" + checksum: 10c0/42f3284b0460635ddf105c4ad99c6716099c3ce76702602290ad5cbbcd295700cbc04e4bdf47bacf9e3f1a4cec2e1ff887dabc20458bef398f9de22ddff45ef5 + languageName: node + linkType: hard + +"imurmurhash@npm:^0.1.4": + version: 0.1.4 + resolution: "imurmurhash@npm:0.1.4" + checksum: 10c0/8b51313850dd33605c6c9d3fd9638b714f4c4c40250cff658209f30d40da60f78992fb2df5dabee4acf589a6a82bbc79ad5486550754bd9ec4e3fc0d4a57d6a6 + languageName: node + linkType: hard + +"indent-string@npm:^4.0.0": + version: 4.0.0 + resolution: "indent-string@npm:4.0.0" + checksum: 10c0/1e1904ddb0cb3d6cce7cd09e27a90184908b7a5d5c21b92e232c93579d314f0b83c246ffb035493d0504b1e9147ba2c9b21df0030f48673fba0496ecd698161f + languageName: node + linkType: hard + +"infima@npm:0.2.0-alpha.44": + version: 0.2.0-alpha.44 + resolution: "infima@npm:0.2.0-alpha.44" + checksum: 10c0/0fe2b7882e09187ee62e5192673c542513fe4743f727f887e195de4f26eb792ddf81577ca98c34a69ab7eb39251f60531b9ad6d2f454553bac326b1afc9d68b5 + languageName: node + linkType: hard + +"inflight@npm:^1.0.4": + version: 1.0.6 + resolution: "inflight@npm:1.0.6" + dependencies: + once: "npm:^1.3.0" + wrappy: "npm:1" + checksum: 10c0/7faca22584600a9dc5b9fca2cd5feb7135ac8c935449837b315676b4c90aa4f391ec4f42240178244b5a34e8bede1948627fda392ca3191522fc46b34e985ab2 + languageName: node + linkType: hard + +"inherits@npm:2, inherits@npm:2.0.4, inherits@npm:^2.0.1, inherits@npm:^2.0.3, inherits@npm:^2.0.4, inherits@npm:~2.0.3": + version: 2.0.4 + resolution: "inherits@npm:2.0.4" + checksum: 10c0/4e531f648b29039fb7426fb94075e6545faa1eb9fe83c29f0b6d9e7263aceb4289d2d4557db0d428188eeb449cc7c5e77b0a0b2c4e248ff2a65933a0dee49ef2 + languageName: node + linkType: hard + +"inherits@npm:2.0.3": + version: 2.0.3 + resolution: 
"inherits@npm:2.0.3" + checksum: 10c0/6e56402373149ea076a434072671f9982f5fad030c7662be0332122fe6c0fa490acb3cc1010d90b6eff8d640b1167d77674add52dfd1bb85d545cf29e80e73e7 + languageName: node + linkType: hard + +"ini@npm:2.0.0": + version: 2.0.0 + resolution: "ini@npm:2.0.0" + checksum: 10c0/2e0c8f386369139029da87819438b20a1ff3fe58372d93fb1a86e9d9344125ace3a806b8ec4eb160a46e64cbc422fe68251869441676af49b7fc441af2389c25 + languageName: node + linkType: hard + +"ini@npm:^1.3.4, ini@npm:^1.3.5, ini@npm:~1.3.0": + version: 1.3.8 + resolution: "ini@npm:1.3.8" + checksum: 10c0/ec93838d2328b619532e4f1ff05df7909760b6f66d9c9e2ded11e5c1897d6f2f9980c54dd638f88654b00919ce31e827040631eab0a3969e4d1abefa0719516a + languageName: node + linkType: hard + +"ini@npm:^4.1.2, ini@npm:^4.1.3": + version: 4.1.3 + resolution: "ini@npm:4.1.3" + checksum: 10c0/0d27eff094d5f3899dd7c00d0c04ea733ca03a8eb6f9406ce15daac1a81de022cb417d6eaff7e4342451ffa663389c565ffc68d6825eaf686bf003280b945764 + languageName: node + linkType: hard + +"inline-style-parser@npm:0.1.1": + version: 0.1.1 + resolution: "inline-style-parser@npm:0.1.1" + checksum: 10c0/08832a533f51a1e17619f2eabf2f5ec5e956d6dcba1896351285c65df022c9420de61d73256e1dca8015a52abf96cc84ddc3b73b898b22de6589d3962b5e501b + languageName: node + linkType: hard + +"inline-style-parser@npm:0.2.3": + version: 0.2.3 + resolution: "inline-style-parser@npm:0.2.3" + checksum: 10c0/21b46d39a39c8aeaa738346650469388e8a412dd276ab75aa3d85b1883311e89c86a1fdbb8c2f1958f4c979bae74067f6ba0385455b125faf4fa77e1dbb94799 + languageName: node + linkType: hard + +"internal-slot@npm:^1.0.7": + version: 1.0.7 + resolution: "internal-slot@npm:1.0.7" + dependencies: + es-errors: "npm:^1.3.0" + hasown: "npm:^2.0.0" + side-channel: "npm:^1.0.4" + checksum: 10c0/f8b294a4e6ea3855fc59551bbf35f2b832cf01fd5e6e2a97f5c201a071cc09b49048f856e484b67a6c721da5e55736c5b6ddafaf19e2dbeb4a3ff1821680de6c + languageName: node + linkType: hard + +"internmap@npm:1 - 2": + version: 2.0.3 + resolution: "internmap@npm:2.0.3" + checksum: 10c0/8cedd57f07bbc22501516fbfc70447f0c6812871d471096fad9ea603516eacc2137b633633daf432c029712df0baefd793686388ddf5737e3ea15074b877f7ed + languageName: node + linkType: hard + +"internmap@npm:^1.0.0": + version: 1.0.1 + resolution: "internmap@npm:1.0.1" + checksum: 10c0/60942be815ca19da643b6d4f23bd0bf4e8c97abbd080fb963fe67583b60bdfb3530448ad4486bae40810e92317bded9995cc31411218acc750d72cd4e8646eee + languageName: node + linkType: hard + +"interpret@npm:^1.0.0": + version: 1.4.0 + resolution: "interpret@npm:1.4.0" + checksum: 10c0/08c5ad30032edeec638485bc3f6db7d0094d9b3e85e0f950866600af3c52e9fd69715416d29564731c479d9f4d43ff3e4d302a178196bdc0e6837ec147640450 + languageName: node + linkType: hard + +"invariant@npm:^2.2.4": + version: 2.2.4 + resolution: "invariant@npm:2.2.4" + dependencies: + loose-envify: "npm:^1.0.0" + checksum: 10c0/5af133a917c0bcf65e84e7f23e779e7abc1cd49cb7fdc62d00d1de74b0d8c1b5ee74ac7766099fb3be1b05b26dfc67bab76a17030d2fe7ea2eef867434362dfc + languageName: node + linkType: hard + +"ip-address@npm:^9.0.5": + version: 9.0.5 + resolution: "ip-address@npm:9.0.5" + dependencies: + jsbn: "npm:1.1.0" + sprintf-js: "npm:^1.1.3" + checksum: 10c0/331cd07fafcb3b24100613e4b53e1a2b4feab11e671e655d46dc09ee233da5011284d09ca40c4ecbdfe1d0004f462958675c224a804259f2f78d2465a87824bc + languageName: node + linkType: hard + +"ipaddr.js@npm:1.9.1": + version: 1.9.1 + resolution: "ipaddr.js@npm:1.9.1" + checksum: 
10c0/0486e775047971d3fdb5fb4f063829bac45af299ae0b82dcf3afa2145338e08290563a2a70f34b732d795ecc8311902e541a8530eeb30d75860a78ff4e94ce2a + languageName: node + linkType: hard + +"ipaddr.js@npm:^2.0.1": + version: 2.2.0 + resolution: "ipaddr.js@npm:2.2.0" + checksum: 10c0/e4ee875dc1bd92ac9d27e06cfd87cdb63ca786ff9fd7718f1d4f7a8ef27db6e5d516128f52d2c560408cbb75796ac2f83ead669e73507c86282d45f84c5abbb6 + languageName: node + linkType: hard + +"is-alphabetical@npm:^1.0.0": + version: 1.0.4 + resolution: "is-alphabetical@npm:1.0.4" + checksum: 10c0/1505b1de5a1fd74022c05fb21b0e683a8f5229366bac8dc4d34cf6935bcfd104d1125a5e6b083fb778847629f76e5bdac538de5367bdf2b927a1356164e23985 + languageName: node + linkType: hard + +"is-alphabetical@npm:^2.0.0": + version: 2.0.1 + resolution: "is-alphabetical@npm:2.0.1" + checksum: 10c0/932367456f17237533fd1fc9fe179df77957271020b83ea31da50e5cc472d35ef6b5fb8147453274ffd251134472ce24eb6f8d8398d96dee98237cdb81a6c9a7 + languageName: node + linkType: hard + +"is-alphanumerical@npm:^1.0.0": + version: 1.0.4 + resolution: "is-alphanumerical@npm:1.0.4" + dependencies: + is-alphabetical: "npm:^1.0.0" + is-decimal: "npm:^1.0.0" + checksum: 10c0/d623abae7130a7015c6bf33d99151d4e7005572fd170b86568ff4de5ae86ac7096608b87dd4a1d4dbbd497e392b6396930ba76c9297a69455909cebb68005905 + languageName: node + linkType: hard + +"is-alphanumerical@npm:^2.0.0": + version: 2.0.1 + resolution: "is-alphanumerical@npm:2.0.1" + dependencies: + is-alphabetical: "npm:^2.0.0" + is-decimal: "npm:^2.0.0" + checksum: 10c0/4b35c42b18e40d41378293f82a3ecd9de77049b476f748db5697c297f686e1e05b072a6aaae2d16f54d2a57f85b00cbbe755c75f6d583d1c77d6657bd0feb5a2 + languageName: node + linkType: hard + +"is-array-buffer@npm:^3.0.4": + version: 3.0.4 + resolution: "is-array-buffer@npm:3.0.4" + dependencies: + call-bind: "npm:^1.0.2" + get-intrinsic: "npm:^1.2.1" + checksum: 10c0/42a49d006cc6130bc5424eae113e948c146f31f9d24460fc0958f855d9d810e6fd2e4519bf19aab75179af9c298ea6092459d8cafdec523cd19e529b26eab860 + languageName: node + linkType: hard + +"is-arrayish@npm:^0.2.1": + version: 0.2.1 + resolution: "is-arrayish@npm:0.2.1" + checksum: 10c0/e7fb686a739068bb70f860b39b67afc62acc62e36bb61c5f965768abce1873b379c563e61dd2adad96ebb7edf6651111b385e490cf508378959b0ed4cac4e729 + languageName: node + linkType: hard + +"is-arrayish@npm:^0.3.1": + version: 0.3.2 + resolution: "is-arrayish@npm:0.3.2" + checksum: 10c0/f59b43dc1d129edb6f0e282595e56477f98c40278a2acdc8b0a5c57097c9eff8fe55470493df5775478cf32a4dc8eaf6d3a749f07ceee5bc263a78b2434f6a54 + languageName: node + linkType: hard + +"is-async-function@npm:^2.0.0": + version: 2.0.0 + resolution: "is-async-function@npm:2.0.0" + dependencies: + has-tostringtag: "npm:^1.0.0" + checksum: 10c0/787bc931576aad525d751fc5ce211960fe91e49ac84a5c22d6ae0bc9541945fbc3f686dc590c3175722ce4f6d7b798a93f6f8ff4847fdb2199aea6f4baf5d668 + languageName: node + linkType: hard + +"is-bigint@npm:^1.0.1": + version: 1.0.4 + resolution: "is-bigint@npm:1.0.4" + dependencies: + has-bigints: "npm:^1.0.1" + checksum: 10c0/eb9c88e418a0d195ca545aff2b715c9903d9b0a5033bc5922fec600eb0c3d7b1ee7f882dbf2e0d5a6e694e42391be3683e4368737bd3c4a77f8ac293e7773696 + languageName: node + linkType: hard + +"is-binary-path@npm:~2.1.0": + version: 2.1.0 + resolution: "is-binary-path@npm:2.1.0" + dependencies: + binary-extensions: "npm:^2.0.0" + checksum: 10c0/a16eaee59ae2b315ba36fad5c5dcaf8e49c3e27318f8ab8fa3cdb8772bf559c8d1ba750a589c2ccb096113bb64497084361a25960899cb6172a6925ab6123d38 + languageName: node + linkType: hard + 
+"is-boolean-object@npm:^1.1.0": + version: 1.1.2 + resolution: "is-boolean-object@npm:1.1.2" + dependencies: + call-bind: "npm:^1.0.2" + has-tostringtag: "npm:^1.0.0" + checksum: 10c0/6090587f8a8a8534c0f816da868bc94f32810f08807aa72fa7e79f7e11c466d281486ffe7a788178809c2aa71fe3e700b167fe80dd96dad68026bfff8ebf39f7 + languageName: node + linkType: hard + +"is-callable@npm:^1.1.3, is-callable@npm:^1.1.4, is-callable@npm:^1.2.7": + version: 1.2.7 + resolution: "is-callable@npm:1.2.7" + checksum: 10c0/ceebaeb9d92e8adee604076971dd6000d38d6afc40bb843ea8e45c5579b57671c3f3b50d7f04869618242c6cee08d1b67806a8cb8edaaaf7c0748b3720d6066f + languageName: node + linkType: hard + +"is-ci@npm:^3.0.1": + version: 3.0.1 + resolution: "is-ci@npm:3.0.1" + dependencies: + ci-info: "npm:^3.2.0" + bin: + is-ci: bin.js + checksum: 10c0/0e81caa62f4520d4088a5bef6d6337d773828a88610346c4b1119fb50c842587ed8bef1e5d9a656835a599e7209405b5761ddf2339668f2d0f4e889a92fe6051 + languageName: node + linkType: hard + +"is-core-module@npm:^2.13.0": + version: 2.15.1 + resolution: "is-core-module@npm:2.15.1" + dependencies: + hasown: "npm:^2.0.2" + checksum: 10c0/53432f10c69c40bfd2fa8914133a68709ff9498c86c3bf5fca3cdf3145a56fd2168cbf4a43b29843a6202a120a5f9c5ffba0a4322e1e3441739bc0b641682612 + languageName: node + linkType: hard + +"is-data-view@npm:^1.0.1": + version: 1.0.1 + resolution: "is-data-view@npm:1.0.1" + dependencies: + is-typed-array: "npm:^1.1.13" + checksum: 10c0/a3e6ec84efe303da859107aed9b970e018e2bee7ffcb48e2f8096921a493608134240e672a2072577e5f23a729846241d9634806e8a0e51d9129c56d5f65442d + languageName: node + linkType: hard + +"is-date-object@npm:^1.0.1, is-date-object@npm:^1.0.5": + version: 1.0.5 + resolution: "is-date-object@npm:1.0.5" + dependencies: + has-tostringtag: "npm:^1.0.0" + checksum: 10c0/eed21e5dcc619c48ccef804dfc83a739dbb2abee6ca202838ee1bd5f760fe8d8a93444f0d49012ad19bb7c006186e2884a1b92f6e1c056da7fd23d0a9ad5992e + languageName: node + linkType: hard + +"is-decimal@npm:^1.0.0": + version: 1.0.4 + resolution: "is-decimal@npm:1.0.4" + checksum: 10c0/a4ad53c4c5c4f5a12214e7053b10326711f6a71f0c63ba1314a77bd71df566b778e4ebd29f9fb6815f07a4dc50c3767fb19bd6fc9fa05e601410f1d64ffeac48 + languageName: node + linkType: hard + +"is-decimal@npm:^2.0.0": + version: 2.0.1 + resolution: "is-decimal@npm:2.0.1" + checksum: 10c0/8085dd66f7d82f9de818fba48b9e9c0429cb4291824e6c5f2622e96b9680b54a07a624cfc663b24148b8e853c62a1c987cfe8b0b5a13f5156991afaf6736e334 + languageName: node + linkType: hard + +"is-docker@npm:^2.0.0, is-docker@npm:^2.1.1": + version: 2.2.1 + resolution: "is-docker@npm:2.2.1" + bin: + is-docker: cli.js + checksum: 10c0/e828365958d155f90c409cdbe958f64051d99e8aedc2c8c4cd7c89dcf35329daed42f7b99346f7828df013e27deb8f721cf9408ba878c76eb9e8290235fbcdcc + languageName: node + linkType: hard + +"is-empty@npm:^1.0.0": + version: 1.2.0 + resolution: "is-empty@npm:1.2.0" + checksum: 10c0/f0dd6534716f2749586c35f1dcf37a0a5ac31e91d629ae2652b36c7f72c0ce71f0b68f082a6eed95b1af6f84ba31cd757c2343b19507878ed1e532a3383ebaaa + languageName: node + linkType: hard + +"is-extendable@npm:^0.1.0": + version: 0.1.1 + resolution: "is-extendable@npm:0.1.1" + checksum: 10c0/dd5ca3994a28e1740d1e25192e66eed128e0b2ff161a7ea348e87ae4f616554b486854de423877a2a2c171d5f7cd6e8093b91f54533bc88a59ee1c9838c43879 + languageName: node + linkType: hard + +"is-extglob@npm:^2.1.1": + version: 2.1.1 + resolution: "is-extglob@npm:2.1.1" + checksum: 
10c0/5487da35691fbc339700bbb2730430b07777a3c21b9ebaecb3072512dfd7b4ba78ac2381a87e8d78d20ea08affb3f1971b4af629173a6bf435ff8a4c47747912 + languageName: node + linkType: hard + +"is-finalizationregistry@npm:^1.0.2": + version: 1.0.2 + resolution: "is-finalizationregistry@npm:1.0.2" + dependencies: + call-bind: "npm:^1.0.2" + checksum: 10c0/81caecc984d27b1a35c68741156fc651fb1fa5e3e6710d21410abc527eb226d400c0943a167922b2e920f6b3e58b0dede9aa795882b038b85f50b3a4b877db86 + languageName: node + linkType: hard + +"is-fullwidth-code-point@npm:^3.0.0": + version: 3.0.0 + resolution: "is-fullwidth-code-point@npm:3.0.0" + checksum: 10c0/bb11d825e049f38e04c06373a8d72782eee0205bda9d908cc550ccb3c59b99d750ff9537982e01733c1c94a58e35400661f57042158ff5e8f3e90cf936daf0fc + languageName: node + linkType: hard + +"is-generator-function@npm:^1.0.10": + version: 1.0.10 + resolution: "is-generator-function@npm:1.0.10" + dependencies: + has-tostringtag: "npm:^1.0.0" + checksum: 10c0/df03514df01a6098945b5a0cfa1abff715807c8e72f57c49a0686ad54b3b74d394e2d8714e6f709a71eb00c9630d48e73ca1796c1ccc84ac95092c1fecc0d98b + languageName: node + linkType: hard + +"is-glob@npm:^4.0.0, is-glob@npm:^4.0.1, is-glob@npm:^4.0.3, is-glob@npm:~4.0.1": + version: 4.0.3 + resolution: "is-glob@npm:4.0.3" + dependencies: + is-extglob: "npm:^2.1.1" + checksum: 10c0/17fb4014e22be3bbecea9b2e3a76e9e34ff645466be702f1693e8f1ee1adac84710d0be0bd9f967d6354036fd51ab7c2741d954d6e91dae6bb69714de92c197a + languageName: node + linkType: hard + +"is-hexadecimal@npm:^1.0.0": + version: 1.0.4 + resolution: "is-hexadecimal@npm:1.0.4" + checksum: 10c0/ec4c64e5624c0f240922324bc697e166554f09d3ddc7633fc526084502626445d0a871fbd8cae52a9844e83bd0bb414193cc5a66806d7b2867907003fc70c5ea + languageName: node + linkType: hard + +"is-hexadecimal@npm:^2.0.0": + version: 2.0.1 + resolution: "is-hexadecimal@npm:2.0.1" + checksum: 10c0/3eb60fe2f1e2bbc760b927dcad4d51eaa0c60138cf7fc671803f66353ad90c301605b502c7ea4c6bb0548e1c7e79dfd37b73b632652e3b76030bba603a7e9626 + languageName: node + linkType: hard + +"is-installed-globally@npm:^0.4.0": + version: 0.4.0 + resolution: "is-installed-globally@npm:0.4.0" + dependencies: + global-dirs: "npm:^3.0.0" + is-path-inside: "npm:^3.0.2" + checksum: 10c0/f3e6220ee5824b845c9ed0d4b42c24272701f1f9926936e30c0e676254ca5b34d1b92c6205cae11b283776f9529212c0cdabb20ec280a6451677d6493ca9c22d + languageName: node + linkType: hard + +"is-lambda@npm:^1.0.1": + version: 1.0.1 + resolution: "is-lambda@npm:1.0.1" + checksum: 10c0/85fee098ae62ba6f1e24cf22678805473c7afd0fb3978a3aa260e354cb7bcb3a5806cf0a98403188465efedec41ab4348e8e4e79305d409601323855b3839d4d + languageName: node + linkType: hard + +"is-map@npm:^2.0.3": + version: 2.0.3 + resolution: "is-map@npm:2.0.3" + checksum: 10c0/2c4d431b74e00fdda7162cd8e4b763d6f6f217edf97d4f8538b94b8702b150610e2c64961340015fe8df5b1fcee33ccd2e9b62619c4a8a3a155f8de6d6d355fc + languageName: node + linkType: hard + +"is-negative-zero@npm:^2.0.3": + version: 2.0.3 + resolution: "is-negative-zero@npm:2.0.3" + checksum: 10c0/bcdcf6b8b9714063ffcfa9929c575ac69bfdabb8f4574ff557dfc086df2836cf07e3906f5bbc4f2a5c12f8f3ba56af640c843cdfc74da8caed86c7c7d66fd08e + languageName: node + linkType: hard + +"is-npm@npm:^6.0.0": + version: 6.0.0 + resolution: "is-npm@npm:6.0.0" + checksum: 10c0/1f064c66325cba6e494783bee4e635caa2655aad7f853a0e045d086e0bb7d83d2d6cdf1745dc9a7c7c93dacbf816fbee1f8d9179b02d5d01674d4f92541dc0d9 + languageName: node + linkType: hard + +"is-number-object@npm:^1.0.4": + version: 1.0.7 + resolution: 
"is-number-object@npm:1.0.7" + dependencies: + has-tostringtag: "npm:^1.0.0" + checksum: 10c0/aad266da1e530f1804a2b7bd2e874b4869f71c98590b3964f9d06cc9869b18f8d1f4778f838ecd2a11011bce20aeecb53cb269ba916209b79c24580416b74b1b + languageName: node + linkType: hard + +"is-number@npm:^7.0.0": + version: 7.0.0 + resolution: "is-number@npm:7.0.0" + checksum: 10c0/b4686d0d3053146095ccd45346461bc8e53b80aeb7671cc52a4de02dbbf7dc0d1d2a986e2fe4ae206984b4d34ef37e8b795ebc4f4295c978373e6575e295d811 + languageName: node + linkType: hard + +"is-obj@npm:^1.0.1": + version: 1.0.1 + resolution: "is-obj@npm:1.0.1" + checksum: 10c0/5003acba0af7aa47dfe0760e545a89bbac89af37c12092c3efadc755372cdaec034f130e7a3653a59eb3c1843cfc72ca71eaf1a6c3bafe5a0bab3611a47f9945 + languageName: node + linkType: hard + +"is-obj@npm:^2.0.0": + version: 2.0.0 + resolution: "is-obj@npm:2.0.0" + checksum: 10c0/85044ed7ba8bd169e2c2af3a178cacb92a97aa75de9569d02efef7f443a824b5e153eba72b9ae3aca6f8ce81955271aa2dc7da67a8b720575d3e38104208cb4e + languageName: node + linkType: hard + +"is-path-cwd@npm:^2.2.0": + version: 2.2.0 + resolution: "is-path-cwd@npm:2.2.0" + checksum: 10c0/afce71533a427a759cd0329301c18950333d7589533c2c90205bd3fdcf7b91eb92d1940493190567a433134d2128ec9325de2fd281e05be1920fbee9edd22e0a + languageName: node + linkType: hard + +"is-path-inside@npm:^3.0.2, is-path-inside@npm:^3.0.3": + version: 3.0.3 + resolution: "is-path-inside@npm:3.0.3" + checksum: 10c0/cf7d4ac35fb96bab6a1d2c3598fe5ebb29aafb52c0aaa482b5a3ed9d8ba3edc11631e3ec2637660c44b3ce0e61a08d54946e8af30dec0b60a7c27296c68ffd05 + languageName: node + linkType: hard + +"is-plain-obj@npm:^3.0.0": + version: 3.0.0 + resolution: "is-plain-obj@npm:3.0.0" + checksum: 10c0/8e6483bfb051d42ec9c704c0ede051a821c6b6f9a6c7a3e3b55aa855e00981b0580c8f3b1f5e2e62649b39179b1abfee35d6f8086d999bfaa32c1908d29b07bc + languageName: node + linkType: hard + +"is-plain-obj@npm:^4.0.0": + version: 4.1.0 + resolution: "is-plain-obj@npm:4.1.0" + checksum: 10c0/32130d651d71d9564dc88ba7e6fda0e91a1010a3694648e9f4f47bb6080438140696d3e3e15c741411d712e47ac9edc1a8a9de1fe76f3487b0d90be06ac9975e + languageName: node + linkType: hard + +"is-plain-object@npm:^2.0.4": + version: 2.0.4 + resolution: "is-plain-object@npm:2.0.4" + dependencies: + isobject: "npm:^3.0.1" + checksum: 10c0/f050fdd5203d9c81e8c4df1b3ff461c4bc64e8b5ca383bcdde46131361d0a678e80bcf00b5257646f6c636197629644d53bd8e2375aea633de09a82d57e942f4 + languageName: node + linkType: hard + +"is-reference@npm:^3.0.0": + version: 3.0.2 + resolution: "is-reference@npm:3.0.2" + dependencies: + "@types/estree": "npm:*" + checksum: 10c0/652d31b405e8e8269071cee78fe874b072745012eba202c6dc86880fd603a65ae043e3160990ab4a0a4b33567cbf662eecf3bc6b3c2c1550e6c2b6cf885ce5aa + languageName: node + linkType: hard + +"is-regex@npm:^1.1.4": + version: 1.1.4 + resolution: "is-regex@npm:1.1.4" + dependencies: + call-bind: "npm:^1.0.2" + has-tostringtag: "npm:^1.0.0" + checksum: 10c0/bb72aae604a69eafd4a82a93002058c416ace8cde95873589a97fc5dac96a6c6c78a9977d487b7b95426a8f5073969124dd228f043f9f604f041f32fcc465fc1 + languageName: node + linkType: hard + +"is-regexp@npm:^1.0.0": + version: 1.0.0 + resolution: "is-regexp@npm:1.0.0" + checksum: 10c0/34cacda1901e00f6e44879378f1d2fa96320ea956c1bec27713130aaf1d44f6e7bd963eed28945bfe37e600cb27df1cf5207302680dad8bdd27b9baff8ecf611 + languageName: node + linkType: hard + +"is-root@npm:^2.1.0": + version: 2.1.0 + resolution: "is-root@npm:2.1.0" + checksum: 
10c0/83d3f5b052c3f28fbdbdf0d564bdd34fa14933f5694c78704f85cd1871255bc017fbe3fe2bc2fff2d227c6be5927ad2149b135c0a7c0060e7ac4e610d81a4f01 + languageName: node + linkType: hard + +"is-set@npm:^2.0.3": + version: 2.0.3 + resolution: "is-set@npm:2.0.3" + checksum: 10c0/f73732e13f099b2dc879c2a12341cfc22ccaca8dd504e6edae26484bd5707a35d503fba5b4daad530a9b088ced1ae6c9d8200fd92e09b428fe14ea79ce8080b7 + languageName: node + linkType: hard + +"is-shared-array-buffer@npm:^1.0.2, is-shared-array-buffer@npm:^1.0.3": + version: 1.0.3 + resolution: "is-shared-array-buffer@npm:1.0.3" + dependencies: + call-bind: "npm:^1.0.7" + checksum: 10c0/adc11ab0acbc934a7b9e5e9d6c588d4ec6682f6fea8cda5180721704fa32927582ede5b123349e32517fdadd07958973d24716c80e7ab198970c47acc09e59c7 + languageName: node + linkType: hard + +"is-stream@npm:^2.0.0": + version: 2.0.1 + resolution: "is-stream@npm:2.0.1" + checksum: 10c0/7c284241313fc6efc329b8d7f08e16c0efeb6baab1b4cd0ba579eb78e5af1aa5da11e68559896a2067cd6c526bd29241dda4eb1225e627d5aa1a89a76d4635a5 + languageName: node + linkType: hard + +"is-string@npm:^1.0.5, is-string@npm:^1.0.7": + version: 1.0.7 + resolution: "is-string@npm:1.0.7" + dependencies: + has-tostringtag: "npm:^1.0.0" + checksum: 10c0/905f805cbc6eedfa678aaa103ab7f626aac9ebbdc8737abb5243acaa61d9820f8edc5819106b8fcd1839e33db21de9f0116ae20de380c8382d16dc2a601921f6 + languageName: node + linkType: hard + +"is-symbol@npm:^1.0.2, is-symbol@npm:^1.0.3": + version: 1.0.4 + resolution: "is-symbol@npm:1.0.4" + dependencies: + has-symbols: "npm:^1.0.2" + checksum: 10c0/9381dd015f7c8906154dbcbf93fad769de16b4b961edc94f88d26eb8c555935caa23af88bda0c93a18e65560f6d7cca0fd5a3f8a8e1df6f1abbb9bead4502ef7 + languageName: node + linkType: hard + +"is-typed-array@npm:^1.1.13": + version: 1.1.13 + resolution: "is-typed-array@npm:1.1.13" + dependencies: + which-typed-array: "npm:^1.1.14" + checksum: 10c0/fa5cb97d4a80e52c2cc8ed3778e39f175a1a2ae4ddf3adae3187d69586a1fd57cfa0b095db31f66aa90331e9e3da79184cea9c6abdcd1abc722dc3c3edd51cca + languageName: node + linkType: hard + +"is-typedarray@npm:^1.0.0": + version: 1.0.0 + resolution: "is-typedarray@npm:1.0.0" + checksum: 10c0/4c096275ba041a17a13cca33ac21c16bc4fd2d7d7eb94525e7cd2c2f2c1a3ab956e37622290642501ff4310601e413b675cf399ad6db49855527d2163b3eeeec + languageName: node + linkType: hard + +"is-weakmap@npm:^2.0.2": + version: 2.0.2 + resolution: "is-weakmap@npm:2.0.2" + checksum: 10c0/443c35bb86d5e6cc5929cd9c75a4024bb0fff9586ed50b092f94e700b89c43a33b186b76dbc6d54f3d3d09ece689ab38dcdc1af6a482cbe79c0f2da0a17f1299 + languageName: node + linkType: hard + +"is-weakref@npm:^1.0.2": + version: 1.0.2 + resolution: "is-weakref@npm:1.0.2" + dependencies: + call-bind: "npm:^1.0.2" + checksum: 10c0/1545c5d172cb690c392f2136c23eec07d8d78a7f57d0e41f10078aa4f5daf5d7f57b6513a67514ab4f073275ad00c9822fc8935e00229d0a2089e1c02685d4b1 + languageName: node + linkType: hard + +"is-weakset@npm:^2.0.3": + version: 2.0.3 + resolution: "is-weakset@npm:2.0.3" + dependencies: + call-bind: "npm:^1.0.7" + get-intrinsic: "npm:^1.2.4" + checksum: 10c0/8ad6141b6a400e7ce7c7442a13928c676d07b1f315ab77d9912920bf5f4170622f43126f111615788f26c3b1871158a6797c862233124507db0bcc33a9537d1a + languageName: node + linkType: hard + +"is-wsl@npm:^2.2.0": + version: 2.2.0 + resolution: "is-wsl@npm:2.2.0" + dependencies: + is-docker: "npm:^2.0.0" + checksum: 10c0/a6fa2d370d21be487c0165c7a440d567274fbba1a817f2f0bfa41cc5e3af25041d84267baa22df66696956038a43973e72fca117918c91431920bdef490fa25e + languageName: node + linkType: hard + 
+"is-yarn-global@npm:^0.4.0": + version: 0.4.1 + resolution: "is-yarn-global@npm:0.4.1" + checksum: 10c0/8ff66f33454614f8e913ad91cc4de0d88d519a46c1ed41b3f589da79504ed0fcfa304064fe3096dda9360c5f35aa210cb8e978fd36798f3118cb66a4de64d365 + languageName: node + linkType: hard + +"isarray@npm:0.0.1": + version: 0.0.1 + resolution: "isarray@npm:0.0.1" + checksum: 10c0/ed1e62da617f71fe348907c71743b5ed550448b455f8d269f89a7c7ddb8ae6e962de3dab6a74a237b06f5eb7f6ece7a45ada8ce96d87fe972926530f91ae3311 + languageName: node + linkType: hard + +"isarray@npm:^2.0.5": + version: 2.0.5 + resolution: "isarray@npm:2.0.5" + checksum: 10c0/4199f14a7a13da2177c66c31080008b7124331956f47bca57dd0b6ea9f11687aa25e565a2c7a2b519bc86988d10398e3049a1f5df13c9f6b7664154690ae79fd + languageName: node + linkType: hard + +"isarray@npm:~1.0.0": + version: 1.0.0 + resolution: "isarray@npm:1.0.0" + checksum: 10c0/18b5be6669be53425f0b84098732670ed4e727e3af33bc7f948aac01782110eb9a18b3b329c5323bcdd3acdaae547ee077d3951317e7f133bff7105264b3003d + languageName: node + linkType: hard + +"isexe@npm:^2.0.0": + version: 2.0.0 + resolution: "isexe@npm:2.0.0" + checksum: 10c0/228cfa503fadc2c31596ab06ed6aa82c9976eec2bfd83397e7eaf06d0ccf42cd1dfd6743bf9aeb01aebd4156d009994c5f76ea898d2832c1fe342da923ca457d + languageName: node + linkType: hard + +"isexe@npm:^3.1.1": + version: 3.1.1 + resolution: "isexe@npm:3.1.1" + checksum: 10c0/9ec257654093443eb0a528a9c8cbba9c0ca7616ccb40abd6dde7202734d96bb86e4ac0d764f0f8cd965856aacbff2f4ce23e730dc19dfb41e3b0d865ca6fdcc7 + languageName: node + linkType: hard + +"isobject@npm:^3.0.1": + version: 3.0.1 + resolution: "isobject@npm:3.0.1" + checksum: 10c0/03344f5064a82f099a0cd1a8a407f4c0d20b7b8485e8e816c39f249e9416b06c322e8dec5b842b6bb8a06de0af9cb48e7bc1b5352f0fadc2f0abac033db3d4db + languageName: node + linkType: hard + +"iterator.prototype@npm:^1.1.2": + version: 1.1.2 + resolution: "iterator.prototype@npm:1.1.2" + dependencies: + define-properties: "npm:^1.2.1" + get-intrinsic: "npm:^1.2.1" + has-symbols: "npm:^1.0.3" + reflect.getprototypeof: "npm:^1.0.4" + set-function-name: "npm:^2.0.1" + checksum: 10c0/a32151326095e916f306990d909f6bbf23e3221999a18ba686419535dcd1749b10ded505e89334b77dc4c7a58a8508978f0eb16c2c8573e6d412eb7eb894ea79 + languageName: node + linkType: hard + +"jackspeak@npm:^3.1.2": + version: 3.4.3 + resolution: "jackspeak@npm:3.4.3" + dependencies: + "@isaacs/cliui": "npm:^8.0.2" + "@pkgjs/parseargs": "npm:^0.11.0" + dependenciesMeta: + "@pkgjs/parseargs": + optional: true + checksum: 10c0/6acc10d139eaefdbe04d2f679e6191b3abf073f111edf10b1de5302c97ec93fffeb2fdd8681ed17f16268aa9dd4f8c588ed9d1d3bffbbfa6e8bf897cbb3149b9 + languageName: node + linkType: hard + +"jest-util@npm:^29.7.0": + version: 29.7.0 + resolution: "jest-util@npm:29.7.0" + dependencies: + "@jest/types": "npm:^29.6.3" + "@types/node": "npm:*" + chalk: "npm:^4.0.0" + ci-info: "npm:^3.2.0" + graceful-fs: "npm:^4.2.9" + picomatch: "npm:^2.2.3" + checksum: 10c0/bc55a8f49fdbb8f51baf31d2a4f312fb66c9db1483b82f602c9c990e659cdd7ec529c8e916d5a89452ecbcfae4949b21b40a7a59d4ffc0cd813a973ab08c8150 + languageName: node + linkType: hard + +"jest-worker@npm:^27.4.5": + version: 27.5.1 + resolution: "jest-worker@npm:27.5.1" + dependencies: + "@types/node": "npm:*" + merge-stream: "npm:^2.0.0" + supports-color: "npm:^8.0.0" + checksum: 10c0/8c4737ffd03887b3c6768e4cc3ca0269c0336c1e4b1b120943958ddb035ed2a0fc6acab6dc99631720a3720af4e708ff84fb45382ad1e83c27946adf3623969b + languageName: node + linkType: hard + +"jest-worker@npm:^29.4.3": + version: 29.7.0 
+ resolution: "jest-worker@npm:29.7.0" + dependencies: + "@types/node": "npm:*" + jest-util: "npm:^29.7.0" + merge-stream: "npm:^2.0.0" + supports-color: "npm:^8.0.0" + checksum: 10c0/5570a3a005b16f46c131968b8a5b56d291f9bbb85ff4217e31c80bd8a02e7de799e59a54b95ca28d5c302f248b54cbffde2d177c2f0f52ffcee7504c6eabf660 + languageName: node + linkType: hard + +"jiti@npm:^1.20.0": + version: 1.21.6 + resolution: "jiti@npm:1.21.6" + bin: + jiti: bin/jiti.js + checksum: 10c0/05b9ed58cd30d0c3ccd3c98209339e74f50abd9a17e716f65db46b6a35812103f6bde6e134be7124d01745586bca8cc5dae1d0d952267c3ebe55171949c32e56 + languageName: node + linkType: hard + +"joi@npm:^17.9.2": + version: 17.13.3 + resolution: "joi@npm:17.13.3" + dependencies: + "@hapi/hoek": "npm:^9.3.0" + "@hapi/topo": "npm:^5.1.0" + "@sideway/address": "npm:^4.1.5" + "@sideway/formula": "npm:^3.0.1" + "@sideway/pinpoint": "npm:^2.0.0" + checksum: 10c0/9262aef1da3f1bec5b03caf50c46368899fe03b8ff26cbe3d53af4584dd1049079fc97230bbf1500b6149db7cc765b9ee45f0deb24bb6fc3fa06229d7148c17f + languageName: node + linkType: hard + +"js-tokens@npm:^3.0.0 || ^4.0.0, js-tokens@npm:^4.0.0": + version: 4.0.0 + resolution: "js-tokens@npm:4.0.0" + checksum: 10c0/e248708d377aa058eacf2037b07ded847790e6de892bbad3dac0abba2e759cb9f121b00099a65195616badcb6eca8d14d975cb3e89eb1cfda644756402c8aeed + languageName: node + linkType: hard + +"js-yaml@npm:^3.13.1": + version: 3.14.1 + resolution: "js-yaml@npm:3.14.1" + dependencies: + argparse: "npm:^1.0.7" + esprima: "npm:^4.0.0" + bin: + js-yaml: bin/js-yaml.js + checksum: 10c0/6746baaaeac312c4db8e75fa22331d9a04cccb7792d126ed8ce6a0bbcfef0cedaddd0c5098fade53db067c09fe00aa1c957674b4765610a8b06a5a189e46433b + languageName: node + linkType: hard + +"js-yaml@npm:^4.1.0": + version: 4.1.0 + resolution: "js-yaml@npm:4.1.0" + dependencies: + argparse: "npm:^2.0.1" + bin: + js-yaml: bin/js-yaml.js + checksum: 10c0/184a24b4eaacfce40ad9074c64fd42ac83cf74d8c8cd137718d456ced75051229e5061b8633c3366b8aada17945a7a356b337828c19da92b51ae62126575018f + languageName: node + linkType: hard + +"jsbn@npm:1.1.0": + version: 1.1.0 + resolution: "jsbn@npm:1.1.0" + checksum: 10c0/4f907fb78d7b712e11dea8c165fe0921f81a657d3443dde75359ed52eb2b5d33ce6773d97985a089f09a65edd80b11cb75c767b57ba47391fee4c969f7215c96 + languageName: node + linkType: hard + +"jsesc@npm:^2.5.1": + version: 2.5.2 + resolution: "jsesc@npm:2.5.2" + bin: + jsesc: bin/jsesc + checksum: 10c0/dbf59312e0ebf2b4405ef413ec2b25abb5f8f4d9bc5fb8d9f90381622ebca5f2af6a6aa9a8578f65903f9e33990a6dc798edd0ce5586894bf0e9e31803a1de88 + languageName: node + linkType: hard + +"jsesc@npm:~0.5.0": + version: 0.5.0 + resolution: "jsesc@npm:0.5.0" + bin: + jsesc: bin/jsesc + checksum: 10c0/f93792440ae1d80f091b65f8ceddf8e55c4bb7f1a09dee5dcbdb0db5612c55c0f6045625aa6b7e8edb2e0a4feabd80ee48616dbe2d37055573a84db3d24f96d9 + languageName: node + linkType: hard + +"json-buffer@npm:3.0.1": + version: 3.0.1 + resolution: "json-buffer@npm:3.0.1" + checksum: 10c0/0d1c91569d9588e7eef2b49b59851f297f3ab93c7b35c7c221e288099322be6b562767d11e4821da500f3219542b9afd2e54c5dc573107c1126ed1080f8e96d7 + languageName: node + linkType: hard + +"json-parse-even-better-errors@npm:^2.3.0, json-parse-even-better-errors@npm:^2.3.1": + version: 2.3.1 + resolution: "json-parse-even-better-errors@npm:2.3.1" + checksum: 10c0/140932564c8f0b88455432e0f33c4cb4086b8868e37524e07e723f4eaedb9425bdc2bafd71bd1d9765bd15fd1e2d126972bc83990f55c467168c228c24d665f3 + languageName: node + linkType: hard + +"json-parse-even-better-errors@npm:^3.0.0": + version: 
3.0.2 + resolution: "json-parse-even-better-errors@npm:3.0.2" + checksum: 10c0/147f12b005768abe9fab78d2521ce2b7e1381a118413d634a40e6d907d7d10f5e9a05e47141e96d6853af7cc36d2c834d0a014251be48791e037ff2f13d2b94b + languageName: node + linkType: hard + +"json-schema-traverse@npm:^0.4.1": + version: 0.4.1 + resolution: "json-schema-traverse@npm:0.4.1" + checksum: 10c0/108fa90d4cc6f08243aedc6da16c408daf81793bf903e9fd5ab21983cda433d5d2da49e40711da016289465ec2e62e0324dcdfbc06275a607fe3233fde4942ce + languageName: node + linkType: hard + +"json-schema-traverse@npm:^1.0.0": + version: 1.0.0 + resolution: "json-schema-traverse@npm:1.0.0" + checksum: 10c0/71e30015d7f3d6dc1c316d6298047c8ef98a06d31ad064919976583eb61e1018a60a0067338f0f79cabc00d84af3fcc489bd48ce8a46ea165d9541ba17fb30c6 + languageName: node + linkType: hard + +"json-stable-stringify-without-jsonify@npm:^1.0.1": + version: 1.0.1 + resolution: "json-stable-stringify-without-jsonify@npm:1.0.1" + checksum: 10c0/cb168b61fd4de83e58d09aaa6425ef71001bae30d260e2c57e7d09a5fd82223e2f22a042dedaab8db23b7d9ae46854b08bb1f91675a8be11c5cffebef5fb66a5 + languageName: node + linkType: hard + +"json5@npm:^2.1.2, json5@npm:^2.2.3": + version: 2.2.3 + resolution: "json5@npm:2.2.3" + bin: + json5: lib/cli.js + checksum: 10c0/5a04eed94810fa55c5ea138b2f7a5c12b97c3750bc63d11e511dcecbfef758003861522a070c2272764ee0f4e3e323862f386945aeb5b85b87ee43f084ba586c + languageName: node + linkType: hard + +"jsonfile@npm:^6.0.1": + version: 6.1.0 + resolution: "jsonfile@npm:6.1.0" + dependencies: + graceful-fs: "npm:^4.1.6" + universalify: "npm:^2.0.0" + dependenciesMeta: + graceful-fs: + optional: true + checksum: 10c0/4f95b5e8a5622b1e9e8f33c96b7ef3158122f595998114d1e7f03985649ea99cb3cd99ce1ed1831ae94c8c8543ab45ebd044207612f31a56fd08462140e46865 + languageName: node + linkType: hard + +"jsx-ast-utils@npm:^2.4.1 || ^3.0.0": + version: 3.3.5 + resolution: "jsx-ast-utils@npm:3.3.5" + dependencies: + array-includes: "npm:^3.1.6" + array.prototype.flat: "npm:^1.3.1" + object.assign: "npm:^4.1.4" + object.values: "npm:^1.1.6" + checksum: 10c0/a32679e9cb55469cb6d8bbc863f7d631b2c98b7fc7bf172629261751a6e7bc8da6ae374ddb74d5fbd8b06cf0eb4572287b259813d92b36e384024ed35e4c13e1 + languageName: node + linkType: hard + +"katex@npm:^0.16.9": + version: 0.16.11 + resolution: "katex@npm:0.16.11" + dependencies: + commander: "npm:^8.3.0" + bin: + katex: cli.js + checksum: 10c0/be405d45d7228bbfeecd491e0f74d9da0066b5e7b457e3f1dc833de5b63f9e98e40d2ef6b46e1cbe577490a43338c043851da032c45aeec0cc03ad431ef6fd83 + languageName: node + linkType: hard + +"keyv@npm:^4.5.3": + version: 4.5.4 + resolution: "keyv@npm:4.5.4" + dependencies: + json-buffer: "npm:3.0.1" + checksum: 10c0/aa52f3c5e18e16bb6324876bb8b59dd02acf782a4b789c7b2ae21107fab95fab3890ed448d4f8dba80ce05391eeac4bfabb4f02a20221342982f806fa2cf271e + languageName: node + linkType: hard + +"khroma@npm:^2.0.0": + version: 2.1.0 + resolution: "khroma@npm:2.1.0" + checksum: 10c0/634d98753ff5d2540491cafeb708fc98de0d43f4e6795256d5c8f6e3ad77de93049ea41433928fda3697adf7bbe6fe27351858f6d23b78f8b5775ef314c59891 + languageName: node + linkType: hard + +"kind-of@npm:^6.0.0, kind-of@npm:^6.0.2": + version: 6.0.3 + resolution: "kind-of@npm:6.0.3" + checksum: 10c0/61cdff9623dabf3568b6445e93e31376bee1cdb93f8ba7033d86022c2a9b1791a1d9510e026e6465ebd701a6dd2f7b0808483ad8838341ac52f003f512e0b4c4 + languageName: node + linkType: hard + +"kleur@npm:^3.0.3": + version: 3.0.3 + resolution: "kleur@npm:3.0.3" + checksum: 
10c0/cd3a0b8878e7d6d3799e54340efe3591ca787d9f95f109f28129bdd2915e37807bf8918bb295ab86afb8c82196beec5a1adcaf29042ce3f2bd932b038fe3aa4b + languageName: node + linkType: hard + +"kleur@npm:^4.0.3": + version: 4.1.5 + resolution: "kleur@npm:4.1.5" + checksum: 10c0/e9de6cb49657b6fa70ba2d1448fd3d691a5c4370d8f7bbf1c2f64c24d461270f2117e1b0afe8cb3114f13bbd8e51de158c2a224953960331904e636a5e4c0f2a + languageName: node + linkType: hard + +"klona@npm:^2.0.4": + version: 2.0.6 + resolution: "klona@npm:2.0.6" + checksum: 10c0/94eed2c6c2ce99f409df9186a96340558897b3e62a85afdc1ee39103954d2ebe1c1c4e9fe2b0952771771fa96d70055ede8b27962a7021406374fdb695fd4d01 + languageName: node + linkType: hard + +"latest-version@npm:^7.0.0": + version: 7.0.0 + resolution: "latest-version@npm:7.0.0" + dependencies: + package-json: "npm:^8.1.0" + checksum: 10c0/68045f5e419e005c12e595ae19687dd88317dd0108b83a8773197876622c7e9d164fe43aacca4f434b2cba105c92848b89277f658eabc5d50e81fb743bbcddb1 + languageName: node + linkType: hard + +"launch-editor@npm:^2.6.0": + version: 2.8.1 + resolution: "launch-editor@npm:2.8.1" + dependencies: + picocolors: "npm:^1.0.0" + shell-quote: "npm:^1.8.1" + checksum: 10c0/e18fcda6617a995306602871c7a71ddcfdd82d88a57508ae970be86bfb6685f131cf9ddb8896df4e8e4cde6d0e2d14318d2b41314eaae6abf03ca205948daa27 + languageName: node + linkType: hard + +"layout-base@npm:^1.0.0": + version: 1.0.2 + resolution: "layout-base@npm:1.0.2" + checksum: 10c0/2a55d0460fd9f6ed53d7e301b9eb3dea19bda03815d616a40665ce6dc75c1f4d62e1ca19a897da1cfaf6de1b91de59cd6f2f79ba1258f3d7fccc7d46ca7f3337 + languageName: node + linkType: hard + +"leven@npm:^3.1.0": + version: 3.1.0 + resolution: "leven@npm:3.1.0" + checksum: 10c0/cd778ba3fbab0f4d0500b7e87d1f6e1f041507c56fdcd47e8256a3012c98aaee371d4c15e0a76e0386107af2d42e2b7466160a2d80688aaa03e66e49949f42df + languageName: node + linkType: hard + +"levn@npm:^0.4.1": + version: 0.4.1 + resolution: "levn@npm:0.4.1" + dependencies: + prelude-ls: "npm:^1.2.1" + type-check: "npm:~0.4.0" + checksum: 10c0/effb03cad7c89dfa5bd4f6989364bfc79994c2042ec5966cb9b95990e2edee5cd8969ddf42616a0373ac49fac1403437deaf6e9050fbbaa3546093a59b9ac94e + languageName: node + linkType: hard + +"lilconfig@npm:^3.1.1": + version: 3.1.2 + resolution: "lilconfig@npm:3.1.2" + checksum: 10c0/f059630b1a9bddaeba83059db00c672b64dc14074e9f232adce32b38ca1b5686ab737eb665c5ba3c32f147f0002b4bee7311ad0386a9b98547b5623e87071fbe + languageName: node + linkType: hard + +"lines-and-columns@npm:^1.1.6": + version: 1.2.4 + resolution: "lines-and-columns@npm:1.2.4" + checksum: 10c0/3da6ee62d4cd9f03f5dc90b4df2540fb85b352081bee77fe4bbcd12c9000ead7f35e0a38b8d09a9bb99b13223446dd8689ff3c4959807620726d788701a83d2d + languageName: node + linkType: hard + +"lines-and-columns@npm:^2.0.3": + version: 2.0.4 + resolution: "lines-and-columns@npm:2.0.4" + checksum: 10c0/4db28bf065cd7ad897c0700f22d3d0d7c5ed6777e138861c601c496d545340df3fc19e18bd04ff8d95a246a245eb55685b82ca2f8c2ca53a008e9c5316250379 + languageName: node + linkType: hard + +"load-plugin@npm:^6.0.0": + version: 6.0.3 + resolution: "load-plugin@npm:6.0.3" + dependencies: + "@npmcli/config": "npm:^8.0.0" + import-meta-resolve: "npm:^4.0.0" + checksum: 10c0/cbbd4e18472a0ed543b6d60e867a1e2aae385205fcaa76d300ab5a72697e057422cd1e6ff2ba19755c55a86b3d53e53b81a814c757be720895ba525d05f75797 + languageName: node + linkType: hard + +"loader-runner@npm:^4.2.0": + version: 4.3.0 + resolution: "loader-runner@npm:4.3.0" + checksum: 
10c0/a44d78aae0907a72f73966fe8b82d1439c8c485238bd5a864b1b9a2a3257832effa858790241e6b37876b5446a78889adf2fcc8dd897ce54c089ecc0a0ce0bf0 + languageName: node + linkType: hard + +"loader-utils@npm:^2.0.0": + version: 2.0.4 + resolution: "loader-utils@npm:2.0.4" + dependencies: + big.js: "npm:^5.2.2" + emojis-list: "npm:^3.0.0" + json5: "npm:^2.1.2" + checksum: 10c0/d5654a77f9d339ec2a03d88221a5a695f337bf71eb8dea031b3223420bb818964ba8ed0069145c19b095f6c8b8fd386e602a3fc7ca987042bd8bb1dcc90d7100 + languageName: node + linkType: hard + +"loader-utils@npm:^3.2.0": + version: 3.3.1 + resolution: "loader-utils@npm:3.3.1" + checksum: 10c0/f2af4eb185ac5bf7e56e1337b666f90744e9f443861ac521b48f093fb9e8347f191c8960b4388a3365147d218913bc23421234e7788db69f385bacfefa0b4758 + languageName: node + linkType: hard + +"locate-path@npm:^3.0.0": + version: 3.0.0 + resolution: "locate-path@npm:3.0.0" + dependencies: + p-locate: "npm:^3.0.0" + path-exists: "npm:^3.0.0" + checksum: 10c0/3db394b7829a7fe2f4fbdd25d3c4689b85f003c318c5da4052c7e56eed697da8f1bce5294f685c69ff76e32cba7a33629d94396976f6d05fb7f4c755c5e2ae8b + languageName: node + linkType: hard + +"locate-path@npm:^6.0.0": + version: 6.0.0 + resolution: "locate-path@npm:6.0.0" + dependencies: + p-locate: "npm:^5.0.0" + checksum: 10c0/d3972ab70dfe58ce620e64265f90162d247e87159b6126b01314dd67be43d50e96a50b517bce2d9452a79409c7614054c277b5232377de50416564a77ac7aad3 + languageName: node + linkType: hard + +"locate-path@npm:^7.1.0": + version: 7.2.0 + resolution: "locate-path@npm:7.2.0" + dependencies: + p-locate: "npm:^6.0.0" + checksum: 10c0/139e8a7fe11cfbd7f20db03923cacfa5db9e14fa14887ea121345597472b4a63c1a42a8a5187defeeff6acf98fd568da7382aa39682d38f0af27433953a97751 + languageName: node + linkType: hard + +"lodash-es@npm:^4.17.21": + version: 4.17.21 + resolution: "lodash-es@npm:4.17.21" + checksum: 10c0/fb407355f7e6cd523a9383e76e6b455321f0f153a6c9625e21a8827d10c54c2a2341bd2ae8d034358b60e07325e1330c14c224ff582d04612a46a4f0479ff2f2 + languageName: node + linkType: hard + +"lodash.debounce@npm:^4.0.8": + version: 4.0.8 + resolution: "lodash.debounce@npm:4.0.8" + checksum: 10c0/762998a63e095412b6099b8290903e0a8ddcb353ac6e2e0f2d7e7d03abd4275fe3c689d88960eb90b0dde4f177554d51a690f22a343932ecbc50a5d111849987 + languageName: node + linkType: hard + +"lodash.memoize@npm:^4.1.2": + version: 4.1.2 + resolution: "lodash.memoize@npm:4.1.2" + checksum: 10c0/c8713e51eccc650422716a14cece1809cfe34bc5ab5e242b7f8b4e2241c2483697b971a604252807689b9dd69bfe3a98852e19a5b89d506b000b4187a1285df8 + languageName: node + linkType: hard + +"lodash.merge@npm:^4.6.0, lodash.merge@npm:^4.6.2": + version: 4.6.2 + resolution: "lodash.merge@npm:4.6.2" + checksum: 10c0/402fa16a1edd7538de5b5903a90228aa48eb5533986ba7fa26606a49db2572bf414ff73a2c9f5d5fd36b31c46a5d5c7e1527749c07cbcf965ccff5fbdf32c506 + languageName: node + linkType: hard + +"lodash.uniq@npm:^4.5.0": + version: 4.5.0 + resolution: "lodash.uniq@npm:4.5.0" + checksum: 10c0/262d400bb0952f112162a320cc4a75dea4f66078b9e7e3075ffbc9c6aa30b3e9df3cf20e7da7d566105e1ccf7804e4fbd7d804eee0b53de05d83f16ffbf41c5e + languageName: node + linkType: hard + +"lodash@npm:^4.17.20, lodash@npm:^4.17.21": + version: 4.17.21 + resolution: "lodash@npm:4.17.21" + checksum: 10c0/d8cbea072bb08655bb4c989da418994b073a608dffa608b09ac04b43a791b12aeae7cd7ad919aa4c925f33b48490b5cfe6c1f71d827956071dae2e7bb3a6b74c + languageName: node + linkType: hard + +"loglevel-colored-level-prefix@npm:^1.0.0": + version: 1.0.0 + resolution: "loglevel-colored-level-prefix@npm:1.0.0" + 
+  dependencies:
+    chalk: "npm:^1.1.3"
+    loglevel: "npm:^1.4.1"
+  checksum: 10c0/00fd732866de326001fa2d9b87166a4fbe41c7e0050bef0d612a8852e0ad09ad3f104aa30311e761d9d75f64d5afb6568e839826900baee0a24b7197012774d3
+  languageName: node
+  linkType: hard
+
+"loglevel@npm:^1.4.1":
+  version: 1.9.1
+  resolution: "loglevel@npm:1.9.1"
+  checksum: 10c0/152f0501cea367cf998c844a38b19f0b5af555756ad7d8650214a1f8c6a5b045e31b8cf5dae27d28339a061624ce3f618aadb333aed386cac041d6ddc5101a39
+  languageName: node
+  linkType: hard
+
+"longest-streak@npm:^3.0.0":
+  version: 3.1.0
+  resolution: "longest-streak@npm:3.1.0"
+  checksum: 10c0/7c2f02d0454b52834d1bcedef79c557bd295ee71fdabb02d041ff3aa9da48a90b5df7c0409156dedbc4df9b65da18742652aaea4759d6ece01f08971af6a7eaa
+  languageName: node
+  linkType: hard
+
+"loose-envify@npm:^1.0.0, loose-envify@npm:^1.1.0, loose-envify@npm:^1.2.0, loose-envify@npm:^1.3.1, loose-envify@npm:^1.4.0":
+  version: 1.4.0
+  resolution: "loose-envify@npm:1.4.0"
+  dependencies:
+    js-tokens: "npm:^3.0.0 || ^4.0.0"
+  bin:
+    loose-envify: cli.js
+  checksum: 10c0/655d110220983c1a4b9c0c679a2e8016d4b67f6e9c7b5435ff5979ecdb20d0813f4dec0a08674fcbdd4846a3f07edbb50a36811fd37930b94aaa0d9daceb017e
+  languageName: node
+  linkType: hard
+
+"lower-case@npm:^2.0.2":
+  version: 2.0.2
+  resolution: "lower-case@npm:2.0.2"
+  dependencies:
+    tslib: "npm:^2.0.3"
+  checksum: 10c0/3d925e090315cf7dc1caa358e0477e186ffa23947740e4314a7429b6e62d72742e0bbe7536a5ae56d19d7618ce998aba05caca53c2902bd5742fdca5fc57fd7b
+  languageName: node
+  linkType: hard
+
+"lowercase-keys@npm:^3.0.0":
+  version: 3.0.0
+  resolution: "lowercase-keys@npm:3.0.0"
+  checksum: 10c0/ef62b9fa5690ab0a6e4ef40c94efce68e3ed124f583cc3be38b26ff871da0178a28b9a84ce0c209653bb25ca135520ab87fea7cd411a54ac4899cb2f30501430
+  languageName: node
+  linkType: hard
+
+"lru-cache@npm:^10.0.1, lru-cache@npm:^10.2.0":
+  version: 10.4.3
+  resolution: "lru-cache@npm:10.4.3"
+  checksum: 10c0/ebd04fbca961e6c1d6c0af3799adcc966a1babe798f685bb84e6599266599cd95d94630b10262f5424539bc4640107e8a33aa28585374abf561d30d16f4b39fb
+  languageName: node
+  linkType: hard
+
+"lru-cache@npm:^5.1.1":
+  version: 5.1.1
+  resolution: "lru-cache@npm:5.1.1"
+  dependencies:
+    yallist: "npm:^3.0.2"
+  checksum: 10c0/89b2ef2ef45f543011e38737b8a8622a2f8998cddf0e5437174ef8f1f70a8b9d14a918ab3e232cb3ba343b7abddffa667f0b59075b2b80e6b4d63c3de6127482
+  languageName: node
+  linkType: hard
+
+"make-fetch-happen@npm:^13.0.0":
+  version: 13.0.1
+  resolution: "make-fetch-happen@npm:13.0.1"
+  dependencies:
+    "@npmcli/agent": "npm:^2.0.0"
+    cacache: "npm:^18.0.0"
+    http-cache-semantics: "npm:^4.1.1"
+    is-lambda: "npm:^1.0.1"
+    minipass: "npm:^7.0.2"
+    minipass-fetch: "npm:^3.0.0"
+    minipass-flush: "npm:^1.0.5"
+    minipass-pipeline: "npm:^1.2.4"
+    negotiator: "npm:^0.6.3"
+    proc-log: "npm:^4.2.0"
+    promise-retry: "npm:^2.0.1"
+    ssri: "npm:^10.0.0"
+  checksum: 10c0/df5f4dbb6d98153b751bccf4dc4cc500de85a96a9331db9805596c46aa9f99d9555983954e6c1266d9f981ae37a9e4647f42b9a4bb5466f867f4012e582c9e7e
+  languageName: node
+  linkType: hard
+
+"markdown-extensions@npm:^2.0.0":
+  version: 2.0.0
+  resolution: "markdown-extensions@npm:2.0.0"
+  checksum: 10c0/406139da2aa0d5ebad86195c8e8c02412f873c452b4c087ae7bc767af37956141be449998223bb379eea179b5fd38dfa610602b6f29c22ddab5d51e627a7e41d
+  languageName: node
+  linkType: hard
+
+"markdown-table@npm:^3.0.0":
+  version: 3.0.3
+  resolution: "markdown-table@npm:3.0.3"
+  checksum: 10c0/47433a3f31e4637a184e38e873ab1d2fadfb0106a683d466fec329e99a2d8dfa09f091fa42202c6f13ec94aef0199f449a684b28042c636f2edbc1b7e1811dcd
+  languageName: node
+  linkType: hard
+
+"mdast-util-directive@npm:^3.0.0":
+  version: 3.0.0
+  resolution: "mdast-util-directive@npm:3.0.0"
+  dependencies:
+    "@types/mdast": "npm:^4.0.0"
+    "@types/unist": "npm:^3.0.0"
+    devlop: "npm:^1.0.0"
+    mdast-util-from-markdown: "npm:^2.0.0"
+    mdast-util-to-markdown: "npm:^2.0.0"
+    parse-entities: "npm:^4.0.0"
+    stringify-entities: "npm:^4.0.0"
+    unist-util-visit-parents: "npm:^6.0.0"
+  checksum: 10c0/4a71b27f5f0c4ead5293a12d4118d4d832951ac0efdeba4af2dd78f5679f9cabee80feb3619f219a33674c12df3780def1bd3150d7298aaf0ef734f0dfbab999
+  languageName: node
+  linkType: hard
+
+"mdast-util-find-and-replace@npm:^3.0.0, mdast-util-find-and-replace@npm:^3.0.1":
+  version: 3.0.1
+  resolution: "mdast-util-find-and-replace@npm:3.0.1"
+  dependencies:
+    "@types/mdast": "npm:^4.0.0"
+    escape-string-regexp: "npm:^5.0.0"
+    unist-util-is: "npm:^6.0.0"
+    unist-util-visit-parents: "npm:^6.0.0"
+  checksum: 10c0/1faca98c4ee10a919f23b8cc6d818e5bb6953216a71dfd35f51066ed5d51ef86e5063b43dcfdc6061cd946e016a9f0d44a1dccadd58452cf4ed14e39377f00cb
+  languageName: node
+  linkType: hard
+
+"mdast-util-from-markdown@npm:^0.8.5":
+  version: 0.8.5
+  resolution: "mdast-util-from-markdown@npm:0.8.5"
+  dependencies:
+    "@types/mdast": "npm:^3.0.0"
+    mdast-util-to-string: "npm:^2.0.0"
+    micromark: "npm:~2.11.0"
+    parse-entities: "npm:^2.0.0"
+    unist-util-stringify-position: "npm:^2.0.0"
+  checksum: 10c0/86e7589e574378817c180f10ab602db844b6b71b7b1769314947a02ef42ac5c1435f5163d02a975ae8cdab8b6e6176acbd9188da1848ddd5f0d5e09d0291c870
+  languageName: node
+  linkType: hard
+
+"mdast-util-from-markdown@npm:^1.3.0":
+  version: 1.3.1
+  resolution: "mdast-util-from-markdown@npm:1.3.1"
+  dependencies:
+    "@types/mdast": "npm:^3.0.0"
+    "@types/unist": "npm:^2.0.0"
+    decode-named-character-reference: "npm:^1.0.0"
+    mdast-util-to-string: "npm:^3.1.0"
+    micromark: "npm:^3.0.0"
+    micromark-util-decode-numeric-character-reference: "npm:^1.0.0"
+    micromark-util-decode-string: "npm:^1.0.0"
+    micromark-util-normalize-identifier: "npm:^1.0.0"
+    micromark-util-symbol: "npm:^1.0.0"
+    micromark-util-types: "npm:^1.0.0"
+    unist-util-stringify-position: "npm:^3.0.0"
+    uvu: "npm:^0.5.0"
+  checksum: 10c0/f4e901bf2a2e93fe35a339e0cff581efacce2f7117cd5652e9a270847bd7e2508b3e717b7b4156af54d4f896d63033e06ff9fafbf59a1d46fe17dd5e2a3f7846
+  languageName: node
+  linkType: hard
+
+"mdast-util-from-markdown@npm:^2.0.0":
+  version: 2.0.1
+  resolution: "mdast-util-from-markdown@npm:2.0.1"
+  dependencies:
+    "@types/mdast": "npm:^4.0.0"
+    "@types/unist": "npm:^3.0.0"
+    decode-named-character-reference: "npm:^1.0.0"
+    devlop: "npm:^1.0.0"
+    mdast-util-to-string: "npm:^4.0.0"
+    micromark: "npm:^4.0.0"
+    micromark-util-decode-numeric-character-reference: "npm:^2.0.0"
+    micromark-util-decode-string: "npm:^2.0.0"
+    micromark-util-normalize-identifier: "npm:^2.0.0"
+    micromark-util-symbol: "npm:^2.0.0"
+    micromark-util-types: "npm:^2.0.0"
+    unist-util-stringify-position: "npm:^4.0.0"
+  checksum: 10c0/496596bc6419200ff6258531a0ebcaee576a5c169695f5aa296a79a85f2a221bb9247d565827c709a7c2acfb56ae3c3754bf483d86206617bd299a9658c8121c
+  languageName: node
+  linkType: hard
+
+"mdast-util-frontmatter@npm:^2.0.0":
+  version: 2.0.1
+  resolution: "mdast-util-frontmatter@npm:2.0.1"
+  dependencies:
+    "@types/mdast": "npm:^4.0.0"
+    devlop: "npm:^1.0.0"
+    escape-string-regexp: "npm:^5.0.0"
+    mdast-util-from-markdown: "npm:^2.0.0"
+    mdast-util-to-markdown: "npm:^2.0.0"
+    micromark-extension-frontmatter: "npm:^2.0.0"
+  checksum: 10c0/d9b0b70dd9c574cc0220d4e05dd8e9d86ac972a6a5af9e0c49c839b31cb750d4313445cfbbdf9264a7fbe3f8c8d920b45358b8500f4286e6b9dc830095b25b9a
+  languageName: node
+  linkType: hard
+
+"mdast-util-gfm-autolink-literal@npm:^2.0.0":
+  version: 2.0.1
+  resolution: "mdast-util-gfm-autolink-literal@npm:2.0.1"
+  dependencies:
+    "@types/mdast": "npm:^4.0.0"
+    ccount: "npm:^2.0.0"
+    devlop: "npm:^1.0.0"
+    mdast-util-find-and-replace: "npm:^3.0.0"
+    micromark-util-character: "npm:^2.0.0"
+  checksum: 10c0/963cd22bd42aebdec7bdd0a527c9494d024d1ad0739c43dc040fee35bdfb5e29c22564330a7418a72b5eab51d47a6eff32bc0255ef3ccb5cebfe8970e91b81b6
+  languageName: node
+  linkType: hard
+
+"mdast-util-gfm-footnote@npm:^2.0.0":
+  version: 2.0.0
+  resolution: "mdast-util-gfm-footnote@npm:2.0.0"
+  dependencies:
+    "@types/mdast": "npm:^4.0.0"
+    devlop: "npm:^1.1.0"
+    mdast-util-from-markdown: "npm:^2.0.0"
+    mdast-util-to-markdown: "npm:^2.0.0"
+    micromark-util-normalize-identifier: "npm:^2.0.0"
+  checksum: 10c0/c673b22bea24740235e74cfd66765b41a2fa540334f7043fa934b94938b06b7d3c93f2d3b33671910c5492b922c0cc98be833be3b04cfed540e0679650a6d2de
+  languageName: node
+  linkType: hard
+
+"mdast-util-gfm-strikethrough@npm:^2.0.0":
+  version: 2.0.0
+  resolution: "mdast-util-gfm-strikethrough@npm:2.0.0"
+  dependencies:
+    "@types/mdast": "npm:^4.0.0"
+    mdast-util-from-markdown: "npm:^2.0.0"
+    mdast-util-to-markdown: "npm:^2.0.0"
+  checksum: 10c0/b053e93d62c7545019bd914271ea9e5667ad3b3b57d16dbf68e56fea39a7e19b4a345e781312714eb3d43fdd069ff7ee22a3ca7f6149dfa774554f19ce3ac056
+  languageName: node
+  linkType: hard
+
+"mdast-util-gfm-table@npm:^2.0.0":
+  version: 2.0.0
+  resolution: "mdast-util-gfm-table@npm:2.0.0"
+  dependencies:
+    "@types/mdast": "npm:^4.0.0"
+    devlop: "npm:^1.0.0"
+    markdown-table: "npm:^3.0.0"
+    mdast-util-from-markdown: "npm:^2.0.0"
+    mdast-util-to-markdown: "npm:^2.0.0"
+  checksum: 10c0/128af47c503a53bd1c79f20642561e54a510ad5e2db1e418d28fefaf1294ab839e6c838e341aef5d7e404f9170b9ca3d1d89605f234efafde93ee51174a6e31e
+  languageName: node
+  linkType: hard
+
+"mdast-util-gfm-task-list-item@npm:^2.0.0":
+  version: 2.0.0
+  resolution: "mdast-util-gfm-task-list-item@npm:2.0.0"
+  dependencies:
+    "@types/mdast": "npm:^4.0.0"
+    devlop: "npm:^1.0.0"
+    mdast-util-from-markdown: "npm:^2.0.0"
+    mdast-util-to-markdown: "npm:^2.0.0"
+  checksum: 10c0/258d725288482b636c0a376c296431390c14b4f29588675297cb6580a8598ed311fc73ebc312acfca12cc8546f07a3a285a53a3b082712e2cbf5c190d677d834
+  languageName: node
+  linkType: hard
+
+"mdast-util-gfm@npm:^3.0.0":
+  version: 3.0.0
+  resolution: "mdast-util-gfm@npm:3.0.0"
+  dependencies:
+    mdast-util-from-markdown: "npm:^2.0.0"
+    mdast-util-gfm-autolink-literal: "npm:^2.0.0"
+    mdast-util-gfm-footnote: "npm:^2.0.0"
+    mdast-util-gfm-strikethrough: "npm:^2.0.0"
+    mdast-util-gfm-table: "npm:^2.0.0"
+    mdast-util-gfm-task-list-item: "npm:^2.0.0"
+    mdast-util-to-markdown: "npm:^2.0.0"
+  checksum: 10c0/91596fe9bf3e4a0c546d0c57f88106c17956d9afbe88ceb08308e4da2388aff64489d649ddad599caecfdf755fc3ae4c9b82c219b85281bc0586b67599881fca
+  languageName: node
+  linkType: hard
+
+"mdast-util-mdx-expression@npm:^2.0.0":
+  version: 2.0.0
+  resolution: "mdast-util-mdx-expression@npm:2.0.0"
+  dependencies:
+    "@types/estree-jsx": "npm:^1.0.0"
+    "@types/hast": "npm:^3.0.0"
+    "@types/mdast": "npm:^4.0.0"
+    devlop: "npm:^1.0.0"
+    mdast-util-from-markdown: "npm:^2.0.0"
+    mdast-util-to-markdown: "npm:^2.0.0"
+  checksum: 10c0/512848cbc44b9dc7cffc1bb3f95f7e67f0d6562870e56a67d25647f475d411e136b915ba417c8069fb36eac1839d0209fb05fb323d377f35626a82fcb0879363
+  languageName: node
+  linkType: hard
+
+"mdast-util-mdx-jsx@npm:^3.0.0":
+  version: 3.1.2
+  resolution: "mdast-util-mdx-jsx@npm:3.1.2"
+  dependencies:
+    "@types/estree-jsx": "npm:^1.0.0"
+    "@types/hast": "npm:^3.0.0"
+    "@types/mdast": "npm:^4.0.0"
+    "@types/unist": "npm:^3.0.0"
+    ccount: "npm:^2.0.0"
+    devlop: "npm:^1.1.0"
+    mdast-util-from-markdown: "npm:^2.0.0"
+    mdast-util-to-markdown: "npm:^2.0.0"
+    parse-entities: "npm:^4.0.0"
+    stringify-entities: "npm:^4.0.0"
+    unist-util-remove-position: "npm:^5.0.0"
+    unist-util-stringify-position: "npm:^4.0.0"
+    vfile-message: "npm:^4.0.0"
+  checksum: 10c0/855b60c3db9bde2fe142bd366597f7bd5892fc288428ba054e26ffcffc07bfe5648c0792d614ba6e08b1eab9784ffc3c1267cf29dfc6db92b419d68b5bcd487d
+  languageName: node
+  linkType: hard
+
+"mdast-util-mdx@npm:^3.0.0":
+  version: 3.0.0
+  resolution: "mdast-util-mdx@npm:3.0.0"
+  dependencies:
+    mdast-util-from-markdown: "npm:^2.0.0"
+    mdast-util-mdx-expression: "npm:^2.0.0"
+    mdast-util-mdx-jsx: "npm:^3.0.0"
+    mdast-util-mdxjs-esm: "npm:^2.0.0"
+    mdast-util-to-markdown: "npm:^2.0.0"
+  checksum: 10c0/4faea13f77d6bc9aa64ee41a5e4779110b73444a17fda363df6ebe880ecfa58b321155b71f8801c3faa6d70d6222a32a00cbd6dbf5fad8db417f4688bc9c74e1
+  languageName: node
+  linkType: hard
+
+"mdast-util-mdxjs-esm@npm:^2.0.0":
+  version: 2.0.1
+  resolution: "mdast-util-mdxjs-esm@npm:2.0.1"
+  dependencies:
+    "@types/estree-jsx": "npm:^1.0.0"
+    "@types/hast": "npm:^3.0.0"
+    "@types/mdast": "npm:^4.0.0"
+    devlop: "npm:^1.0.0"
+    mdast-util-from-markdown: "npm:^2.0.0"
+    mdast-util-to-markdown: "npm:^2.0.0"
+  checksum: 10c0/5bda92fc154141705af2b804a534d891f28dac6273186edf1a4c5e3f045d5b01dbcac7400d27aaf91b7e76e8dce007c7b2fdf136c11ea78206ad00bdf9db46bc
+  languageName: node
+  linkType: hard
+
+"mdast-util-phrasing@npm:^4.0.0":
+  version: 4.1.0
+  resolution: "mdast-util-phrasing@npm:4.1.0"
+  dependencies:
+    "@types/mdast": "npm:^4.0.0"
+    unist-util-is: "npm:^6.0.0"
+  checksum: 10c0/bf6c31d51349aa3d74603d5e5a312f59f3f65662ed16c58017169a5fb0f84ca98578f626c5ee9e4aa3e0a81c996db8717096705521bddb4a0185f98c12c9b42f
+  languageName: node
+  linkType: hard
+
+"mdast-util-to-hast@npm:^13.0.0":
+  version: 13.2.0
+  resolution: "mdast-util-to-hast@npm:13.2.0"
+  dependencies:
+    "@types/hast": "npm:^3.0.0"
+    "@types/mdast": "npm:^4.0.0"
+    "@ungap/structured-clone": "npm:^1.0.0"
+    devlop: "npm:^1.0.0"
+    micromark-util-sanitize-uri: "npm:^2.0.0"
+    trim-lines: "npm:^3.0.0"
+    unist-util-position: "npm:^5.0.0"
+    unist-util-visit: "npm:^5.0.0"
+    vfile: "npm:^6.0.0"
+  checksum: 10c0/9ee58def9287df8350cbb6f83ced90f9c088d72d4153780ad37854f87144cadc6f27b20347073b285173b1649b0723ddf0b9c78158608a804dcacb6bda6e1816
+  languageName: node
+  linkType: hard
+
+"mdast-util-to-markdown@npm:^2.0.0":
+  version: 2.1.0
+  resolution: "mdast-util-to-markdown@npm:2.1.0"
+  dependencies:
+    "@types/mdast": "npm:^4.0.0"
+    "@types/unist": "npm:^3.0.0"
+    longest-streak: "npm:^3.0.0"
+    mdast-util-phrasing: "npm:^4.0.0"
+    mdast-util-to-string: "npm:^4.0.0"
+    micromark-util-decode-string: "npm:^2.0.0"
+    unist-util-visit: "npm:^5.0.0"
+    zwitch: "npm:^2.0.0"
+  checksum: 10c0/8bd37a9627a438ef6418d6642661904d0cc03c5c732b8b018a8e238ef5cc82fe8aef1940b19c6f563245e58b9659f35e527209bd3fe145f3c723ba14d18fc3e6
+  languageName: node
+  linkType: hard
+
+"mdast-util-to-string@npm:^2.0.0":
+  version: 2.0.0
+  resolution: "mdast-util-to-string@npm:2.0.0"
+  checksum: 10c0/a4231085133cdfec24644b694c13661e5a01d26716be0105b6792889faa04b8030e4abbf72d4be3363098b2b38b2b98f1f1f1f0858eb6580dc04e2aca1436a37
+  languageName: node
+  linkType: hard
+
+"mdast-util-to-string@npm:^3.1.0":
+  version: 3.2.0
+  resolution: "mdast-util-to-string@npm:3.2.0"
+  dependencies:
+    "@types/mdast": "npm:^3.0.0"
+  checksum: 10c0/112f4bf0f6758dcb95deffdcf37afba7eaecdfe2ee13252de031723094d4d55220e147326690a8b91244758e2d678e7aeb1fdd0fa6ef3317c979bc42effd9a21
+  languageName: node
+  linkType: hard
+
+"mdast-util-to-string@npm:^4.0.0":
+  version: 4.0.0
+  resolution: "mdast-util-to-string@npm:4.0.0"
+  dependencies:
+    "@types/mdast": "npm:^4.0.0"
+  checksum: 10c0/2d3c1af29bf3fe9c20f552ee9685af308002488f3b04b12fa66652c9718f66f41a32f8362aa2d770c3ff464c034860b41715902ada2306bb0a055146cef064d7
+  languageName: node
+  linkType: hard
+
+"mdn-data@npm:2.0.28":
+  version: 2.0.28
+  resolution: "mdn-data@npm:2.0.28"
+  checksum: 10c0/20000932bc4cd1cde9cba4e23f08cc4f816398af4c15ec81040ed25421d6bf07b5cf6b17095972577fb498988f40f4cb589e3169b9357bb436a12d8e07e5ea7b
+  languageName: node
+  linkType: hard
+
+"mdn-data@npm:2.0.30":
+  version: 2.0.30
+  resolution: "mdn-data@npm:2.0.30"
+  checksum: 10c0/a2c472ea16cee3911ae742593715aa4c634eb3d4b9f1e6ada0902aa90df13dcbb7285d19435f3ff213ebaa3b2e0c0265c1eb0e3fb278fda7f8919f046a410cd9
+  languageName: node
+  linkType: hard
+
+"media-typer@npm:0.3.0":
+  version: 0.3.0
+  resolution: "media-typer@npm:0.3.0"
+  checksum: 10c0/d160f31246907e79fed398470285f21bafb45a62869dc469b1c8877f3f064f5eabc4bcc122f9479b8b605bc5c76187d7871cf84c4ee3ecd3e487da1993279928
+  languageName: node
+  linkType: hard
+
+"medium-zoom@npm:^1.0.8":
+  version: 1.1.0
+  resolution: "medium-zoom@npm:1.1.0"
+  checksum: 10c0/7d1f05e8eab045c33d7c04d4ee7bf04f5246cf7a720d7b5f5a51c36ab23666e363bcbb6bffae50b5948d5eb19361914cb0e26a1fce5c1fff7a266bc0217893f3
+  languageName: node
+  linkType: hard
+
+"memfs@npm:^3.1.2, memfs@npm:^3.4.3":
+  version: 3.5.3
+  resolution: "memfs@npm:3.5.3"
+  dependencies:
+    fs-monkey: "npm:^1.0.4"
+  checksum: 10c0/038fc81bce17ea92dde15aaa68fa0fdaf4960c721ce3ffc7c2cb87a259333f5159784ea48b3b72bf9e054254d9d0d0d5209d0fdc3d07d08653a09933b168fbd7
+  languageName: node
+  linkType: hard
+
+"merge-descriptors@npm:1.0.3":
+  version: 1.0.3
+  resolution: "merge-descriptors@npm:1.0.3"
+  checksum: 10c0/866b7094afd9293b5ea5dcd82d71f80e51514bed33b4c4e9f516795dc366612a4cbb4dc94356e943a8a6914889a914530badff27f397191b9b75cda20b6bae93
+  languageName: node
+  linkType: hard
+
+"merge-stream@npm:^2.0.0":
+  version: 2.0.0
+  resolution: "merge-stream@npm:2.0.0"
+  checksum: 10c0/867fdbb30a6d58b011449b8885601ec1690c3e41c759ecd5a9d609094f7aed0096c37823ff4a7190ef0b8f22cc86beb7049196ff68c016e3b3c671d0dac91ce5
+  languageName: node
+  linkType: hard
+
+"merge2@npm:^1.3.0, merge2@npm:^1.4.1":
+  version: 1.4.1
+  resolution: "merge2@npm:1.4.1"
+  checksum: 10c0/254a8a4605b58f450308fc474c82ac9a094848081bf4c06778200207820e5193726dc563a0d2c16468810516a5c97d9d3ea0ca6585d23c58ccfff2403e8dbbeb
+  languageName: node
+  linkType: hard
+
+"mermaid@npm:^10.4.0":
+  version: 10.9.1
+  resolution: "mermaid@npm:10.9.1"
+  dependencies:
+    "@braintree/sanitize-url": "npm:^6.0.1"
+    "@types/d3-scale": "npm:^4.0.3"
+    "@types/d3-scale-chromatic": "npm:^3.0.0"
+    cytoscape: "npm:^3.28.1"
+    cytoscape-cose-bilkent: "npm:^4.1.0"
+    d3: "npm:^7.4.0"
+    d3-sankey: "npm:^0.12.3"
+    dagre-d3-es: "npm:7.0.10"
+    dayjs: "npm:^1.11.7"
+    dompurify: "npm:^3.0.5"
+    elkjs: "npm:^0.9.0"
+    katex: "npm:^0.16.9"
+    khroma: "npm:^2.0.0"
+    lodash-es: "npm:^4.17.21"
+    mdast-util-from-markdown: "npm:^1.3.0"
+    non-layered-tidy-tree-layout: "npm:^2.0.2"
+    stylis: "npm:^4.1.3"
+    ts-dedent: "npm:^2.2.0"
+    uuid: "npm:^9.0.0"
+    web-worker: "npm:^1.2.0"
+  checksum: 10c0/034f326682e3e478e4bd85e418cfef00773db4432301b858247c8d4bf813e67fa1901e8548fc490eafe4c9c215c9fb96dead73007ff317ee99973cf4f63c8791
+  languageName: node
+  linkType: hard
+
+"methods@npm:~1.1.2":
+  version: 1.1.2
+  resolution: "methods@npm:1.1.2"
+  checksum: 10c0/bdf7cc72ff0a33e3eede03708c08983c4d7a173f91348b4b1e4f47d4cdbf734433ad971e7d1e8c77247d9e5cd8adb81ea4c67b0a2db526b758b2233d7814b8b2
+  languageName: node
+  linkType: hard
+
+"micromark-core-commonmark@npm:^1.0.1":
+  version: 1.1.0
+  resolution: "micromark-core-commonmark@npm:1.1.0"
+  dependencies:
+    decode-named-character-reference: "npm:^1.0.0"
+    micromark-factory-destination: "npm:^1.0.0"
+    micromark-factory-label: "npm:^1.0.0"
+    micromark-factory-space: "npm:^1.0.0"
+    micromark-factory-title: "npm:^1.0.0"
+    micromark-factory-whitespace: "npm:^1.0.0"
+    micromark-util-character: "npm:^1.0.0"
+    micromark-util-chunked: "npm:^1.0.0"
+    micromark-util-classify-character: "npm:^1.0.0"
+    micromark-util-html-tag-name: "npm:^1.0.0"
+    micromark-util-normalize-identifier: "npm:^1.0.0"
+    micromark-util-resolve-all: "npm:^1.0.0"
+    micromark-util-subtokenize: "npm:^1.0.0"
+    micromark-util-symbol: "npm:^1.0.0"
+    micromark-util-types: "npm:^1.0.1"
+    uvu: "npm:^0.5.0"
+  checksum: 10c0/b3bf7b7004ce7dbb3ae151dcca4db1d12546f1b943affb2418da4b90b9ce59357373c433ee2eea4c868aee0791dafa355aeed19f5ef2b0acaf271f32f1ecbe6a
+  languageName: node
+  linkType: hard
+
+"micromark-core-commonmark@npm:^2.0.0":
+  version: 2.0.1
+  resolution: "micromark-core-commonmark@npm:2.0.1"
+  dependencies:
+    decode-named-character-reference: "npm:^1.0.0"
+    devlop: "npm:^1.0.0"
+    micromark-factory-destination: "npm:^2.0.0"
+    micromark-factory-label: "npm:^2.0.0"
+    micromark-factory-space: "npm:^2.0.0"
+    micromark-factory-title: "npm:^2.0.0"
+    micromark-factory-whitespace: "npm:^2.0.0"
+    micromark-util-character: "npm:^2.0.0"
+    micromark-util-chunked: "npm:^2.0.0"
+    micromark-util-classify-character: "npm:^2.0.0"
+    micromark-util-html-tag-name: "npm:^2.0.0"
+    micromark-util-normalize-identifier: "npm:^2.0.0"
+    micromark-util-resolve-all: "npm:^2.0.0"
+    micromark-util-subtokenize: "npm:^2.0.0"
+    micromark-util-symbol: "npm:^2.0.0"
+    micromark-util-types: "npm:^2.0.0"
+  checksum: 10c0/a0b280b1b6132f600518e72cb29a4dd1b2175b85f5ed5b25d2c5695e42b876b045971370daacbcfc6b4ce8cf7acbf78dd3a0284528fb422b450144f4b3bebe19
+  languageName: node
+  linkType: hard
+
+"micromark-extension-directive@npm:^3.0.0":
+  version: 3.0.1
+  resolution: "micromark-extension-directive@npm:3.0.1"
+  dependencies:
+    devlop: "npm:^1.0.0"
+    micromark-factory-space: "npm:^2.0.0"
+    micromark-factory-whitespace: "npm:^2.0.0"
+    micromark-util-character: "npm:^2.0.0"
+    micromark-util-symbol: "npm:^2.0.0"
+    micromark-util-types: "npm:^2.0.0"
+    parse-entities: "npm:^4.0.0"
+  checksum: 10c0/9d226fba0ce18f326d2b28cf2b981c78f6c0c7c2f85e810bf4b12a788dfa4b694386589b081da165227da573ff547238f39c5258d09954b055f167bba1af4983
+  languageName: node
+  linkType: hard
+
+"micromark-extension-frontmatter@npm:^2.0.0":
+  version: 2.0.0
+  resolution: "micromark-extension-frontmatter@npm:2.0.0"
+  dependencies:
+    fault: "npm:^2.0.0"
+    micromark-util-character: "npm:^2.0.0"
+    micromark-util-symbol: "npm:^2.0.0"
+    micromark-util-types: "npm:^2.0.0"
+  checksum: 10c0/7d0d876e598917a67146d29f536d6fbbf9d1b2401a77e2f64a3f80f934a63ff26fa94b01759c9185c24b2a91e4e6abf908fa7aa246f00a7778a6b37a17464300
+  languageName: node
+  linkType: hard
+
+"micromark-extension-gfm-autolink-literal@npm:^2.0.0":
+  version: 2.1.0
+  resolution: "micromark-extension-gfm-autolink-literal@npm:2.1.0"
+  dependencies:
+    micromark-util-character: "npm:^2.0.0"
+    micromark-util-sanitize-uri: "npm:^2.0.0"
+    micromark-util-symbol: "npm:^2.0.0"
+    micromark-util-types: "npm:^2.0.0"
+  checksum: 10c0/84e6fbb84ea7c161dfa179665dc90d51116de4c28f3e958260c0423e5a745372b7dcbc87d3cde98213b532e6812f847eef5ae561c9397d7f7da1e59872ef3efe
+  languageName: node
+  linkType: hard
+
+"micromark-extension-gfm-footnote@npm:^2.0.0":
+  version: 2.1.0
+  resolution: "micromark-extension-gfm-footnote@npm:2.1.0"
+  dependencies:
+    devlop: "npm:^1.0.0"
+    micromark-core-commonmark: "npm:^2.0.0"
+    micromark-factory-space: "npm:^2.0.0"
+    micromark-util-character: "npm:^2.0.0"
+    micromark-util-normalize-identifier: "npm:^2.0.0"
+    micromark-util-sanitize-uri: "npm:^2.0.0"
+    micromark-util-symbol: "npm:^2.0.0"
+    micromark-util-types: "npm:^2.0.0"
+  checksum: 10c0/d172e4218968b7371b9321af5cde8c77423f73b233b2b0fcf3ff6fd6f61d2e0d52c49123a9b7910612478bf1f0d5e88c75a3990dd68f70f3933fe812b9f77edc
+  languageName: node
+  linkType: hard
+
+"micromark-extension-gfm-strikethrough@npm:^2.0.0":
+  version: 2.1.0
+  resolution: "micromark-extension-gfm-strikethrough@npm:2.1.0"
+  dependencies:
+    devlop: "npm:^1.0.0"
+    micromark-util-chunked: "npm:^2.0.0"
+    micromark-util-classify-character: "npm:^2.0.0"
+    micromark-util-resolve-all: "npm:^2.0.0"
+    micromark-util-symbol: "npm:^2.0.0"
+    micromark-util-types: "npm:^2.0.0"
+  checksum: 10c0/ef4f248b865bdda71303b494671b7487808a340b25552b11ca6814dff3fcfaab9be8d294643060bbdb50f79313e4a686ab18b99cbe4d3ee8a4170fcd134234fb
+  languageName: node
+  linkType: hard
+
+"micromark-extension-gfm-table@npm:^2.0.0":
+  version: 2.1.0
+  resolution: "micromark-extension-gfm-table@npm:2.1.0"
+  dependencies:
+    devlop: "npm:^1.0.0"
+    micromark-factory-space: "npm:^2.0.0"
+    micromark-util-character: "npm:^2.0.0"
+    micromark-util-symbol: "npm:^2.0.0"
+    micromark-util-types: "npm:^2.0.0"
+  checksum: 10c0/c1b564ab68576406046d825b9574f5b4dbedbb5c44bede49b5babc4db92f015d9057dd79d8e0530f2fecc8970a695c40ac2e5e1d4435ccf3ef161038d0d1463b
+  languageName: node
+  linkType: hard
+
+"micromark-extension-gfm-tagfilter@npm:^2.0.0":
+  version: 2.0.0
+  resolution: "micromark-extension-gfm-tagfilter@npm:2.0.0"
+  dependencies:
+    micromark-util-types: "npm:^2.0.0"
+  checksum: 10c0/995558843fff137ae4e46aecb878d8a4691cdf23527dcf1e2f0157d66786be9f7bea0109c52a8ef70e68e3f930af811828ba912239438e31a9cfb9981f44d34d
+  languageName: node
+  linkType: hard
+
+"micromark-extension-gfm-task-list-item@npm:^2.0.0":
+  version: 2.1.0
+  resolution: "micromark-extension-gfm-task-list-item@npm:2.1.0"
+  dependencies:
+    devlop: "npm:^1.0.0"
+    micromark-factory-space: "npm:^2.0.0"
+    micromark-util-character: "npm:^2.0.0"
+    micromark-util-symbol: "npm:^2.0.0"
+    micromark-util-types: "npm:^2.0.0"
+  checksum: 10c0/78aa537d929e9309f076ba41e5edc99f78d6decd754b6734519ccbbfca8abd52e1c62df68d41a6ae64d2a3fc1646cea955893c79680b0b4385ced4c52296181f
+  languageName: node
+  linkType: hard
+
+"micromark-extension-gfm@npm:^3.0.0":
+  version: 3.0.0
+  resolution: "micromark-extension-gfm@npm:3.0.0"
+  dependencies:
+    micromark-extension-gfm-autolink-literal: "npm:^2.0.0"
+    micromark-extension-gfm-footnote: "npm:^2.0.0"
+    micromark-extension-gfm-strikethrough: "npm:^2.0.0"
+    micromark-extension-gfm-table: "npm:^2.0.0"
+    micromark-extension-gfm-tagfilter: "npm:^2.0.0"
+    micromark-extension-gfm-task-list-item: "npm:^2.0.0"
+    micromark-util-combine-extensions: "npm:^2.0.0"
+    micromark-util-types: "npm:^2.0.0"
+  checksum: 10c0/970e28df6ebdd7c7249f52a0dda56e0566fbfa9ae56c8eeeb2445d77b6b89d44096880cd57a1c01e7821b1f4e31009109fbaca4e89731bff7b83b8519690e5d9
+  languageName: node
+  linkType: hard
+
+"micromark-extension-mdx-expression@npm:^3.0.0":
+  version: 3.0.0
+  resolution: "micromark-extension-mdx-expression@npm:3.0.0"
+  dependencies:
+    "@types/estree": "npm:^1.0.0"
+    devlop: "npm:^1.0.0"
+    micromark-factory-mdx-expression: "npm:^2.0.0"
+    micromark-factory-space: "npm:^2.0.0"
+    micromark-util-character: "npm:^2.0.0"
+    micromark-util-events-to-acorn: "npm:^2.0.0"
+    micromark-util-symbol: "npm:^2.0.0"
+    micromark-util-types: "npm:^2.0.0"
+  checksum: 10c0/fa799c594d8ff9ecbbd28e226959c4928590cfcddb60a926d9d859d00fc7acd25684b6f78dbe6a7f0830879a402b4a3628efd40bb9df1f5846e6d2b7332715f7
+  languageName: node
+  linkType: hard
+
+"micromark-extension-mdx-jsx@npm:^3.0.0":
+  version: 3.0.0
+  resolution: "micromark-extension-mdx-jsx@npm:3.0.0"
+  dependencies:
+    "@types/acorn": "npm:^4.0.0"
+    "@types/estree": "npm:^1.0.0"
+    devlop: "npm:^1.0.0"
+    estree-util-is-identifier-name: "npm:^3.0.0"
+    micromark-factory-mdx-expression: "npm:^2.0.0"
+    micromark-factory-space: "npm:^2.0.0"
+    micromark-util-character: "npm:^2.0.0"
+    micromark-util-symbol: "npm:^2.0.0"
+    micromark-util-types: "npm:^2.0.0"
+    vfile-message: "npm:^4.0.0"
+  checksum: 10c0/18a81c8def7f3a2088dc435bba19e649c19f679464b1a01e2c680f9518820e70fb0974b8403c790aee8f44205833a280b56ba157fe5a5b2903b476c5de5ba353
+  languageName: node
+  linkType: hard
+
+"micromark-extension-mdx-md@npm:^2.0.0":
+  version: 2.0.0
+  resolution: "micromark-extension-mdx-md@npm:2.0.0"
+  dependencies:
+    micromark-util-types: "npm:^2.0.0"
+  checksum: 10c0/bae91c61273de0e5ba80a980c03470e6cd9d7924aa936f46fbda15d780704d9386e945b99eda200e087b96254fbb4271a9545d5ce02676cd6ae67886a8bf82df
+  languageName: node
+  linkType: hard
+
+"micromark-extension-mdxjs-esm@npm:^3.0.0":
+  version: 3.0.0
+  resolution: "micromark-extension-mdxjs-esm@npm:3.0.0"
+  dependencies:
+    "@types/estree": "npm:^1.0.0"
+    devlop: "npm:^1.0.0"
+    micromark-core-commonmark: "npm:^2.0.0"
+    micromark-util-character: "npm:^2.0.0"
+    micromark-util-events-to-acorn: "npm:^2.0.0"
+    micromark-util-symbol: "npm:^2.0.0"
+    micromark-util-types: "npm:^2.0.0"
+    unist-util-position-from-estree: "npm:^2.0.0"
+    vfile-message: "npm:^4.0.0"
+  checksum: 10c0/13e3f726495a960650cdedcba39198ace5bdc953ccb12c14d71fc9ed9bb88e40cc3ba9231e973f6984da3b3573e7ddb23ce409f7c16f52a8d57b608bf46c748d
+  languageName: node
+  linkType: hard
+
+"micromark-extension-mdxjs@npm:^3.0.0":
+  version: 3.0.0
+  resolution: "micromark-extension-mdxjs@npm:3.0.0"
+  dependencies:
+    acorn: "npm:^8.0.0"
+    acorn-jsx: "npm:^5.0.0"
+    micromark-extension-mdx-expression: "npm:^3.0.0"
+    micromark-extension-mdx-jsx: "npm:^3.0.0"
+    micromark-extension-mdx-md: "npm:^2.0.0"
+    micromark-extension-mdxjs-esm: "npm:^3.0.0"
+    micromark-util-combine-extensions: "npm:^2.0.0"
+    micromark-util-types: "npm:^2.0.0"
+  checksum: 10c0/fd84f036ddad0aabbc12e7f1b3e9dcfe31573bbc413c5ae903779ef0366d7a4c08193547e7ba75718c9f45654e45f52e575cfc2f23a5f89205a8a70d9a506aea
+  languageName: node
+  linkType: hard
+
+"micromark-factory-destination@npm:^1.0.0":
+  version: 1.1.0
+  resolution: "micromark-factory-destination@npm:1.1.0"
+  dependencies:
+    micromark-util-character: "npm:^1.0.0"
+    micromark-util-symbol: "npm:^1.0.0"
+    micromark-util-types: "npm:^1.0.0"
+  checksum: 10c0/71ebd9089bf0c9689b98ef42215c04032ae2701ae08c3546b663628553255dca18e5310dbdacddad3acd8de4f12a789835fff30dadc4da3c4e30387a75e6b488
+  languageName: node
+  linkType: hard
+
+"micromark-factory-destination@npm:^2.0.0":
+  version: 2.0.0
+  resolution: "micromark-factory-destination@npm:2.0.0"
+  dependencies:
+    micromark-util-character: "npm:^2.0.0"
+    micromark-util-symbol: "npm:^2.0.0"
+    micromark-util-types: "npm:^2.0.0"
+  checksum: 10c0/b73492f687d41a6a379159c2f3acbf813042346bcea523d9041d0cc6124e6715f0779dbb2a0b3422719e9764c3b09f9707880aa159557e3cb4aeb03b9d274915
+  languageName: node
+  linkType: hard
+
+"micromark-factory-label@npm:^1.0.0":
+  version: 1.1.0
+  resolution: "micromark-factory-label@npm:1.1.0"
+  dependencies:
+    micromark-util-character: "npm:^1.0.0"
+    micromark-util-symbol: "npm:^1.0.0"
+    micromark-util-types: "npm:^1.0.0"
+    uvu: "npm:^0.5.0"
+  checksum: 10c0/5e2cd2d8214bb92a34dfcedf9c7aecf565e3648650a3a6a0495ededf15f2318dd214dc069e3026402792cd5839d395313f8ef9c2e86ca34a8facaa0f75a77753
+  languageName: node
+  linkType: hard
+
+"micromark-factory-label@npm:^2.0.0":
+  version: 2.0.0
+  resolution: "micromark-factory-label@npm:2.0.0"
+  dependencies:
+    devlop: "npm:^1.0.0"
+    micromark-util-character: "npm:^2.0.0"
+    micromark-util-symbol: "npm:^2.0.0"
+    micromark-util-types: "npm:^2.0.0"
+  checksum: 10c0/8ffad00487a7891941b1d1f51d53a33c7a659dcf48617edb7a4008dad7aff67ec316baa16d55ca98ae3d75ce1d81628dbf72fedc7c6f108f740dec0d5d21c8ee
+  languageName: node
+  linkType: hard
+
+"micromark-factory-mdx-expression@npm:^2.0.0":
+  version: 2.0.1
+  resolution: "micromark-factory-mdx-expression@npm:2.0.1"
+  dependencies:
+    "@types/estree": "npm:^1.0.0"
+    devlop: "npm:^1.0.0"
+    micromark-util-character: "npm:^2.0.0"
+    micromark-util-events-to-acorn: "npm:^2.0.0"
+    micromark-util-symbol: "npm:^2.0.0"
+    micromark-util-types: "npm:^2.0.0"
+    unist-util-position-from-estree: "npm:^2.0.0"
+    vfile-message: "npm:^4.0.0"
+  checksum: 10c0/d9cf475a73a7fbfa09aba0d057e033d57e45b7adff78692be9efb4405c4a1717ece4594a632f92a4302e4f8f2ae96355785b616e3f5b2fe8599ec24cfdeee12d
+  languageName: node
+  linkType: hard
+
+"micromark-factory-space@npm:^1.0.0":
+  version: 1.1.0
+  resolution: "micromark-factory-space@npm:1.1.0"
+  dependencies:
+    micromark-util-character: "npm:^1.0.0"
+    micromark-util-types: "npm:^1.0.0"
+  checksum: 10c0/3da81187ce003dd4178c7adc4674052fb8befc8f1a700ae4c8227755f38581a4ae963866dc4857488d62d1dc9837606c9f2f435fa1332f62a0f1c49b83c6a822
+  languageName: node
+  linkType: hard
+
+"micromark-factory-space@npm:^2.0.0":
+  version: 2.0.0
+  resolution: "micromark-factory-space@npm:2.0.0"
+  dependencies:
+    micromark-util-character: "npm:^2.0.0"
+    micromark-util-types: "npm:^2.0.0"
+  checksum: 10c0/103ca954dade963d4ff1d2f27d397833fe855ddc72590205022832ef68b775acdea67949000cee221708e376530b1de78c745267b0bf8366740840783eb37122
+  languageName: node
+  linkType: hard
+
+"micromark-factory-title@npm:^1.0.0":
+  version: 1.1.0
+  resolution: "micromark-factory-title@npm:1.1.0"
+  dependencies:
+    micromark-factory-space: "npm:^1.0.0"
+    micromark-util-character: "npm:^1.0.0"
+    micromark-util-symbol: "npm:^1.0.0"
+    micromark-util-types: "npm:^1.0.0"
+  checksum: 10c0/cf8c687d1d5c3928846a4791d4a7e2f1d7bdd2397051e20d60f06b7565a48bf85198ab6f85735e997ab3f0cbb80b8b6391f4f7ebc0aae2f2f8c3a08541257bf6
+  languageName: node
+  linkType: hard
+
+"micromark-factory-title@npm:^2.0.0":
+  version: 2.0.0
+  resolution: "micromark-factory-title@npm:2.0.0"
+  dependencies:
+    micromark-factory-space: "npm:^2.0.0"
+    micromark-util-character: "npm:^2.0.0"
+    micromark-util-symbol: "npm:^2.0.0"
+    micromark-util-types: "npm:^2.0.0"
+  checksum: 10c0/2b2188e7a011b1b001faf8c860286d246d5c3485ef8819270c60a5808f4c7613e49d4e481dbdff62600ef7acdba0f5100be2d125cbd2a15e236c26b3668a8ebd
+  languageName: node
+  linkType: hard
+
+"micromark-factory-whitespace@npm:^1.0.0":
+  version: 1.1.0
+  resolution: "micromark-factory-whitespace@npm:1.1.0"
+  dependencies:
+    micromark-factory-space: "npm:^1.0.0"
+    micromark-util-character: "npm:^1.0.0"
+    micromark-util-symbol: "npm:^1.0.0"
+    micromark-util-types: "npm:^1.0.0"
+  checksum: 10c0/7248cc4534f9befb38c6f398b6e38efd3199f1428fc214c9cb7ed5b6e9fa7a82c0d8cdfa9bcacde62887c9a7c8c46baf5c318b2ae8f701afbccc8ad702e92dce
+  languageName: node
+  linkType: hard
+
+"micromark-factory-whitespace@npm:^2.0.0":
+  version: 2.0.0
+  resolution: "micromark-factory-whitespace@npm:2.0.0"
+  dependencies:
+    micromark-factory-space: "npm:^2.0.0"
+    micromark-util-character: "npm:^2.0.0"
+    micromark-util-symbol: "npm:^2.0.0"
+    micromark-util-types: "npm:^2.0.0"
+  checksum: 10c0/4e91baab0cc71873095134bd0e225d01d9786cde352701402d71b72d317973954754e8f9f1849901f165530e6421202209f4d97c460a27bb0808ec5a3fc3148c
+  languageName: node
+  linkType: hard
+
+"micromark-util-character@npm:^1.0.0, micromark-util-character@npm:^1.1.0":
+  version: 1.2.0
+  resolution: "micromark-util-character@npm:1.2.0"
+  dependencies:
+    micromark-util-symbol: "npm:^1.0.0"
+    micromark-util-types: "npm:^1.0.0"
+  checksum: 10c0/3390a675a50731b58a8e5493cd802e190427f10fa782079b455b00f6b54e406e36882df7d4a3bd32b709f7a2c3735b4912597ebc1c0a99566a8d8d0b816e2cd4
+  languageName: node
+  linkType: hard
+
+"micromark-util-character@npm:^2.0.0":
+  version: 2.1.0
+  resolution: "micromark-util-character@npm:2.1.0"
+  dependencies:
+    micromark-util-symbol: "npm:^2.0.0"
+    micromark-util-types: "npm:^2.0.0"
+  checksum: 10c0/fc37a76aaa5a5138191ba2bef1ac50c36b3bcb476522e98b1a42304ab4ec76f5b036a746ddf795d3de3e7004b2c09f21dd1bad42d161f39b8cfc0acd067e6373
+  languageName: node
+  linkType: hard
+
+"micromark-util-chunked@npm:^1.0.0":
+  version: 1.1.0
+  resolution: "micromark-util-chunked@npm:1.1.0"
+  dependencies:
+    micromark-util-symbol: "npm:^1.0.0"
+  checksum: 10c0/59534cf4aaf481ed58d65478d00eae0080df9b5816673f79b5ddb0cea263e5a9ee9cbb6cc565daf1eb3c8c4ff86fc4e25d38a0577539655cda823a4249efd358
+  languageName: node
+  linkType: hard
+
+"micromark-util-chunked@npm:^2.0.0":
+  version: 2.0.0
+  resolution: "micromark-util-chunked@npm:2.0.0"
+  dependencies:
+    micromark-util-symbol: "npm:^2.0.0"
+  checksum: 10c0/043b5f2abc8c13a1e2e4c378ead191d1a47ed9e0cd6d0fa5a0a430b2df9e17ada9d5de5a20688a000bbc5932507e746144acec60a9589d9a79fa60918e029203
+  languageName: node
+  linkType: hard
+
+"micromark-util-classify-character@npm:^1.0.0":
+  version: 1.1.0
+  resolution: "micromark-util-classify-character@npm:1.1.0"
+  dependencies:
+    micromark-util-character: "npm:^1.0.0"
+    micromark-util-symbol: "npm:^1.0.0"
+    micromark-util-types: "npm:^1.0.0"
+  checksum: 10c0/3266453dc0fdaf584e24c9b3c91d1ed180f76b5856699c51fd2549305814fcab7ec52afb4d3e83d002a9115cd2d2b2ffdc9c0b38ed85120822bf515cc00636ec
+  languageName: node
+  linkType: hard
+
+"micromark-util-classify-character@npm:^2.0.0":
+  version: 2.0.0
+  resolution: "micromark-util-classify-character@npm:2.0.0"
+  dependencies:
+    micromark-util-character: "npm:^2.0.0"
+    micromark-util-symbol: "npm:^2.0.0"
+    micromark-util-types: "npm:^2.0.0"
+  checksum: 10c0/2bf5fa5050faa9b69f6c7e51dbaaf02329ab70fabad8229984381b356afbbf69db90f4617bec36d814a7d285fb7cad8e3c4e38d1daf4387dc9e240aa7f9a292a
+  languageName: node
+  linkType: hard
+
+"micromark-util-combine-extensions@npm:^1.0.0":
+  version: 1.1.0
+  resolution: "micromark-util-combine-extensions@npm:1.1.0"
+  dependencies:
+    micromark-util-chunked: "npm:^1.0.0"
+    micromark-util-types: "npm:^1.0.0"
+  checksum: 10c0/0bc572fab3fe77f533c29aa1b75cb847b9fc9455f67a98623ef9740b925c0b0426ad9f09bbb56f1e844ea9ebada7873d1f06d27f7c979a917692b273c4b69e31
+  languageName: node
+  linkType: hard
+
+"micromark-util-combine-extensions@npm:^2.0.0":
+  version: 2.0.0
+  resolution: "micromark-util-combine-extensions@npm:2.0.0"
+  dependencies:
+    micromark-util-chunked: "npm:^2.0.0"
+    micromark-util-types: "npm:^2.0.0"
+  checksum: 10c0/cd4c8d1a85255527facb419ff3b3cc3d7b7f27005c5ef5fa7ef2c4d0e57a9129534fc292a188ec2d467c2c458642d369c5f894bc8a9e142aed6696cc7989d3ea
+  languageName: node
+  linkType: hard
+
+"micromark-util-decode-numeric-character-reference@npm:^1.0.0":
+  version: 1.1.0
+  resolution: "micromark-util-decode-numeric-character-reference@npm:1.1.0"
+  dependencies:
+    micromark-util-symbol: "npm:^1.0.0"
+  checksum: 10c0/64ef2575e3fc2426976c19e16973348f20b59ddd5543f1467ac2e251f29e0a91f12089703d29ae985b0b9a408ee0d72f06d04ed3920811aa2402aabca3bdf9e4
+  languageName: node
+  linkType: hard
+
+"micromark-util-decode-numeric-character-reference@npm:^2.0.0":
+  version: 2.0.1
+  resolution: "micromark-util-decode-numeric-character-reference@npm:2.0.1"
+  dependencies:
+    micromark-util-symbol: "npm:^2.0.0"
+  checksum: 10c0/3f6d684ee8f317c67806e19b3e761956256cb936a2e0533aad6d49ac5604c6536b2041769c6febdd387ab7175b7b7e551851bf2c1f78da943e7a3671ca7635ac
+  languageName: node
+  linkType: hard
+
+"micromark-util-decode-string@npm:^1.0.0":
+  version: 1.1.0
+  resolution: "micromark-util-decode-string@npm:1.1.0"
+  dependencies:
+    decode-named-character-reference: "npm:^1.0.0"
+    micromark-util-character: "npm:^1.0.0"
+    micromark-util-decode-numeric-character-reference: "npm:^1.0.0"
+    micromark-util-symbol: "npm:^1.0.0"
+  checksum: 10c0/757a0aaa5ad6c50c7480bd75371d407ac75f5022cd4404aba07adadf1448189502aea9bb7b2d09d25e18745e0abf72b95506b6beb184bcccabe919e48e3a5df7
+  languageName: node
+  linkType: hard
+
+"micromark-util-decode-string@npm:^2.0.0":
+  version: 2.0.0
+  resolution: "micromark-util-decode-string@npm:2.0.0"
+  dependencies:
+    decode-named-character-reference: "npm:^1.0.0"
+    micromark-util-character: "npm:^2.0.0"
+    micromark-util-decode-numeric-character-reference: "npm:^2.0.0"
+    micromark-util-symbol: "npm:^2.0.0"
+  checksum: 10c0/f5413bebb21bdb686cfa1bcfa7e9c93093a523d1b42443ead303b062d2d680a94e5e8424549f57b8ba9d786a758e5a26a97f56068991bbdbca5d1885b3aa7227
+  languageName: node
+  linkType: hard
+
+"micromark-util-encode@npm:^1.0.0":
+  version: 1.1.0
+  resolution: "micromark-util-encode@npm:1.1.0"
+  checksum: 10c0/9878c9bc96999d45626a7597fffac85348ea842dce75d2417345cbf070a9941c62477bd0963bef37d4f0fd29f2982be6ddf416d62806f00ccb334af9d6ee87e7
+  languageName: node
+  linkType: hard
+
+"micromark-util-encode@npm:^2.0.0":
+  version: 2.0.0
+  resolution: "micromark-util-encode@npm:2.0.0"
+  checksum: 10c0/ebdaafff23100bbf4c74e63b4b1612a9ddf94cd7211d6a076bc6fb0bc32c1b48d6fb615aa0953e607c62c97d849f97f1042260d3eb135259d63d372f401bbbb2
+  languageName: node
+  linkType: hard
+
+"micromark-util-events-to-acorn@npm:^2.0.0":
+  version: 2.0.2
+  resolution: "micromark-util-events-to-acorn@npm:2.0.2"
+  dependencies:
+    "@types/acorn": "npm:^4.0.0"
+    "@types/estree": "npm:^1.0.0"
+    "@types/unist": "npm:^3.0.0"
+    devlop: "npm:^1.0.0"
+    estree-util-visit: "npm:^2.0.0"
+    micromark-util-symbol: "npm:^2.0.0"
+    micromark-util-types: "npm:^2.0.0"
+    vfile-message: "npm:^4.0.0"
+  checksum: 10c0/2bd2660a49efddb625e6adcabdc3384ae4c50c7a04270737270f4aab53d09e8253e6d2607cd947c4c77f8a9900278915babb240e61fd143dc5bab51d9fd50709
+  languageName: node
+  linkType: hard
+
+"micromark-util-html-tag-name@npm:^1.0.0":
+  version: 1.2.0
+  resolution: "micromark-util-html-tag-name@npm:1.2.0"
+  checksum: 10c0/15421869678d36b4fe51df453921e8186bff514a14e9f79f32b7e1cdd67874e22a66ad34a7f048dd132cbbbfc7c382ae2f777a2bfd1f245a47705dc1c6d4f199
+  languageName: node
+  linkType: hard
+
+"micromark-util-html-tag-name@npm:^2.0.0":
+  version: 2.0.0
+  resolution: "micromark-util-html-tag-name@npm:2.0.0"
+  checksum: 10c0/988aa26367449bd345b627ae32cf605076daabe2dc1db71b578a8a511a47123e14af466bcd6dcbdacec60142f07bc2723ec5f7a0eed0f5319ce83b5e04825429
+  languageName: node
+  linkType: hard
+
+"micromark-util-normalize-identifier@npm:^1.0.0":
+  version: 1.1.0
+  resolution: "micromark-util-normalize-identifier@npm:1.1.0"
+  dependencies:
+    micromark-util-symbol: "npm:^1.0.0"
+  checksum: 10c0/a9657321a2392584e4d978061882117a84db7d2c2c1c052c0f5d25da089d463edb9f956d5beaf7f5768984b6f72d046d59b5972951ec7bf25397687a62b8278a
+  languageName: node
+  linkType: hard
+
+"micromark-util-normalize-identifier@npm:^2.0.0":
+  version: 2.0.0
+  resolution: "micromark-util-normalize-identifier@npm:2.0.0"
+  dependencies:
+    micromark-util-symbol: "npm:^2.0.0"
+  checksum: 10c0/93bf8789b8449538f22cf82ac9b196363a5f3b2f26efd98aef87c4c1b1f8c05be3ef6391ff38316ff9b03c1a6fd077342567598019ddd12b9bd923dacc556333
+  languageName: node
+  linkType: hard
+
+"micromark-util-resolve-all@npm:^1.0.0":
+  version: 1.1.0
+  resolution: "micromark-util-resolve-all@npm:1.1.0"
+  dependencies:
+    micromark-util-types: "npm:^1.0.0"
+  checksum: 10c0/b5c95484c06e87bbbb60d8430eb030a458733a5270409f4c67892d1274737087ca6a7ca888987430e57cf1dcd44bb16390d3b3936a2bf07f7534ec8f52ce43c9
+  languageName: node
+  linkType: hard
+
+"micromark-util-resolve-all@npm:^2.0.0":
+  version: 2.0.0
+  resolution: "micromark-util-resolve-all@npm:2.0.0"
+  dependencies:
+    micromark-util-types: "npm:^2.0.0"
+  checksum: 10c0/3b912e88453dcefe728a9080c8934a75ac4732056d6576ceecbcaf97f42c5d6fa2df66db8abdc8427eb167c5ffddefe26713728cfe500bc0e314ed260d6e2746
+  languageName: node
+  linkType: hard
+
+"micromark-util-sanitize-uri@npm:^1.0.0":
+  version: 1.2.0
+  resolution: "micromark-util-sanitize-uri@npm:1.2.0"
+  dependencies:
+    micromark-util-character: "npm:^1.0.0"
+    micromark-util-encode: "npm:^1.0.0"
+    micromark-util-symbol: "npm:^1.0.0"
+  checksum: 10c0/dbdb98248e9f0408c7a00f1c1cd805775b41d213defd659533835f34b38da38e8f990bf7b3f782e96bffbc549aec9c3ecdab197d4ad5adbfe08f814a70327b6e
+  languageName: node
+  linkType: hard
+
+"micromark-util-sanitize-uri@npm:^2.0.0":
+  version: 2.0.0
+  resolution: "micromark-util-sanitize-uri@npm:2.0.0"
+  dependencies:
+    micromark-util-character: "npm:^2.0.0"
+    micromark-util-encode: "npm:^2.0.0"
+    micromark-util-symbol: "npm:^2.0.0"
+  checksum: 10c0/74763ca1c927dd520d3ab8fd9856a19740acf76fc091f0a1f5d4e99c8cd5f1b81c5a0be3efb564941a071fb6d85fd951103f2760eb6cff77b5ab3abe08341309
+  languageName: node
+  linkType: hard
+
+"micromark-util-subtokenize@npm:^1.0.0":
+  version: 1.1.0
+  resolution: "micromark-util-subtokenize@npm:1.1.0"
+  dependencies:
+    micromark-util-chunked: "npm:^1.0.0"
+    micromark-util-symbol: "npm:^1.0.0"
+    micromark-util-types: "npm:^1.0.0"
+    uvu: "npm:^0.5.0"
+  checksum: 10c0/f292b1b162845db50d36255c9d4c4c6d47931fbca3ac98a80c7e536d2163233fd662f8ca0479ee2b80f145c66a1394c7ed17dfce801439741211015e77e3901e
+  languageName: node
+  linkType: hard
+
+"micromark-util-subtokenize@npm:^2.0.0":
+  version: 2.0.1
+  resolution: "micromark-util-subtokenize@npm:2.0.1"
+  dependencies:
+    devlop: "npm:^1.0.0"
+    micromark-util-chunked: "npm:^2.0.0"
+    micromark-util-symbol: "npm:^2.0.0"
+    micromark-util-types: "npm:^2.0.0"
+  checksum: 10c0/000cefde827db129f4ed92b8fbdeb4866c5f9c93068c0115485564b0426abcb9058080aa257df9035e12ca7fa92259d66623ea750b9eb3bcdd8325d3fb6fc237
+  languageName: node
+  linkType: hard
+
+"micromark-util-symbol@npm:^1.0.0, micromark-util-symbol@npm:^1.0.1":
+  version: 1.1.0
+  resolution: "micromark-util-symbol@npm:1.1.0"
+  checksum: 10c0/10ceaed33a90e6bfd3a5d57053dbb53f437d4809cc11430b5a09479c0ba601577059be9286df4a7eae6e350a60a2575dc9fa9d9872b5b8d058c875e075c33803
+  languageName: node
+  linkType: hard
+
+"micromark-util-symbol@npm:^2.0.0":
+  version: 2.0.0
+  resolution: "micromark-util-symbol@npm:2.0.0"
+  checksum: 10c0/4e76186c185ce4cefb9cea8584213d9ffacd77099d1da30c0beb09fa21f46f66f6de4c84c781d7e34ff763fe3a06b530e132fa9004882afab9e825238d0aa8b3
+  languageName: node
+  linkType: hard
+
+"micromark-util-types@npm:^1.0.0, micromark-util-types@npm:^1.0.1":
+  version: 1.1.0
+  resolution: "micromark-util-types@npm:1.1.0"
+  checksum: 10c0/a9749cb0a12a252ff536baabcb7012421b6fad4d91a5fdd80d7b33dc7b4c22e2d0c4637dfe5b902d00247fe6c9b01f4a24fce6b572b16ccaa4da90e6ce2a11e4
+  languageName: node
+  linkType: hard
+
+"micromark-util-types@npm:^2.0.0":
+  version: 2.0.0
+  resolution: "micromark-util-types@npm:2.0.0"
+  checksum: 10c0/d74e913b9b61268e0d6939f4209e3abe9dada640d1ee782419b04fd153711112cfaaa3c4d5f37225c9aee1e23c3bb91a1f5223e1e33ba92d33e83956a53e61de
+  languageName: node
+  linkType: hard
+
+"micromark@npm:^3.0.0":
+  version: 3.2.0
+  resolution: "micromark@npm:3.2.0"
+  dependencies:
+    "@types/debug": "npm:^4.0.0"
+    debug: "npm:^4.0.0"
+    decode-named-character-reference: "npm:^1.0.0"
+    micromark-core-commonmark: "npm:^1.0.1"
+    micromark-factory-space: "npm:^1.0.0"
+    micromark-util-character: "npm:^1.0.0"
+    micromark-util-chunked: "npm:^1.0.0"
+    micromark-util-combine-extensions: "npm:^1.0.0"
+    micromark-util-decode-numeric-character-reference: "npm:^1.0.0"
+    micromark-util-encode: "npm:^1.0.0"
+    micromark-util-normalize-identifier: "npm:^1.0.0"
+    micromark-util-resolve-all: "npm:^1.0.0"
+    micromark-util-sanitize-uri: "npm:^1.0.0"
+    micromark-util-subtokenize: "npm:^1.0.0"
+    micromark-util-symbol: "npm:^1.0.0"
+    micromark-util-types: "npm:^1.0.1"
+    uvu: "npm:^0.5.0"
+  checksum: 10c0/f243e805d1b3cc699fddae2de0b1492bc82462f1a709d7ae5c82039f88b1e009c959100184717e748be057b5f88603289d5681679a4e6fbabcd037beb34bc744
+  languageName: node
+  linkType: hard
+
+"micromark@npm:^4.0.0":
+  version: 4.0.0
+  resolution: "micromark@npm:4.0.0"
+  dependencies:
+    "@types/debug": "npm:^4.0.0"
+    debug: "npm:^4.0.0"
+    decode-named-character-reference: "npm:^1.0.0"
+    devlop: "npm:^1.0.0"
+    micromark-core-commonmark: "npm:^2.0.0"
+    micromark-factory-space: "npm:^2.0.0"
+    micromark-util-character: "npm:^2.0.0"
+    micromark-util-chunked: "npm:^2.0.0"
+    micromark-util-combine-extensions: "npm:^2.0.0"
+    micromark-util-decode-numeric-character-reference: "npm:^2.0.0"
+    micromark-util-encode: "npm:^2.0.0"
+    micromark-util-normalize-identifier: "npm:^2.0.0"
+    micromark-util-resolve-all: "npm:^2.0.0"
+    micromark-util-sanitize-uri: "npm:^2.0.0"
+    micromark-util-subtokenize: "npm:^2.0.0"
+    micromark-util-symbol: "npm:^2.0.0"
+    micromark-util-types: "npm:^2.0.0"
+  checksum: 10c0/7e91c8d19ff27bc52964100853f1b3b32bb5b2ece57470a34ba1b2f09f4e2a183d90106c4ae585c9f2046969ee088576fed79b2f7061cba60d16652ccc2c64fd
+  languageName: node
+  linkType: hard
+
+"micromark@npm:~2.11.0":
+  version: 2.11.4
+  resolution: "micromark@npm:2.11.4"
+  dependencies:
+    debug: "npm:^4.0.0"
+    parse-entities: "npm:^2.0.0"
+  checksum: 10c0/67307cbacae621ab1eb23e333a5addc7600cf97d3b40cad22fc1c2d03d734d6d9cbc3f5a7e5d655a8c0862a949abe590ab7cfa96be366bfe09e239a94e6eea55
+  languageName: node
+  linkType: hard
+
+"micromatch@npm:^4.0.2, micromatch@npm:^4.0.4, micromatch@npm:^4.0.5":
+  version: 4.0.8
+  resolution: "micromatch@npm:4.0.8"
+  dependencies:
+    braces: "npm:^3.0.3"
+    picomatch: "npm:^2.3.1"
+  checksum: 10c0/166fa6eb926b9553f32ef81f5f531d27b4ce7da60e5baf8c021d043b27a388fb95e46a8038d5045877881e673f8134122b59624d5cecbd16eb50a42e7a6b5ca8
+  languageName: node
+  linkType: hard
+
+"mime-db@npm:1.52.0":
+  version: 1.52.0
+  resolution: "mime-db@npm:1.52.0"
+  checksum: 10c0/0557a01deebf45ac5f5777fe7740b2a5c309c6d62d40ceab4e23da9f821899ce7a900b7ac8157d4548ddbb7beffe9abc621250e6d182b0397ec7f10c7b91a5aa
+  languageName: node
+  linkType: hard
+
+"mime-db@npm:>= 1.43.0 < 2":
+  version: 1.53.0
+  resolution: "mime-db@npm:1.53.0"
+  checksum: 10c0/1dcc37ba8ed5d1c179f5c6f0837e8db19371d5f2ea3690c3c2f3fa8c3858f976851d3460b172b4dee78ebd606762cbb407aa398545fbacd539e519f858cd7bf4
+  languageName: node
+  linkType: hard
+
+"mime-db@npm:~1.33.0":
+  version: 1.33.0
+  resolution: "mime-db@npm:1.33.0"
+  checksum: 10c0/79172ce5468c8503b49dddfdddc18d3f5fe2599f9b5fe1bc321a8cbee14c96730fc6db22f907b23701b05b2936f865795f62ec3a78a7f3c8cb2450bb68c6763e
+  languageName: node
+  linkType: hard
+
+"mime-types@npm:2.1.18":
+  version: 2.1.18
+  resolution: "mime-types@npm:2.1.18"
+  dependencies:
+    mime-db: "npm:~1.33.0"
+  checksum: 10c0/a96a8d12f4bb98bc7bfac6a8ccbd045f40368fc1030d9366050c3613825d3715d1c1f393e10a75a885d2cdc1a26cd6d5e11f3a2a0d5c4d361f00242139430a0f
+  languageName: node
+  linkType: hard
+
+"mime-types@npm:^2.1.27, mime-types@npm:^2.1.31, mime-types@npm:~2.1.17, mime-types@npm:~2.1.24, mime-types@npm:~2.1.34":
+  version: 2.1.35
+  resolution: "mime-types@npm:2.1.35"
+  dependencies:
+    mime-db: "npm:1.52.0"
+  checksum: 10c0/82fb07ec56d8ff1fc999a84f2f217aa46cb6ed1033fefaabd5785b9a974ed225c90dc72fff460259e66b95b73648596dbcc50d51ed69cdf464af2d237d3149b2
+  languageName: node
+  linkType: hard
+
+"mime@npm:1.6.0":
+  version: 1.6.0
+  resolution: "mime@npm:1.6.0"
+  bin:
+    mime: cli.js
+  checksum: 10c0/b92cd0adc44888c7135a185bfd0dddc42c32606401c72896a842ae15da71eb88858f17669af41e498b463cd7eb998f7b48939a25b08374c7924a9c8a6f8a81b0
+  languageName: node
+  linkType: hard
+
+"mimic-fn@npm:^2.1.0":
+  version: 2.1.0
+  resolution: "mimic-fn@npm:2.1.0"
+  checksum: 10c0/b26f5479d7ec6cc2bce275a08f146cf78f5e7b661b18114e2506dd91ec7ec47e7a25bf4360e5438094db0560bcc868079fb3b1fb3892b833c1ecbf63f80c95a4
+  languageName: node
+  linkType: hard
+
+"mimic-response@npm:^3.1.0":
+  version: 3.1.0
+  resolution: "mimic-response@npm:3.1.0"
+  checksum: 10c0/0d6f07ce6e03e9e4445bee655202153bdb8a98d67ee8dc965ac140900d7a2688343e6b4c9a72cfc9ef2f7944dfd76eef4ab2482eb7b293a68b84916bac735362
+  languageName: node
+  linkType: hard
+
+"mimic-response@npm:^4.0.0":
+  version: 4.0.0
+  resolution: "mimic-response@npm:4.0.0"
+  checksum: 10c0/761d788d2668ae9292c489605ffd4fad220f442fbae6832adce5ebad086d691e906a6d5240c290293c7a11e99fbdbbef04abbbed498bf8699a4ee0f31315e3fb
+  languageName: node
+  linkType: hard
+
+"mini-css-extract-plugin@npm:^2.7.6":
+  version: 2.9.1
+  resolution: "mini-css-extract-plugin@npm:2.9.1"
+  dependencies:
+    schema-utils: "npm:^4.0.0"
+    tapable: "npm:^2.2.1"
+  peerDependencies:
+    webpack: ^5.0.0
+  checksum: 10c0/19361902ef028b9875aafa3931d99643c2d95824ba343a501c83ff61d069a430fcfc523ca796765798b564570da2199f5a28cd51b9528ddbcfdc9271c61400d0
+  languageName: node
+  linkType: hard
+
+"minimalistic-assert@npm:^1.0.0":
+  version: 1.0.1
+  resolution: "minimalistic-assert@npm:1.0.1"
+  checksum: 10c0/96730e5601cd31457f81a296f521eb56036e6f69133c0b18c13fe941109d53ad23a4204d946a0d638d7f3099482a0cec8c9bb6d642604612ce43ee536be3dddd
+  languageName: node
+  linkType: hard
+
+"minimatch@npm:3.1.2, minimatch@npm:^3.0.4, minimatch@npm:^3.0.5, minimatch@npm:^3.1.1, minimatch@npm:^3.1.2":
+  version: 3.1.2
+  resolution: "minimatch@npm:3.1.2"
+  dependencies:
+    brace-expansion: "npm:^1.1.7"
+  checksum: 10c0/0262810a8fc2e72cca45d6fd86bd349eee435eb95ac6aa45c9ea2180e7ee875ef44c32b55b5973ceabe95ea12682f6e3725cbb63d7a2d1da3ae1163c8b210311
+  languageName: node
+  linkType: hard
+
+"minimatch@npm:9.0.3":
+  version: 9.0.3
+  resolution: "minimatch@npm:9.0.3"
+  dependencies:
+    brace-expansion: "npm:^2.0.1"
+  checksum: 10c0/85f407dcd38ac3e180f425e86553911d101455ca3ad5544d6a7cec16286657e4f8a9aa6695803025c55e31e35a91a2252b5dc8e7d527211278b8b65b4dbd5eac
+  languageName: node
+  linkType: hard
+
+"minimatch@npm:^9.0.0, minimatch@npm:^9.0.4":
+  version: 9.0.5
+  resolution: "minimatch@npm:9.0.5"
+  dependencies:
+    brace-expansion: "npm:^2.0.1"
+  checksum: 10c0/de96cf5e35bdf0eab3e2c853522f98ffbe9a36c37797778d2665231ec1f20a9447a7e567cb640901f89e4daaa95ae5d70c65a9e8aa2bb0019b6facbc3c0575ed
+  languageName: node
+  linkType: hard
+
+"minimist@npm:^1.2.0, minimist@npm:^1.2.3":
+  version: 1.2.8
+  resolution: "minimist@npm:1.2.8"
+  checksum: 10c0/19d3fcdca050087b84c2029841a093691a91259a47def2f18222f41e7645a0b7c44ef4b40e88a1e58a40c84d2ef0ee6047c55594d298146d0eb3f6b737c20ce6
+  languageName: node
+  linkType: hard
+
+"minipass-collect@npm:^2.0.1":
+  version: 2.0.1
+  resolution: "minipass-collect@npm:2.0.1"
+  dependencies:
+    minipass: "npm:^7.0.3"
+  checksum: 10c0/5167e73f62bb74cc5019594709c77e6a742051a647fe9499abf03c71dca75515b7959d67a764bdc4f8b361cf897fbf25e2d9869ee039203ed45240f48b9aa06e
+  languageName: node
+  linkType: hard
+
+"minipass-fetch@npm:^3.0.0":
+  version: 3.0.5
+  resolution: "minipass-fetch@npm:3.0.5"
+  dependencies:
+    encoding: "npm:^0.1.13"
+    minipass: "npm:^7.0.3"
+    minipass-sized: "npm:^1.0.3"
+    minizlib: "npm:^2.1.2"
+  dependenciesMeta:
+    encoding:
+      optional: true
+  checksum: 10c0/9d702d57f556274286fdd97e406fc38a2f5c8d15e158b498d7393b1105974b21249289ec571fa2b51e038a4872bfc82710111cf75fae98c662f3d6f95e72152b
+  languageName: node
+  linkType: hard
+
+"minipass-flush@npm:^1.0.5":
+  version: 1.0.5
+  resolution: "minipass-flush@npm:1.0.5"
+  dependencies:
+    minipass: "npm:^3.0.0"
+  checksum: 10c0/2a51b63feb799d2bb34669205eee7c0eaf9dce01883261a5b77410c9408aa447e478efd191b4de6fc1101e796ff5892f8443ef20d9544385819093dbb32d36bd
+  languageName: node
+  linkType: hard
+
+"minipass-pipeline@npm:^1.2.4":
+  version: 1.2.4
+  resolution: "minipass-pipeline@npm:1.2.4"
+  dependencies:
+    minipass: "npm:^3.0.0"
+  checksum: 10c0/cbda57cea20b140b797505dc2cac71581a70b3247b84480c1fed5ca5ba46c25ecc25f68bfc9e6dcb1a6e9017dab5c7ada5eab73ad4f0a49d84e35093e0c643f2
+  languageName: node
+  linkType: hard
+
+"minipass-sized@npm:^1.0.3":
+  version: 1.0.3
+  resolution: "minipass-sized@npm:1.0.3"
+  dependencies:
+    minipass: "npm:^3.0.0"
+  checksum: 10c0/298f124753efdc745cfe0f2bdfdd81ba25b9f4e753ca4a2066eb17c821f25d48acea607dfc997633ee5bf7b6dfffb4eee4f2051eb168663f0b99fad2fa4829cb
+  languageName: node
+  linkType: hard
+
+"minipass@npm:^3.0.0":
+  version: 3.3.6
+  resolution: "minipass@npm:3.3.6"
+  dependencies:
+    yallist: "npm:^4.0.0"
+  checksum: 10c0/a114746943afa1dbbca8249e706d1d38b85ed1298b530f5808ce51f8e9e941962e2a5ad2e00eae7dd21d8a4aae6586a66d4216d1a259385e9d0358f0c1eba16c
+  languageName: node
+  linkType: hard
+
+"minipass@npm:^5.0.0":
+  version: 5.0.0
+  resolution: "minipass@npm:5.0.0"
+  checksum: 10c0/a91d8043f691796a8ac88df039da19933ef0f633e3d7f0d35dcd5373af49131cf2399bfc355f41515dc495e3990369c3858cd319e5c2722b4753c90bf3152462
+  languageName: node
+  linkType: hard
+
+"minipass@npm:^5.0.0 || ^6.0.2 || ^7.0.0, minipass@npm:^7.0.2, minipass@npm:^7.0.3, minipass@npm:^7.1.2":
+  version: 7.1.2
+  resolution: "minipass@npm:7.1.2"
+  checksum: 10c0/b0fd20bb9fb56e5fa9a8bfac539e8915ae07430a619e4b86ff71f5fc757ef3924b23b2c4230393af1eda647ed3d75739e4e0acb250a6b1eb277cf7f8fe449557
+  languageName: node
+  linkType: hard
+
+"minizlib@npm:^2.1.1, minizlib@npm:^2.1.2":
+  version: 2.1.2
+  resolution: "minizlib@npm:2.1.2"
+  dependencies:
+    minipass: "npm:^3.0.0"
+    yallist: "npm:^4.0.0"
+  checksum: 10c0/64fae024e1a7d0346a1102bb670085b17b7f95bf6cfdf5b128772ec8faf9ea211464ea4add406a3a6384a7d87a0cd1a96263692134323477b4fb43659a6cab78
+  languageName: node
+  linkType: hard
+
+"mkdirp-classic@npm:^0.5.2, mkdirp-classic@npm:^0.5.3":
+  version: 0.5.3
+  resolution: "mkdirp-classic@npm:0.5.3"
+  checksum: 10c0/95371d831d196960ddc3833cc6907e6b8f67ac5501a6582f47dfae5eb0f092e9f8ce88e0d83afcae95d6e2b61a01741ba03714eeafb6f7a6e9dcc158ac85b168
+  languageName: node
+  linkType: hard
+
+"mkdirp@npm:^1.0.3":
+  version: 1.0.4
+  resolution: "mkdirp@npm:1.0.4"
+  bin:
+    mkdirp: bin/cmd.js
+  checksum: 10c0/46ea0f3ffa8bc6a5bc0c7081ffc3907777f0ed6516888d40a518c5111f8366d97d2678911ad1a6882bf592fa9de6c784fea32e1687bb94e1f4944170af48a5cf
+  languageName: node
+  linkType: hard
+
+"modern-normalize@npm:^3.0.1":
+  version: 3.0.1
+  resolution: "modern-normalize@npm:3.0.1"
+  checksum: 10c0/2c19be1618178b85006e354337d7dda6758640d138be5f3e247c9ebb5dd5d7845edb0b8753cec659e789bb30e1fafe73e91eaa056a332d89055cbb107d9fecec
+  languageName: node
+  linkType: hard
+
+"mri@npm:^1.1.0":
+  version: 1.2.0
+  resolution: "mri@npm:1.2.0"
+  checksum: 10c0/a3d32379c2554cf7351db6237ddc18dc9e54e4214953f3da105b97dc3babe0deb3ffe99cf409b38ea47cc29f9430561ba6b53b24ab8f9ce97a4b50409e4a50e7
+  languageName: node
+  linkType: hard
+
+"mrmime@npm:^2.0.0":
+  version: 2.0.0
+  resolution: "mrmime@npm:2.0.0"
+  checksum: 10c0/312b35ed288986aec90955410b21ed7427fd1e4ee318cb5fc18765c8d029eeded9444faa46589e5b1ed6b35fb2054a802ac8dcb917ddf6b3e189cb3bf11a965c
+  languageName: node
+  linkType: hard
+
+"ms@npm:2.0.0":
+  version: 2.0.0
+  resolution: "ms@npm:2.0.0"
+  checksum: 10c0/f8fda810b39fd7255bbdc451c46286e549794fcc700dc9cd1d25658bbc4dc2563a5de6fe7c60f798a16a60c6ceb53f033cb353f493f0cf63e5199b702943159d
+  languageName: node
+  linkType: hard
+
+"ms@npm:2.1.2":
+  version: 2.1.2
+  resolution: "ms@npm:2.1.2"
+  checksum: 10c0/a437714e2f90dbf881b5191d35a6db792efbca5badf112f87b9e1c712aace4b4b9b742dd6537f3edf90fd6f684de897cec230abde57e87883766712ddda297cc
+  languageName: node
+  linkType: hard
+
+"ms@npm:2.1.3":
+  version: 2.1.3
+  resolution: "ms@npm:2.1.3"
+  checksum: 10c0/d924b57e7312b3b63ad21fc5b3dc0af5e78d61a1fc7cfb5457edaf26326bf62be5307cc87ffb6862ef1c2b33b0233cdb5d4f01c4c958cc0d660948b65a287a48
+  languageName: node
+  linkType: hard
+
+"multicast-dns@npm:^7.2.5":
+  version: 7.2.5
+  resolution: "multicast-dns@npm:7.2.5"
+  dependencies:
+    dns-packet: "npm:^5.2.2"
+    thunky: "npm:^1.0.2"
+  bin:
+    multicast-dns: cli.js
+  checksum: 10c0/5120171d4bdb1577764c5afa96e413353bff530d1b37081cb29cccc747f989eb1baf40574fe8e27060fc1aef72b59c042f72b9b208413de33bcf411343c69057
+  languageName: node
+  linkType: hard
+
+"nanoid@npm:^3.3.7":
+  version: 3.3.7
+  resolution: "nanoid@npm:3.3.7"
+  bin:
+    nanoid: bin/nanoid.cjs
+  checksum: 10c0/e3fb661aa083454f40500473bb69eedb85dc160e763150b9a2c567c7e9ff560ce028a9f833123b618a6ea742e311138b591910e795614a629029e86e180660f3
+  languageName: node
+  linkType: hard
+
+"napi-build-utils@npm:^1.0.1":
+  version: 1.0.2
+  resolution: "napi-build-utils@npm:1.0.2"
+  checksum: 10c0/37fd2cd0ff2ad20073ce78d83fd718a740d568b225924e753ae51cb69d68f330c80544d487e5e5bd18e28702ed2ca469c2424ad948becd1862c1b0209542b2e9
+  languageName: node
+  linkType: hard
+
+"natural-compare@npm:^1.4.0":
+  version: 1.4.0
+  resolution: "natural-compare@npm:1.4.0"
+  checksum: 10c0/f5f9a7974bfb28a91afafa254b197f0f22c684d4a1731763dda960d2c8e375b36c7d690e0d9dc8fba774c537af14a7e979129bca23d88d052fbeb9466955e447
+  languageName: node
+  linkType: hard
+
+"negotiator@npm:0.6.3, negotiator@npm:^0.6.3":
+  version: 0.6.3
+  resolution: "negotiator@npm:0.6.3"
+  checksum: 10c0/3ec9fd413e7bf071c937ae60d572bc67155262068ed522cf4b3be5edbe6ddf67d095ec03a3a14ebf8fc8e95f8e1d61be4869db0dbb0de696f6b837358bd43fc2
+  languageName: node
+  linkType: hard
+
+"neo-async@npm:^2.6.2":
+  version: 2.6.2
+  resolution: "neo-async@npm:2.6.2"
+  checksum: 10c0/c2f5a604a54a8ec5438a342e1f356dff4bc33ccccdb6dc668d94fe8e5eccfc9d2c2eea6064b0967a767ba63b33763f51ccf2cd2441b461a7322656c1f06b3f5d
+  languageName: node
+  linkType: hard
+
+"no-case@npm:^3.0.4":
+  version: 3.0.4
+  resolution: "no-case@npm:3.0.4"
+  dependencies:
+    lower-case: "npm:^2.0.2"
+    tslib: "npm:^2.0.3"
+  checksum: 10c0/8ef545f0b3f8677c848f86ecbd42ca0ff3cd9dd71c158527b344c69ba14710d816d8489c746b6ca225e7b615108938a0bda0a54706f8c255933703ac1cf8e703
+  languageName: node
+  linkType: hard
+
+"node-abi@npm:^3.3.0":
+  version: 3.67.0
+  resolution: "node-abi@npm:3.67.0"
+  dependencies:
+    semver: "npm:^7.3.5"
+  checksum: 10c0/72ce2edbdfb84745bc201a4e48aa7146fd88a0d2c80046b6b17f28439c9a7683eab846f40f1e819349c31f7d9331ed5c50d1e741208d938dd5f38b29cab2275e
+  languageName: node
+  linkType: hard
+
+"node-addon-api@npm:^6.1.0":
+  version: 6.1.0
+  resolution: "node-addon-api@npm:6.1.0"
+  dependencies:
+    node-gyp: "npm:latest"
+  checksum: 10c0/d2699c4ad15740fd31482a3b6fca789af7723ab9d393adc6ac45250faaee72edad8f0b10b2b9d087df0de93f1bdc16d97afdd179b26b9ebc9ed68b569faa4bac
+  languageName: node
+  linkType: hard
+
+"node-emoji@npm:^2.1.0":
+  version: 2.1.3
+  resolution: "node-emoji@npm:2.1.3"
+  dependencies:
+    "@sindresorhus/is": "npm:^4.6.0"
+    char-regex: "npm:^1.0.2"
+    emojilib: "npm:^2.4.0"
+    skin-tone: "npm:^2.0.0"
+  checksum: 10c0/e688333373563aa8308df16111eee2b5837b53a51fb63bf8b7fbea2896327c5d24c9984eb0c8ca6ac155d4d9c194dcf1840d271033c1b588c7c45a3b65339ef7
+  languageName: node
+  linkType: hard
+
+"node-forge@npm:^1":
+  version: 1.3.1
+  resolution: "node-forge@npm:1.3.1"
+  checksum: 10c0/e882819b251a4321f9fc1d67c85d1501d3004b4ee889af822fd07f64de3d1a8e272ff00b689570af0465d65d6bf5074df9c76e900e0aff23e60b847f2a46fbe8
+  languageName: node
+  linkType: hard
+
+"node-gyp@npm:latest":
+  version: 10.2.0
+  resolution: "node-gyp@npm:10.2.0"
+  dependencies:
+    env-paths: "npm:^2.2.0"
+    exponential-backoff: "npm:^3.1.1"
+    glob: "npm:^10.3.10"
+    graceful-fs: "npm:^4.2.6"
+    make-fetch-happen: "npm:^13.0.0"
+    nopt: "npm:^7.0.0"
+    proc-log: "npm:^4.1.0"
+    semver: "npm:^7.3.5"
+    tar: "npm:^6.2.1"
+    which: "npm:^4.0.0"
+  bin:
+    node-gyp: bin/node-gyp.js
+  checksum: 10c0/00630d67dbd09a45aee0a5d55c05e3916ca9e6d427ee4f7bc392d2d3dc5fad7449b21fc098dd38260a53d9dcc9c879b36704a1994235d4707e7271af7e9a835b
+  languageName: node
+  linkType: hard
+
+"node-releases@npm:^2.0.18":
+  version: 2.0.18
+  resolution: "node-releases@npm:2.0.18"
+  checksum: 10c0/786ac9db9d7226339e1dc84bbb42007cb054a346bd9257e6aa154d294f01bc6a6cddb1348fa099f079be6580acbb470e3c048effd5f719325abd0179e566fd27
+  languageName: node
+  linkType: hard
+
+"non-layered-tidy-tree-layout@npm:^2.0.2":
+  version: 2.0.2
+  resolution: "non-layered-tidy-tree-layout@npm:2.0.2"
+  checksum: 10c0/73856e9959667193e733a7ef2b06a69421f4d9d7428a3982ce39763cd979a04eed0007f2afb3414afa3f6dc4dc6b5c850c2af9aa71a974475236a465093ec9c7
+  languageName: node
+  linkType: hard
+
+"nopt@npm:^7.0.0, nopt@npm:^7.2.1":
+  version: 7.2.1
+  resolution: "nopt@npm:7.2.1"
+  dependencies:
+    abbrev: "npm:^2.0.0"
+  bin:
+    nopt: bin/nopt.js
+  checksum: 10c0/a069c7c736767121242037a22a788863accfa932ab285a1eb569eb8cd534b09d17206f68c37f096ae785647435e0c5a5a0a67b42ec743e481a455e5ae6a6df81
+  languageName: node
+  linkType: hard
+
+"normalize-package-data@npm:^6.0.0":
+  version: 6.0.2
+  resolution: "normalize-package-data@npm:6.0.2"
+  dependencies:
+    hosted-git-info: "npm:^7.0.0"
+    semver: "npm:^7.3.5"
+    validate-npm-package-license: "npm:^3.0.4"
+  checksum: 10c0/7e32174e7f5575ede6d3d449593247183880122b4967d4ae6edb28cea5769ca025defda54fc91ec0e3c972fdb5ab11f9284606ba278826171b264cb16a9311ef
+  languageName: node
+  linkType: hard
+
+"normalize-path@npm:^3.0.0, normalize-path@npm:~3.0.0":
+  version: 3.0.0
+  resolution: "normalize-path@npm:3.0.0"
+  checksum: 10c0/e008c8142bcc335b5e38cf0d63cfd39d6cf2d97480af9abdbe9a439221fd4d749763bab492a8ee708ce7a194bb00c9da6d0a115018672310850489137b3da046
+  languageName: node
+  linkType: hard
+
+"normalize-range@npm:^0.1.2":
+  version: 0.1.2
+  resolution: "normalize-range@npm:0.1.2"
+  checksum: 10c0/bf39b73a63e0a42ad1a48c2bd1bda5a07ede64a7e2567307a407674e595bcff0fa0d57e8e5f1e7fa5e91000797c7615e13613227aaaa4d6d6e87f5bd5cc95de6
+  languageName: node
+  linkType: hard
+
+"normalize-url@npm:^8.0.0":
+  version: 8.0.1
+  resolution: "normalize-url@npm:8.0.1"
+  checksum: 10c0/eb439231c4b84430f187530e6fdac605c5048ef4ec556447a10c00a91fc69b52d8d8298d9d608e68d3e0f7dc2d812d3455edf425e0f215993667c3183bcab1ef
+  languageName: node
+  linkType: hard
+
+"npm-install-checks@npm:^6.0.0":
+  version: 6.3.0
+  resolution: "npm-install-checks@npm:6.3.0"
+  dependencies:
+    semver: "npm:^7.1.1"
+  checksum: 10c0/b046ef1de9b40f5d3a9831ce198e1770140a1c3f253dae22eb7b06045191ef79f18f1dcc15a945c919b3c161426861a28050abd321bf439190185794783b6452
+  languageName: node
+  linkType: hard
+
+"npm-normalize-package-bin@npm:^3.0.0":
+  version: 3.0.1
+  resolution: "npm-normalize-package-bin@npm:3.0.1"
+  checksum: 10c0/f1831a7f12622840e1375c785c3dab7b1d82dd521211c17ee5e9610cd1a34d8b232d3fdeebf50c170eddcb321d2c644bf73dbe35545da7d588c6b3fa488db0a5
+  languageName: node
+  linkType: hard
+
+"npm-package-arg@npm:^11.0.0":
+  version: 11.0.3
+  resolution: "npm-package-arg@npm:11.0.3"
+  dependencies:
+    hosted-git-info: "npm:^7.0.0"
+    proc-log: "npm:^4.0.0"
+    semver: "npm:^7.3.5"
+    validate-npm-package-name: "npm:^5.0.0"
+  checksum: 10c0/e18333485e05c3a8774f4b5701ef74f4799533e650b70a68ca8dd697666c9a8d46932cb765fc593edce299521033bd4025a40323d5240cea8a393c784c0c285a
+  languageName: node
+  linkType: hard
+
+"npm-pick-manifest@npm:^9.0.0":
+  version: 9.1.0
+  resolution: "npm-pick-manifest@npm:9.1.0"
+  dependencies:
+    npm-install-checks: "npm:^6.0.0"
+    npm-normalize-package-bin: "npm:^3.0.0"
+    npm-package-arg: "npm:^11.0.0"
+    semver: "npm:^7.3.5"
+  checksum: 10c0/8765f4199755b381323da2bff2202b4b15b59f59dba0d1be3f2f793b591321cd19e1b5a686ef48d9753a6bd4868550da632541a45dfb61809d55664222d73e44
+  languageName: node
+  linkType: hard
+
+"npm-run-path@npm:^4.0.1":
+  version: 4.0.1
+  resolution: "npm-run-path@npm:4.0.1"
+  dependencies:
+    path-key: "npm:^3.0.0"
+  checksum: 10c0/6f9353a95288f8455cf64cbeb707b28826a7f29690244c1e4bb61ec573256e021b6ad6651b394eb1ccfd00d6ec50147253aba2c5fe58a57ceb111fad62c519ac
+  languageName: node
+  linkType: hard
+
+"nprogress@npm:^0.2.0":
+  version: 0.2.0
+  resolution: "nprogress@npm:0.2.0"
+  checksum: 10c0/eab9a923a1ad1eed71a455ecfbc358442dd9bcd71b9fa3fa1c67eddf5159360b182c218f76fca320c97541a1b45e19ced04e6dcb044a662244c5419f8ae9e821
+  languageName: node
+  linkType: hard
+
+"nth-check@npm:^2.0.1":
+  version: 2.1.1
+  resolution: "nth-check@npm:2.1.1"
+  dependencies:
+    boolbase: "npm:^1.0.0"
+  checksum: 10c0/5fee7ff309727763689cfad844d979aedd2204a817fbaaf0e1603794a7c20db28548d7b024692f953557df6ce4a0ee4ae46cd8ebd9b36cfb300b9226b567c479
+  languageName: node
+  linkType: hard
+
+"object-assign@npm:^4.1.1":
+  version: 4.1.1
+  resolution: "object-assign@npm:4.1.1"
+  checksum: 10c0/1f4df9945120325d041ccf7b86f31e8bcc14e73d29171e37a7903050e96b81323784ec59f93f102ec635bcf6fa8034ba3ea0a8c7e69fa202b87ae3b6cec5a414
+  languageName: node
+  linkType: hard
+
+"object-inspect@npm:^1.13.1":
+  version: 1.13.2
+  resolution: "object-inspect@npm:1.13.2"
+  checksum: 10c0/b97835b4c91ec37b5fd71add84f21c3f1047d1d155d00c0fcd6699516c256d4fcc6ff17a1aced873197fe447f91a3964178fd2a67a1ee2120cdaf60e81a050b4
+  languageName: node
+  linkType: hard
+
+"object-keys@npm:^1.1.1":
+  version: 1.1.1
+  resolution: "object-keys@npm:1.1.1"
+  checksum: 10c0/b11f7ccdbc6d406d1f186cdadb9d54738e347b2692a14439ca5ac70c225fa6db46db809711b78589866d47b25fc3e8dee0b4c722ac751e11180f9380e3d8601d
+  languageName: node
+  linkType: hard
+
+"object.assign@npm:^4.1.0, object.assign@npm:^4.1.4, object.assign@npm:^4.1.5":
+  version: 4.1.5
+  resolution: "object.assign@npm:4.1.5"
+  dependencies:
+    call-bind: "npm:^1.0.5"
+    define-properties: "npm:^1.2.1"
+    has-symbols: "npm:^1.0.3"
+    object-keys: "npm:^1.1.1"
+  checksum: 10c0/60108e1fa2706f22554a4648299b0955236c62b3685c52abf4988d14fffb0e7731e00aa8c6448397e3eb63d087dcc124a9f21e1980f36d0b2667f3c18bacd469
+  languageName: node
+  linkType: hard
+
+"object.entries@npm:^1.1.8":
+  version: 1.1.8
+  resolution: "object.entries@npm:1.1.8"
+  dependencies:
+    call-bind: "npm:^1.0.7"
+    define-properties: "npm:^1.2.1"
+    es-object-atoms: "npm:^1.0.0"
+  checksum: 10c0/db9ea979d2956a3bc26c262da4a4d212d36f374652cc4c13efdd069c1a519c16571c137e2893d1c46e1cb0e15c88fd6419eaf410c945f329f09835487d7e65d3
+  languageName: node
+  linkType: hard
+
+"object.fromentries@npm:^2.0.8":
+  version: 2.0.8
+  resolution: "object.fromentries@npm:2.0.8"
+  dependencies:
+    call-bind: "npm:^1.0.7"
+    define-properties: "npm:^1.2.1"
+    es-abstract: "npm:^1.23.2"
+    es-object-atoms: "npm:^1.0.0"
+  checksum: 10c0/cd4327e6c3369cfa805deb4cbbe919bfb7d3aeebf0bcaba291bb568ea7169f8f8cdbcabe2f00b40db0c20cd20f08e11b5f3a5a36fb7dd3fe04850c50db3bf83b
+  languageName: node
+  linkType: hard
+
+"object.values@npm:^1.1.6, object.values@npm:^1.2.0":
+  version: 1.2.0
+  resolution: "object.values@npm:1.2.0"
+  dependencies:
+    call-bind: "npm:^1.0.7"
+    define-properties: "npm:^1.2.1"
+    es-object-atoms: "npm:^1.0.0"
+  checksum: 10c0/15809dc40fd6c5529501324fec5ff08570b7d70fb5ebbe8e2b3901afec35cf2b3dc484d1210c6c642cd3e7e0a5e18dd1d6850115337fef46bdae14ab0cb18ac3
+  languageName: node
+  linkType: hard
+
+"obuf@npm:^1.0.0, obuf@npm:^1.1.2":
+  version: 1.1.2
+  resolution: "obuf@npm:1.1.2"
+  checksum: 10c0/520aaac7ea701618eacf000fc96ae458e20e13b0569845800fc582f81b386731ab22d55354b4915d58171db00e79cfcd09c1638c02f89577ef092b38c65b7d81
+  languageName: node
+  linkType: hard
+
+"on-finished@npm:2.4.1":
+  version: 2.4.1
+  resolution: "on-finished@npm:2.4.1"
+  dependencies:
+    ee-first: "npm:1.1.1"
+  checksum: 10c0/46fb11b9063782f2d9968863d9cbba33d77aa13c17f895f56129c274318b86500b22af3a160fe9995aa41317efcd22941b6eba747f718ced08d9a73afdb087b4
+  languageName: node
+  linkType: hard
+
+"on-headers@npm:~1.0.2":
+  version: 1.0.2
+  resolution: "on-headers@npm:1.0.2"
+  checksum: 10c0/f649e65c197bf31505a4c0444875db0258e198292f34b884d73c2f751e91792ef96bb5cf89aa0f4fecc2e4dc662461dda606b1274b0e564f539cae5d2f5fc32f
+  languageName: node
+  linkType: hard
+
+"once@npm:^1.3.0, once@npm:^1.3.1, once@npm:^1.4.0":
+  version: 1.4.0
+  resolution: "once@npm:1.4.0"
+  dependencies:
+    wrappy: "npm:1"
+  checksum: 10c0/5d48aca287dfefabd756621c5dfce5c91a549a93e9fdb7b8246bc4c4790aa2ec17b34a260530474635147aeb631a2dcc8b32c613df0675f96041cbb8244517d0
+  languageName: node
+  linkType: hard
+
+"onetime@npm:^5.1.2":
+  version: 5.1.2
+  resolution: "onetime@npm:5.1.2"
+  dependencies:
+    mimic-fn: "npm:^2.1.0"
+  checksum: 10c0/ffcef6fbb2692c3c40749f31ea2e22677a876daea92959b8a80b521d95cca7a668c884d8b2045d1d8ee7d56796aa405c405462af112a1477594cc63531baeb8f
+  languageName: node
+  linkType: hard
+
+"open@npm:^8.0.9, open@npm:^8.4.0":
+  version: 8.4.2
+  resolution: "open@npm:8.4.2"
+  dependencies:
+    define-lazy-prop: "npm:^2.0.0"
+    is-docker: "npm:^2.1.1"
+    is-wsl: "npm:^2.2.0"
+  checksum: 10c0/bb6b3a58401dacdb0aad14360626faf3fb7fba4b77816b373495988b724fb48941cad80c1b65d62bb31a17609b2cd91c41a181602caea597ca80dfbcc27e84c9
+  languageName: node
+  linkType: hard
+
+"opener@npm:^1.5.2":
+  version: 1.5.2
+  resolution: "opener@npm:1.5.2"
+  bin:
+    opener: bin/opener-bin.js
+  checksum: 10c0/dd56256ab0cf796585617bc28e06e058adf09211781e70b264c76a1dbe16e90f868c974e5bf5309c93469157c7d14b89c35dc53fe7293b0e40b4d2f92073bc79
+  languageName: node
+  linkType: hard
+
+"optionator@npm:^0.9.3":
+  version: 0.9.4
+  resolution: "optionator@npm:0.9.4"
+  dependencies:
+    deep-is: "npm:^0.1.3"
+    fast-levenshtein: "npm:^2.0.6"
+    levn: "npm:^0.4.1"
+    prelude-ls: "npm:^1.2.1"
+    type-check: "npm:^0.4.0"
+    word-wrap: "npm:^1.2.5"
+  checksum: 10c0/4afb687a059ee65b61df74dfe87d8d6815cd6883cb8b3d5883a910df72d0f5d029821f37025e4bccf4048873dbdb09acc6d303d27b8f76b1a80dd5a7d5334675
+  languageName: node
+  linkType: hard
+
+"p-cancelable@npm:^3.0.0":
+  version: 3.0.0
+  resolution: "p-cancelable@npm:3.0.0"
+  checksum: 10c0/948fd4f8e87b956d9afc2c6c7392de9113dac817cb1cecf4143f7a3d4c57ab5673614a80be3aba91ceec5e4b69fd8c869852d7e8048bc3d9273c4c36ce14b9aa
+  languageName: node
+  linkType: hard
+
+"p-limit@npm:^2.0.0":
+  version: 2.3.0
+  resolution: "p-limit@npm:2.3.0"
+  dependencies:
+    p-try: "npm:^2.0.0"
+  checksum: 10c0/8da01ac53efe6a627080fafc127c873da40c18d87b3f5d5492d465bb85ec7207e153948df6b9cbaeb130be70152f874229b8242ee2be84c0794082510af97f12
+  languageName: node
+  linkType: hard
+
+"p-limit@npm:^3.0.2":
+  version: 3.1.0
+  resolution: "p-limit@npm:3.1.0"
+  dependencies:
+    yocto-queue: "npm:^0.1.0"
+  checksum: 10c0/9db675949dbdc9c3763c89e748d0ef8bdad0afbb24d49ceaf4c46c02c77d30db4e0652ed36d0a0a7a95154335fab810d95c86153105bb73b3a90448e2bb14e1a
+  languageName: node
+  linkType: hard
+
+"p-limit@npm:^4.0.0":
+  version: 4.0.0
+  resolution: "p-limit@npm:4.0.0"
+  dependencies:
+    yocto-queue: "npm:^1.0.0"
+  checksum: 10c0/a56af34a77f8df2ff61ddfb29431044557fcbcb7642d5a3233143ebba805fc7306ac1d448de724352861cb99de934bc9ab74f0d16fe6a5460bdbdf938de875ad
+  languageName: node
+  linkType: hard
+
+"p-locate@npm:^3.0.0":
+  version: 3.0.0
+  resolution: "p-locate@npm:3.0.0"
+  dependencies:
+    p-limit: "npm:^2.0.0"
+  checksum: 10c0/7b7f06f718f19e989ce6280ed4396fb3c34dabdee0df948376483032f9d5ec22fdf7077ec942143a75827bb85b11da72016497fc10dac1106c837ed593969ee8
+  languageName: node
+  linkType: hard
+
+"p-locate@npm:^5.0.0":
+  version: 5.0.0
+  resolution: "p-locate@npm:5.0.0"
+  dependencies:
+    p-limit: "npm:^3.0.2"
+  checksum: 10c0/2290d627ab7903b8b70d11d384fee714b797f6040d9278932754a6860845c4d3190603a0772a663c8cb5a7b21d1b16acb3a6487ebcafa9773094edc3dfe6009a
+  languageName: node
+  linkType: hard
+
+"p-locate@npm:^6.0.0":
+  version: 6.0.0
+  resolution: "p-locate@npm:6.0.0"
+  dependencies:
+    p-limit: "npm:^4.0.0"
+  checksum: 10c0/d72fa2f41adce59c198270aa4d3c832536c87a1806e0f69dffb7c1a7ca998fb053915ca833d90f166a8c082d3859eabfed95f01698a3214c20df6bb8de046312
+  languageName: node
+  linkType: hard
+
+"p-map@npm:^4.0.0":
+  version: 4.0.0
+  resolution: "p-map@npm:4.0.0"
+  dependencies:
+    aggregate-error: "npm:^3.0.0"
+  checksum: 10c0/592c05bd6262c466ce269ff172bb8de7c6975afca9b50c975135b974e9bdaafbfe80e61aaaf5be6d1200ba08b30ead04b88cfa7e25ff1e3b93ab28c9f62a2c75
+  languageName: node
+  linkType: hard
+
+"p-retry@npm:^4.5.0":
+  version: 4.6.2
+  resolution: "p-retry@npm:4.6.2"
+  dependencies:
+    "@types/retry": "npm:0.12.0"
+    retry: "npm:^0.13.1"
+  checksum: 10c0/d58512f120f1590cfedb4c2e0c42cb3fa66f3cea8a4646632fcb834c56055bb7a6f138aa57b20cc236fb207c9d694e362e0b5c2b14d9b062f67e8925580c73b0
+  languageName: node
+  linkType: hard
+
+"p-try@npm:^2.0.0":
+  version: 2.2.0
+  resolution: "p-try@npm:2.2.0"
+  checksum: 10c0/c36c19907734c904b16994e6535b02c36c2224d433e01a2f1ab777237f4d86e6289fd5fd464850491e940379d4606ed850c03e0f9ab600b0ebddb511312e177f
+  languageName: node
+  linkType: hard
+
+"package-json-from-dist@npm:^1.0.0":
+  version: 1.0.0
+  resolution: "package-json-from-dist@npm:1.0.0"
+  checksum: 10c0/e3ffaf6ac1040ab6082a658230c041ad14e72fabe99076a2081bb1d5d41210f11872403fc09082daf4387fc0baa6577f96c9c0e94c90c394fd57794b66aa4033
+  languageName: node
+  linkType: hard
+
+"package-json@npm:^8.1.0":
+  version: 8.1.1
+  resolution: "package-json@npm:8.1.1"
+  dependencies:
+    got: "npm:^12.1.0"
+    registry-auth-token: "npm:^5.0.1"
+    registry-url: "npm:^6.0.0"
+    semver: "npm:^7.3.7"
+  checksum: 10c0/83b057878bca229033aefad4ef51569b484e63a65831ddf164dc31f0486817e17ffcb58c819c7af3ef3396042297096b3ffc04e107fd66f8f48756f6d2071c8f
+  languageName: node
+  linkType: hard
+
+"param-case@npm:^3.0.4":
+  version: 3.0.4
+  resolution: "param-case@npm:3.0.4"
+  dependencies:
+    dot-case: "npm:^3.0.4"
+    tslib: "npm:^2.0.3"
+  checksum: 10c0/ccc053f3019f878eca10e70ec546d92f51a592f762917dafab11c8b532715dcff58356118a6f350976e4ab109e321756f05739643ed0ca94298e82291e6f9e76
+
languageName: node + linkType: hard + +"parent-module@npm:^1.0.0": + version: 1.0.1 + resolution: "parent-module@npm:1.0.1" + dependencies: + callsites: "npm:^3.0.0" + checksum: 10c0/c63d6e80000d4babd11978e0d3fee386ca7752a02b035fd2435960ffaa7219dc42146f07069fb65e6e8bf1caef89daf9af7535a39bddf354d78bf50d8294f556 + languageName: node + linkType: hard + +"parse-entities@npm:^2.0.0": + version: 2.0.0 + resolution: "parse-entities@npm:2.0.0" + dependencies: + character-entities: "npm:^1.0.0" + character-entities-legacy: "npm:^1.0.0" + character-reference-invalid: "npm:^1.0.0" + is-alphanumerical: "npm:^1.0.0" + is-decimal: "npm:^1.0.0" + is-hexadecimal: "npm:^1.0.0" + checksum: 10c0/f85a22c0ea406ff26b53fdc28641f01cc36fa49eb2e3135f02693286c89ef0bcefc2262d99b3688e20aac2a14fd10b75c518583e875c1b9fe3d1f937795e0854 + languageName: node + linkType: hard + +"parse-entities@npm:^4.0.0": + version: 4.0.1 + resolution: "parse-entities@npm:4.0.1" + dependencies: + "@types/unist": "npm:^2.0.0" + character-entities: "npm:^2.0.0" + character-entities-legacy: "npm:^3.0.0" + character-reference-invalid: "npm:^2.0.0" + decode-named-character-reference: "npm:^1.0.0" + is-alphanumerical: "npm:^2.0.0" + is-decimal: "npm:^2.0.0" + is-hexadecimal: "npm:^2.0.0" + checksum: 10c0/9dfa3b0dc43a913c2558c4bd625b1abcc2d6c6b38aa5724b141ed988471977248f7ad234eed57e1bc70b694dd15b0d710a04f66c2f7c096e35abd91962b7d926 + languageName: node + linkType: hard + +"parse-json@npm:^5.0.0, parse-json@npm:^5.2.0": + version: 5.2.0 + resolution: "parse-json@npm:5.2.0" + dependencies: + "@babel/code-frame": "npm:^7.0.0" + error-ex: "npm:^1.3.1" + json-parse-even-better-errors: "npm:^2.3.0" + lines-and-columns: "npm:^1.1.6" + checksum: 10c0/77947f2253005be7a12d858aedbafa09c9ae39eb4863adf330f7b416ca4f4a08132e453e08de2db46459256fb66afaac5ee758b44fe6541b7cdaf9d252e59585 + languageName: node + linkType: hard + +"parse-json@npm:^7.0.0": + version: 7.1.1 + resolution: "parse-json@npm:7.1.1" + dependencies: + "@babel/code-frame": "npm:^7.21.4" + error-ex: "npm:^1.3.2" + json-parse-even-better-errors: "npm:^3.0.0" + lines-and-columns: "npm:^2.0.3" + type-fest: "npm:^3.8.0" + checksum: 10c0/a85ebc7430af7763fa52eb456d7efd35c35be5b06f04d8d80c37d0d33312ac6cdff12647acb9c95448dcc8b907dfafa81fb126e094aa132b0abc2a71b9df51d5 + languageName: node + linkType: hard + +"parse-numeric-range@npm:^1.3.0": + version: 1.3.0 + resolution: "parse-numeric-range@npm:1.3.0" + checksum: 10c0/53465afaa92111e86697281b684aa4574427360889cc23a1c215488c06b72441febdbf09f47ab0bef9a0c701e059629f3eebd2fe6fb241a254ad7a7a642aebe8 + languageName: node + linkType: hard + +"parse5-htmlparser2-tree-adapter@npm:^7.0.0": + version: 7.0.0 + resolution: "parse5-htmlparser2-tree-adapter@npm:7.0.0" + dependencies: + domhandler: "npm:^5.0.2" + parse5: "npm:^7.0.0" + checksum: 10c0/e820cacb8486e6f7ede403327d18480df086d70e32ede2f6654d8c3a8b4b8dc4a4d5c21c03c18a92ba2466c513b93ca63be4a138dd73cd0995f384eb3b9edf11 + languageName: node + linkType: hard + +"parse5@npm:^7.0.0": + version: 7.1.2 + resolution: "parse5@npm:7.1.2" + dependencies: + entities: "npm:^4.4.0" + checksum: 10c0/297d7af8224f4b5cb7f6617ecdae98eeaed7f8cbd78956c42785e230505d5a4f07cef352af10d3006fa5c1544b76b57784d3a22d861ae071bbc460c649482bf4 + languageName: node + linkType: hard + +"parseurl@npm:~1.3.2, parseurl@npm:~1.3.3": + version: 1.3.3 + resolution: "parseurl@npm:1.3.3" + checksum: 10c0/90dd4760d6f6174adb9f20cf0965ae12e23879b5f5464f38e92fce8073354341e4b3b76fa3d878351efe7d01e617121955284cfd002ab087fba1a0726ec0b4f5 + languageName: node 
+ linkType: hard + +"pascal-case@npm:^3.1.2": + version: 3.1.2 + resolution: "pascal-case@npm:3.1.2" + dependencies: + no-case: "npm:^3.0.4" + tslib: "npm:^2.0.3" + checksum: 10c0/05ff7c344809fd272fc5030ae0ee3da8e4e63f36d47a1e0a4855ca59736254192c5a27b5822ed4bae96e54048eec5f6907713cfcfff7cdf7a464eaf7490786d8 + languageName: node + linkType: hard + +"path-exists@npm:^3.0.0": + version: 3.0.0 + resolution: "path-exists@npm:3.0.0" + checksum: 10c0/17d6a5664bc0a11d48e2b2127d28a0e58822c6740bde30403f08013da599182289c56518bec89407e3f31d3c2b6b296a4220bc3f867f0911fee6952208b04167 + languageName: node + linkType: hard + +"path-exists@npm:^4.0.0": + version: 4.0.0 + resolution: "path-exists@npm:4.0.0" + checksum: 10c0/8c0bd3f5238188197dc78dced15207a4716c51cc4e3624c44fc97acf69558f5ebb9a2afff486fe1b4ee148e0c133e96c5e11a9aa5c48a3006e3467da070e5e1b + languageName: node + linkType: hard + +"path-exists@npm:^5.0.0": + version: 5.0.0 + resolution: "path-exists@npm:5.0.0" + checksum: 10c0/b170f3060b31604cde93eefdb7392b89d832dfbc1bed717c9718cbe0f230c1669b7e75f87e19901da2250b84d092989a0f9e44d2ef41deb09aa3ad28e691a40a + languageName: node + linkType: hard + +"path-is-absolute@npm:^1.0.0": + version: 1.0.1 + resolution: "path-is-absolute@npm:1.0.1" + checksum: 10c0/127da03c82172a2a50099cddbf02510c1791fc2cc5f7713ddb613a56838db1e8168b121a920079d052e0936c23005562059756d653b7c544c53185efe53be078 + languageName: node + linkType: hard + +"path-is-inside@npm:1.0.2": + version: 1.0.2 + resolution: "path-is-inside@npm:1.0.2" + checksum: 10c0/7fdd4b41672c70461cce734fc222b33e7b447fa489c7c4377c95e7e6852d83d69741f307d88ec0cc3b385b41cb4accc6efac3c7c511cd18512e95424f5fa980c + languageName: node + linkType: hard + +"path-key@npm:^3.0.0, path-key@npm:^3.1.0": + version: 3.1.1 + resolution: "path-key@npm:3.1.1" + checksum: 10c0/748c43efd5a569c039d7a00a03b58eecd1d75f3999f5a28303d75f521288df4823bc057d8784eb72358b2895a05f29a070bc9f1f17d28226cc4e62494cc58c4c + languageName: node + linkType: hard + +"path-parse@npm:^1.0.7": + version: 1.0.7 + resolution: "path-parse@npm:1.0.7" + checksum: 10c0/11ce261f9d294cc7a58d6a574b7f1b935842355ec66fba3c3fd79e0f036462eaf07d0aa95bb74ff432f9afef97ce1926c720988c6a7451d8a584930ae7de86e1 + languageName: node + linkType: hard + +"path-root-regex@npm:^0.1.0": + version: 0.1.2 + resolution: "path-root-regex@npm:0.1.2" + checksum: 10c0/27651a234f280c70d982dd25c35550f74a4284cde6b97237aab618cb4b5745682d18cdde1160617bb4a4b6b8aec4fbc911c4a2ad80d01fa4c7ee74dae7af2337 + languageName: node + linkType: hard + +"path-root@npm:^0.1.1": + version: 0.1.1 + resolution: "path-root@npm:0.1.1" + dependencies: + path-root-regex: "npm:^0.1.0" + checksum: 10c0/aed5cd290df84c46c7730f6a363e95e47a23929b51ab068a3818d69900da3e89dc154cdfd0c45c57b2e02f40c094351bc862db70c2cb00b7e6bd47039a227813 + languageName: node + linkType: hard + +"path-scurry@npm:^1.11.1": + version: 1.11.1 + resolution: "path-scurry@npm:1.11.1" + dependencies: + lru-cache: "npm:^10.2.0" + minipass: "npm:^5.0.0 || ^6.0.2 || ^7.0.0" + checksum: 10c0/32a13711a2a505616ae1cc1b5076801e453e7aae6ac40ab55b388bb91b9d0547a52f5aaceff710ea400205f18691120d4431e520afbe4266b836fadede15872d + languageName: node + linkType: hard + +"path-to-regexp@npm:0.1.10": + version: 0.1.10 + resolution: "path-to-regexp@npm:0.1.10" + checksum: 10c0/34196775b9113ca6df88e94c8d83ba82c0e1a2063dd33bfe2803a980da8d49b91db8104f49d5191b44ea780d46b8670ce2b7f4a5e349b0c48c6779b653f1afe4 + languageName: node + linkType: hard + +"path-to-regexp@npm:3.3.0": + version: 3.3.0 + resolution: 
"path-to-regexp@npm:3.3.0" + checksum: 10c0/ffa0ebe7088d38d435a8d08b0fe6e8c93ceb2a81a65d4dd1d9a538f52e09d5e3474ed5f553cb3b180d894b0caa10698a68737ab599fd1e56b4663d1a64c9f77b + languageName: node + linkType: hard + +"path-to-regexp@npm:^1.7.0": + version: 1.9.0 + resolution: "path-to-regexp@npm:1.9.0" + dependencies: + isarray: "npm:0.0.1" + checksum: 10c0/de9ddb01b84d9c2c8e2bed18630d8d039e2d6f60a6538595750fa08c7a6482512257464c8da50616f266ab2cdd2428387e85f3b089e4c3f25d0c537e898a0751 + languageName: node + linkType: hard + +"path-type@npm:^4.0.0": + version: 4.0.0 + resolution: "path-type@npm:4.0.0" + checksum: 10c0/666f6973f332f27581371efaf303fd6c272cc43c2057b37aa99e3643158c7e4b2626549555d88626e99ea9e046f82f32e41bbde5f1508547e9a11b149b52387c + languageName: node + linkType: hard + +"periscopic@npm:^3.0.0": + version: 3.1.0 + resolution: "periscopic@npm:3.1.0" + dependencies: + "@types/estree": "npm:^1.0.0" + estree-walker: "npm:^3.0.0" + is-reference: "npm:^3.0.0" + checksum: 10c0/fb5ce7cd810c49254cdf1cd3892811e6dd1a1dfbdf5f10a0a33fb7141baac36443c4cad4f0e2b30abd4eac613f6ab845c2bc1b7ce66ae9694c7321e6ada5bd96 + languageName: node + linkType: hard + +"picocolors@npm:^1.0.0, picocolors@npm:^1.0.1": + version: 1.0.1 + resolution: "picocolors@npm:1.0.1" + checksum: 10c0/c63cdad2bf812ef0d66c8db29583802355d4ca67b9285d846f390cc15c2f6ccb94e8cb7eb6a6e97fc5990a6d3ad4ae42d86c84d3146e667c739a4234ed50d400 + languageName: node + linkType: hard + +"picomatch@npm:^2.0.4, picomatch@npm:^2.2.1, picomatch@npm:^2.2.3, picomatch@npm:^2.3.1": + version: 2.3.1 + resolution: "picomatch@npm:2.3.1" + checksum: 10c0/26c02b8d06f03206fc2ab8d16f19960f2ff9e81a658f831ecb656d8f17d9edc799e8364b1f4a7873e89d9702dff96204be0fa26fe4181f6843f040f819dac4be + languageName: node + linkType: hard + +"pkg-dir@npm:^7.0.0": + version: 7.0.0 + resolution: "pkg-dir@npm:7.0.0" + dependencies: + find-up: "npm:^6.3.0" + checksum: 10c0/1afb23d2efb1ec9d8b2c4a0c37bf146822ad2774f074cb05b853be5dca1b40815c5960dd126df30ab8908349262a266f31b771e877235870a3b8fd313beebec5 + languageName: node + linkType: hard + +"pkg-up@npm:^3.1.0": + version: 3.1.0 + resolution: "pkg-up@npm:3.1.0" + dependencies: + find-up: "npm:^3.0.0" + checksum: 10c0/ecb60e1f8e1f611c0bdf1a0b6a474d6dfb51185567dc6f29cdef37c8d480ecba5362e006606bb290519bbb6f49526c403fabea93c3090c20368d98bb90c999ab + languageName: node + linkType: hard + +"possible-typed-array-names@npm:^1.0.0": + version: 1.0.0 + resolution: "possible-typed-array-names@npm:1.0.0" + checksum: 10c0/d9aa22d31f4f7680e20269db76791b41c3a32c01a373e25f8a4813b4d45f7456bfc2b6d68f752dc4aab0e0bb0721cb3d76fb678c9101cb7a16316664bc2c73fd + languageName: node + linkType: hard + +"postcss-calc@npm:^9.0.1": + version: 9.0.1 + resolution: "postcss-calc@npm:9.0.1" + dependencies: + postcss-selector-parser: "npm:^6.0.11" + postcss-value-parser: "npm:^4.2.0" + peerDependencies: + postcss: ^8.2.2 + checksum: 10c0/e0df07337162dbcaac5d6e030c7fd289e21da8766a9daca5d6b2b3c8094bb524ae5d74c70048ea7fe5fe4960ce048c60ac97922d917c3bbff34f58e9d2b0eb0e + languageName: node + linkType: hard + +"postcss-colormin@npm:^6.1.0": + version: 6.1.0 + resolution: "postcss-colormin@npm:6.1.0" + dependencies: + browserslist: "npm:^4.23.0" + caniuse-api: "npm:^3.0.0" + colord: "npm:^2.9.3" + postcss-value-parser: "npm:^4.2.0" + peerDependencies: + postcss: ^8.4.31 + checksum: 10c0/0802963fa0d8f2fe408b2e088117670f5303c69a58c135f0ecf0e5ceff69e95e87111b22c4e29c9adb2f69aa8d3bc175f4e8e8708eeb99c9ffc36c17064de427 + languageName: node + linkType: hard + 
+"postcss-convert-values@npm:^6.1.0": + version: 6.1.0 + resolution: "postcss-convert-values@npm:6.1.0" + dependencies: + browserslist: "npm:^4.23.0" + postcss-value-parser: "npm:^4.2.0" + peerDependencies: + postcss: ^8.4.31 + checksum: 10c0/a80066965cb58fe8fcaf79f306b32c83fc678e1f0678e43f4db3e9fee06eed6db92cf30631ad348a17492769d44757400493c91a33ee865ee8dedea9234a11f5 + languageName: node + linkType: hard + +"postcss-discard-comments@npm:^6.0.2": + version: 6.0.2 + resolution: "postcss-discard-comments@npm:6.0.2" + peerDependencies: + postcss: ^8.4.31 + checksum: 10c0/338a1fcba7e2314d956e5e5b9bd1e12e6541991bf85ac72aed6e229a029bf60edb31f11576b677623576169aa7d9c75e1be259ac7b50d0b735b841b5518f9da9 + languageName: node + linkType: hard + +"postcss-discard-duplicates@npm:^6.0.3": + version: 6.0.3 + resolution: "postcss-discard-duplicates@npm:6.0.3" + peerDependencies: + postcss: ^8.4.31 + checksum: 10c0/24d2f00e54668f2837eb38a64b1751d7a4a73b2752f9749e61eb728f1fae837984bc2b339f7f5207aff5f66f72551253489114b59b9ba21782072677a81d7d1b + languageName: node + linkType: hard + +"postcss-discard-empty@npm:^6.0.3": + version: 6.0.3 + resolution: "postcss-discard-empty@npm:6.0.3" + peerDependencies: + postcss: ^8.4.31 + checksum: 10c0/1af08bb29f18eda41edf3602b257d89a4cf0a16f79fc773cfebd4a37251f8dbd9b77ac18efe55d0677d000b43a8adf2ef9328d31961c810e9433a38494a1fa65 + languageName: node + linkType: hard + +"postcss-discard-overridden@npm:^6.0.2": + version: 6.0.2 + resolution: "postcss-discard-overridden@npm:6.0.2" + peerDependencies: + postcss: ^8.4.31 + checksum: 10c0/fda70ef3cd4cb508369c5bbbae44d7760c40ec9f2e65df1cd1b6e0314317fb1d25ae7f64987ca84e66889c1e9d1862487a6ce391c159dfe04d536597bfc5030d + languageName: node + linkType: hard + +"postcss-discard-unused@npm:^6.0.5": + version: 6.0.5 + resolution: "postcss-discard-unused@npm:6.0.5" + dependencies: + postcss-selector-parser: "npm:^6.0.16" + peerDependencies: + postcss: ^8.4.31 + checksum: 10c0/fca82f17395a7fcc78eab4e03dfb05958beb240c10cacb3836b832c6ea99f5259980c70890a9b7d8b67adf8071b61f3fcf1b432c7a116397aaf67909366da5cc + languageName: node + linkType: hard + +"postcss-loader@npm:^7.3.3": + version: 7.3.4 + resolution: "postcss-loader@npm:7.3.4" + dependencies: + cosmiconfig: "npm:^8.3.5" + jiti: "npm:^1.20.0" + semver: "npm:^7.5.4" + peerDependencies: + postcss: ^7.0.0 || ^8.0.1 + webpack: ^5.0.0 + checksum: 10c0/1bf7614aeea9ad1f8ee6be3a5451576c059391688ea67f825aedc2674056369597faeae4e4a81fe10843884c9904a71403d9a54197e1f560e8fbb9e61f2a2680 + languageName: node + linkType: hard + +"postcss-merge-idents@npm:^6.0.3": + version: 6.0.3 + resolution: "postcss-merge-idents@npm:6.0.3" + dependencies: + cssnano-utils: "npm:^4.0.2" + postcss-value-parser: "npm:^4.2.0" + peerDependencies: + postcss: ^8.4.31 + checksum: 10c0/fdb51d971df33218bd5fdd9619e5a4d854e23affcea51f96bf4391260cb8d0bec937854582fa9a19bde1fa1b2a43fa5a2f179da23a3adeb8e8d292a4749a8ed7 + languageName: node + linkType: hard + +"postcss-merge-longhand@npm:^6.0.5": + version: 6.0.5 + resolution: "postcss-merge-longhand@npm:6.0.5" + dependencies: + postcss-value-parser: "npm:^4.2.0" + stylehacks: "npm:^6.1.1" + peerDependencies: + postcss: ^8.4.31 + checksum: 10c0/5a223a7f698c05ab42e9997108a7ff27ea1e0c33a11a353d65a04fc89c3b5b750b9e749550d76b6406329117a055adfc79dde7fee48dca5c8e167a2854ae3fea + languageName: node + linkType: hard + +"postcss-merge-rules@npm:^6.1.1": + version: 6.1.1 + resolution: "postcss-merge-rules@npm:6.1.1" + dependencies: + browserslist: "npm:^4.23.0" + caniuse-api: "npm:^3.0.0" + 
cssnano-utils: "npm:^4.0.2" + postcss-selector-parser: "npm:^6.0.16" + peerDependencies: + postcss: ^8.4.31 + checksum: 10c0/6d8952dbb19b1e59bf5affe0871fa1be6515103466857cff5af879d6cf619659f8642ec7a931cabb7cdbd393d8c1e91748bf70bee70fa3edea010d4e25786d04 + languageName: node + linkType: hard + +"postcss-minify-font-values@npm:^6.1.0": + version: 6.1.0 + resolution: "postcss-minify-font-values@npm:6.1.0" + dependencies: + postcss-value-parser: "npm:^4.2.0" + peerDependencies: + postcss: ^8.4.31 + checksum: 10c0/0d6567170c22a7db42096b5eac298f041614890fbe01759a9fa5ccda432f2bb09efd399d92c11bf6675ae13ccd259db4602fad3c358317dee421df5f7ab0a003 + languageName: node + linkType: hard + +"postcss-minify-gradients@npm:^6.0.3": + version: 6.0.3 + resolution: "postcss-minify-gradients@npm:6.0.3" + dependencies: + colord: "npm:^2.9.3" + cssnano-utils: "npm:^4.0.2" + postcss-value-parser: "npm:^4.2.0" + peerDependencies: + postcss: ^8.4.31 + checksum: 10c0/7fcbcec94fe5455b89fe1b424a451198e60e0407c894bbacdc062d9fdef2f8571b483b5c3bb17f22d2f1249431251b2de22e1e4e8b0614d10624f8ee6e71afd2 + languageName: node + linkType: hard + +"postcss-minify-params@npm:^6.1.0": + version: 6.1.0 + resolution: "postcss-minify-params@npm:6.1.0" + dependencies: + browserslist: "npm:^4.23.0" + cssnano-utils: "npm:^4.0.2" + postcss-value-parser: "npm:^4.2.0" + peerDependencies: + postcss: ^8.4.31 + checksum: 10c0/e5c38c3e5fb42e2ca165764f983716e57d854a63a477f7389ccc94cd2ab8123707006613bd7f29acc6eafd296fff513aa6d869c98ac52590f886d641cb21a59e + languageName: node + linkType: hard + +"postcss-minify-selectors@npm:^6.0.4": + version: 6.0.4 + resolution: "postcss-minify-selectors@npm:6.0.4" + dependencies: + postcss-selector-parser: "npm:^6.0.16" + peerDependencies: + postcss: ^8.4.31 + checksum: 10c0/695ec2e1e3a7812b0cabe1105d0ed491760be3d8e9433914fb5af1fc30a84e6dc24089cd31b7e300de620b8e7adf806526c1acf8dd14077a7d1d2820c60a327c + languageName: node + linkType: hard + +"postcss-modules-extract-imports@npm:^3.1.0": + version: 3.1.0 + resolution: "postcss-modules-extract-imports@npm:3.1.0" + peerDependencies: + postcss: ^8.1.0 + checksum: 10c0/402084bcab376083c4b1b5111b48ec92974ef86066f366f0b2d5b2ac2b647d561066705ade4db89875a13cb175b33dd6af40d16d32b2ea5eaf8bac63bd2bf219 + languageName: node + linkType: hard + +"postcss-modules-local-by-default@npm:^4.0.5": + version: 4.0.5 + resolution: "postcss-modules-local-by-default@npm:4.0.5" + dependencies: + icss-utils: "npm:^5.0.0" + postcss-selector-parser: "npm:^6.0.2" + postcss-value-parser: "npm:^4.1.0" + peerDependencies: + postcss: ^8.1.0 + checksum: 10c0/f4ad35abeb685ecb25f80c93d9fe23c8b89ee45ac4185f3560e701b4d7372f9b798577e79c5ed03b6d9c80bc923b001210c127c04ced781f43cda9e32b202a5b + languageName: node + linkType: hard + +"postcss-modules-scope@npm:^3.2.0": + version: 3.2.0 + resolution: "postcss-modules-scope@npm:3.2.0" + dependencies: + postcss-selector-parser: "npm:^6.0.4" + peerDependencies: + postcss: ^8.1.0 + checksum: 10c0/a2f5ffe372169b3feb8628cd785eb748bf12e344cfa57bce9e5cdc4fa5adcdb40d36daa86bb35dad53427703b185772aad08825b5783f745fcb1b6039454a84b + languageName: node + linkType: hard + +"postcss-modules-values@npm:^4.0.0": + version: 4.0.0 + resolution: "postcss-modules-values@npm:4.0.0" + dependencies: + icss-utils: "npm:^5.0.0" + peerDependencies: + postcss: ^8.1.0 + checksum: 10c0/dd18d7631b5619fb9921b198c86847a2a075f32e0c162e0428d2647685e318c487a2566cc8cc669fc2077ef38115cde7a068e321f46fb38be3ad49646b639dbc + languageName: node + linkType: hard + 
+"postcss-normalize-charset@npm:^6.0.2": + version: 6.0.2 + resolution: "postcss-normalize-charset@npm:6.0.2" + peerDependencies: + postcss: ^8.4.31 + checksum: 10c0/af32a3b4cf94163d728b8aa935b2494c9f69fbc96a33b35f67ae15dbdef7fcc8732569df97cbaaf20ca6c0103c39adad0cfce2ba07ffed283796787f6c36f410 + languageName: node + linkType: hard + +"postcss-normalize-display-values@npm:^6.0.2": + version: 6.0.2 + resolution: "postcss-normalize-display-values@npm:6.0.2" + dependencies: + postcss-value-parser: "npm:^4.2.0" + peerDependencies: + postcss: ^8.4.31 + checksum: 10c0/782761850c7e697fdb6c3ff53076de716a71b60f9e835efb2f7ef238de347c88b5d55f0d43cf5c608e1ee58de65360e3d9fccd5f20774bba08ded7c87d8a5651 + languageName: node + linkType: hard + +"postcss-normalize-positions@npm:^6.0.2": + version: 6.0.2 + resolution: "postcss-normalize-positions@npm:6.0.2" + dependencies: + postcss-value-parser: "npm:^4.2.0" + peerDependencies: + postcss: ^8.4.31 + checksum: 10c0/9fdd42a47226bbda5f68774f3c4c3a90eb4fa708aef5a997c6a52fe6cac06585c9774038fe3bc1aa86a203c29223b8d8db6ebe7580c1aa293154f2b48db0b038 + languageName: node + linkType: hard + +"postcss-normalize-repeat-style@npm:^6.0.2": + version: 6.0.2 + resolution: "postcss-normalize-repeat-style@npm:6.0.2" + dependencies: + postcss-value-parser: "npm:^4.2.0" + peerDependencies: + postcss: ^8.4.31 + checksum: 10c0/9133ccbdf1286920c1cd0d01c1c5fa0bd3251b717f2f3e47d691dcc44978ac1dc419d20d9ae5428bd48ee542059e66b823ba699356f5968ccced5606c7c7ca34 + languageName: node + linkType: hard + +"postcss-normalize-string@npm:^6.0.2": + version: 6.0.2 + resolution: "postcss-normalize-string@npm:6.0.2" + dependencies: + postcss-value-parser: "npm:^4.2.0" + peerDependencies: + postcss: ^8.4.31 + checksum: 10c0/fecc2d52c4029b24fecf2ca2fb45df5dbdf9f35012194ad4ea80bc7be3252cdcb21a0976400902320595aa6178f2cc625cc804c6b6740aef6efa42105973a205 + languageName: node + linkType: hard + +"postcss-normalize-timing-functions@npm:^6.0.2": + version: 6.0.2 + resolution: "postcss-normalize-timing-functions@npm:6.0.2" + dependencies: + postcss-value-parser: "npm:^4.2.0" + peerDependencies: + postcss: ^8.4.31 + checksum: 10c0/a22af0b3374704e59ae70bbbcc66b7029137e284f04e30a2ad548818d1540d6c1ed748dd8f689b9b6df5c1064085a00ad07b6f7e25ffaad49d4e661b616cdeae + languageName: node + linkType: hard + +"postcss-normalize-unicode@npm:^6.1.0": + version: 6.1.0 + resolution: "postcss-normalize-unicode@npm:6.1.0" + dependencies: + browserslist: "npm:^4.23.0" + postcss-value-parser: "npm:^4.2.0" + peerDependencies: + postcss: ^8.4.31 + checksum: 10c0/ff5746670d94dd97b49a0955c3c71ff516fb4f54bbae257f877d179bacc44a62e50a0fd6e7ddf959f2ca35c335de4266b0c275d880bb57ad7827189339ab1582 + languageName: node + linkType: hard + +"postcss-normalize-url@npm:^6.0.2": + version: 6.0.2 + resolution: "postcss-normalize-url@npm:6.0.2" + dependencies: + postcss-value-parser: "npm:^4.2.0" + peerDependencies: + postcss: ^8.4.31 + checksum: 10c0/4718f1c0657788d2c560b340ee8e0a4eb3eb053eba6fbbf489e9a6e739b4c5f9ce1957f54bd03497c50a1f39962bf6ab9ff6ba4976b69dd160f6afd1670d69b7 + languageName: node + linkType: hard + +"postcss-normalize-whitespace@npm:^6.0.2": + version: 6.0.2 + resolution: "postcss-normalize-whitespace@npm:6.0.2" + dependencies: + postcss-value-parser: "npm:^4.2.0" + peerDependencies: + postcss: ^8.4.31 + checksum: 10c0/d5275a88e29a894aeb83a2a833e816d2456dbf3f39961628df596ce205dcc4895186a023812ff691945e0804241ccc53e520d16591b5812288474b474bbaf652 + languageName: node + linkType: hard + +"postcss-ordered-values@npm:^6.0.2": + 
version: 6.0.2 + resolution: "postcss-ordered-values@npm:6.0.2" + dependencies: + cssnano-utils: "npm:^4.0.2" + postcss-value-parser: "npm:^4.2.0" + peerDependencies: + postcss: ^8.4.31 + checksum: 10c0/aece23a289228aa804217a85f8da198d22b9123f02ca1310b81834af380d6fbe115e4300683599b4a2ab7f1c6a1dbd6789724c47c38e2b0a3774f2ea4b4f0963 + languageName: node + linkType: hard + +"postcss-reduce-idents@npm:^6.0.3": + version: 6.0.3 + resolution: "postcss-reduce-idents@npm:6.0.3" + dependencies: + postcss-value-parser: "npm:^4.2.0" + peerDependencies: + postcss: ^8.4.31 + checksum: 10c0/d9f9209e52ebb3d1d7feefc0be24fc74792e064e0fdec99554f050c6b882c61073d5d40986c545061b30e5ead881615e92c965dc765d8d83b2dec10d6a664e1f + languageName: node + linkType: hard + +"postcss-reduce-initial@npm:^6.1.0": + version: 6.1.0 + resolution: "postcss-reduce-initial@npm:6.1.0" + dependencies: + browserslist: "npm:^4.23.0" + caniuse-api: "npm:^3.0.0" + peerDependencies: + postcss: ^8.4.31 + checksum: 10c0/a8f28cf51ce9a1b9423cce1a01c1d7cbee90125930ec36435a0073e73aef402d90affe2fd3600c964b679cf738869fda447b95a9acce74414e9d67d5c6ba8646 + languageName: node + linkType: hard + +"postcss-reduce-transforms@npm:^6.0.2": + version: 6.0.2 + resolution: "postcss-reduce-transforms@npm:6.0.2" + dependencies: + postcss-value-parser: "npm:^4.2.0" + peerDependencies: + postcss: ^8.4.31 + checksum: 10c0/755ef27b3d083f586ac831f0c611a66e76f504d27e2100dc7674f6b86afad597901b4520cb889fe58ca70e852aa7fd0c0acb69a63d39dfe6a95860b472394e7c + languageName: node + linkType: hard + +"postcss-selector-parser@npm:^6.0.11, postcss-selector-parser@npm:^6.0.16, postcss-selector-parser@npm:^6.0.2, postcss-selector-parser@npm:^6.0.4": + version: 6.1.2 + resolution: "postcss-selector-parser@npm:6.1.2" + dependencies: + cssesc: "npm:^3.0.0" + util-deprecate: "npm:^1.0.2" + checksum: 10c0/523196a6bd8cf660bdf537ad95abd79e546d54180f9afb165a4ab3e651ac705d0f8b8ce6b3164fb9e3279ce482c5f751a69eb2d3a1e8eb0fd5e82294fb3ef13e + languageName: node + linkType: hard + +"postcss-sort-media-queries@npm:^5.2.0": + version: 5.2.0 + resolution: "postcss-sort-media-queries@npm:5.2.0" + dependencies: + sort-css-media-queries: "npm:2.2.0" + peerDependencies: + postcss: ^8.4.23 + checksum: 10c0/5e7f265a21999bdbf6592f7e15b3e889dd93bc9b15fe048958e8f85603ac276e69ef50305e8b41b10f4eea68917c9c25c7956fa9c3ba7f8577c1149416d35c4e + languageName: node + linkType: hard + +"postcss-svgo@npm:^6.0.3": + version: 6.0.3 + resolution: "postcss-svgo@npm:6.0.3" + dependencies: + postcss-value-parser: "npm:^4.2.0" + svgo: "npm:^3.2.0" + peerDependencies: + postcss: ^8.4.31 + checksum: 10c0/994b15a88cbb411f32cfa98957faa5623c76f2d75fede51f5f47238f06b367ebe59c204fecbdaf21ccb9e727239a4b290087e04c502392658a0c881ddfbd61f2 + languageName: node + linkType: hard + +"postcss-unique-selectors@npm:^6.0.4": + version: 6.0.4 + resolution: "postcss-unique-selectors@npm:6.0.4" + dependencies: + postcss-selector-parser: "npm:^6.0.16" + peerDependencies: + postcss: ^8.4.31 + checksum: 10c0/bfb99d8a7c675c93f2e65c9d9d563477bfd46fdce9e2727d42d57982b31ccbaaf944e8034bfbefe48b3119e77fba7eb1b181c19b91cb3a5448058fa66a7c9ae9 + languageName: node + linkType: hard + +"postcss-value-parser@npm:^4.1.0, postcss-value-parser@npm:^4.2.0": + version: 4.2.0 + resolution: "postcss-value-parser@npm:4.2.0" + checksum: 10c0/f4142a4f56565f77c1831168e04e3effd9ffcc5aebaf0f538eee4b2d465adfd4b85a44257bb48418202a63806a7da7fe9f56c330aebb3cac898e46b4cbf49161 + languageName: node + linkType: hard + +"postcss-zindex@npm:^6.0.2": + version: 6.0.2 + 
resolution: "postcss-zindex@npm:6.0.2" + peerDependencies: + postcss: ^8.4.31 + checksum: 10c0/346291703e1f2dd954144d2bb251713dad6ae10e8aa05c3873dee2fc7a30d72da7866bec060abd932b9b839bc1495f73d813dde5312750a69d7ad33c435ce7ea + languageName: node + linkType: hard + +"postcss@npm:^8.4.21, postcss@npm:^8.4.24, postcss@npm:^8.4.26, postcss@npm:^8.4.33, postcss@npm:^8.4.38": + version: 8.4.41 + resolution: "postcss@npm:8.4.41" + dependencies: + nanoid: "npm:^3.3.7" + picocolors: "npm:^1.0.1" + source-map-js: "npm:^1.2.0" + checksum: 10c0/c1828fc59e7ec1a3bf52b3a42f615dba53c67960ed82a81df6441b485fe43c20aba7f4e7c55425762fd99c594ecabbaaba8cf5b30fd79dfec5b52a9f63a2d690 + languageName: node + linkType: hard + +"prebuild-install@npm:^7.1.1": + version: 7.1.2 + resolution: "prebuild-install@npm:7.1.2" + dependencies: + detect-libc: "npm:^2.0.0" + expand-template: "npm:^2.0.3" + github-from-package: "npm:0.0.0" + minimist: "npm:^1.2.3" + mkdirp-classic: "npm:^0.5.3" + napi-build-utils: "npm:^1.0.1" + node-abi: "npm:^3.3.0" + pump: "npm:^3.0.0" + rc: "npm:^1.2.7" + simple-get: "npm:^4.0.0" + tar-fs: "npm:^2.0.0" + tunnel-agent: "npm:^0.6.0" + bin: + prebuild-install: bin.js + checksum: 10c0/e64868ba9ef2068fd7264f5b03e5298a901e02a450acdb1f56258d88c09dea601eefdb3d1dfdff8513fdd230a92961712be0676192626a3b4d01ba154d48bdd3 + languageName: node + linkType: hard + +"prelude-ls@npm:^1.2.1": + version: 1.2.1 + resolution: "prelude-ls@npm:1.2.1" + checksum: 10c0/b00d617431e7886c520a6f498a2e14c75ec58f6d93ba48c3b639cf241b54232d90daa05d83a9e9b9fef6baa63cb7e1e4602c2372fea5bc169668401eb127d0cd + languageName: node + linkType: hard + +"prettier-eslint@npm:^16.3.0": + version: 16.3.0 + resolution: "prettier-eslint@npm:16.3.0" + dependencies: + "@typescript-eslint/parser": "npm:^6.7.5" + common-tags: "npm:^1.4.0" + dlv: "npm:^1.1.0" + eslint: "npm:^8.7.0" + indent-string: "npm:^4.0.0" + lodash.merge: "npm:^4.6.0" + loglevel-colored-level-prefix: "npm:^1.0.0" + prettier: "npm:^3.0.1" + pretty-format: "npm:^29.7.0" + require-relative: "npm:^0.8.7" + typescript: "npm:^5.2.2" + vue-eslint-parser: "npm:^9.1.0" + peerDependencies: + prettier-plugin-svelte: ^3.0.0 + svelte-eslint-parser: "*" + peerDependenciesMeta: + prettier-plugin-svelte: + optional: true + svelte-eslint-parser: + optional: true + checksum: 10c0/9efe0fbfceee8d635c1cdb62bd8442b5c4f4b8045d8ec438f5ab0acc2d696c42723c67a4f9b9a660975c39ea95cedb60b310cd77aefe302faf7d0e9ff67e0cae + languageName: node + linkType: hard + +"prettier-linter-helpers@npm:^1.0.0": + version: 1.0.0 + resolution: "prettier-linter-helpers@npm:1.0.0" + dependencies: + fast-diff: "npm:^1.1.2" + checksum: 10c0/81e0027d731b7b3697ccd2129470ed9913ecb111e4ec175a12f0fcfab0096516373bf0af2fef132af50cafb0a905b74ff57996d615f59512bb9ac7378fcc64ab + languageName: node + linkType: hard + +"prettier@npm:^3.0.1, prettier@npm:^3.3.3": + version: 3.3.3 + resolution: "prettier@npm:3.3.3" + bin: + prettier: bin/prettier.cjs + checksum: 10c0/b85828b08e7505716324e4245549b9205c0cacb25342a030ba8885aba2039a115dbcf75a0b7ca3b37bc9d101ee61fab8113fc69ca3359f2a226f1ecc07ad2e26 + languageName: node + linkType: hard + +"pretty-error@npm:^4.0.0": + version: 4.0.0 + resolution: "pretty-error@npm:4.0.0" + dependencies: + lodash: "npm:^4.17.20" + renderkid: "npm:^3.0.0" + checksum: 10c0/dc292c087e2857b2e7592784ab31e37a40f3fa918caa11eba51f9fb2853e1d4d6e820b219917e35f5721d833cfd20fdf4f26ae931a90fd1ad0cae2125c345138 + languageName: node + linkType: hard + +"pretty-format@npm:^29.7.0": + version: 29.7.0 + resolution: 
"pretty-format@npm:29.7.0" + dependencies: + "@jest/schemas": "npm:^29.6.3" + ansi-styles: "npm:^5.0.0" + react-is: "npm:^18.0.0" + checksum: 10c0/edc5ff89f51916f036c62ed433506b55446ff739358de77207e63e88a28ca2894caac6e73dcb68166a606e51c8087d32d400473e6a9fdd2dbe743f46c9c0276f + languageName: node + linkType: hard + +"pretty-time@npm:^1.1.0": + version: 1.1.0 + resolution: "pretty-time@npm:1.1.0" + checksum: 10c0/ba9d7af19cd43838fb2b147654990949575e400dc2cc24bf71ec4a6c4033a38ba8172b1014b597680c6d4d3c075e94648b2c13a7206c5f0c90b711c7388726f3 + languageName: node + linkType: hard + +"prism-react-renderer@npm:^2.3.0": + version: 2.3.1 + resolution: "prism-react-renderer@npm:2.3.1" + dependencies: + "@types/prismjs": "npm:^1.26.0" + clsx: "npm:^2.0.0" + peerDependencies: + react: ">=16.0.0" + checksum: 10c0/566932127ca18049a651aa038a8f8c7c1ca15950d21b659c2ce71fd95bd03bef2b5d40c489e7aa3453eaf15d984deef542a609d7842e423e6a13427dd90bd371 + languageName: node + linkType: hard + +"prism-react-renderer@npm:^2.4.0": + version: 2.4.0 + resolution: "prism-react-renderer@npm:2.4.0" + dependencies: + "@types/prismjs": "npm:^1.26.0" + clsx: "npm:^2.0.0" + peerDependencies: + react: ">=16.0.0" + checksum: 10c0/3d6969b057da0efe39e3e637bf93601cd5757de5919180e8df16daf1d1b8eedc39b70c7f6f28724fba0a01bc857c6b78312ab027f4e913159d1165c5aba235bb + languageName: node + linkType: hard + +"prismjs@npm:^1.29.0": + version: 1.29.0 + resolution: "prismjs@npm:1.29.0" + checksum: 10c0/d906c4c4d01b446db549b4f57f72d5d7e6ccaca04ecc670fb85cea4d4b1acc1283e945a9cbc3d81819084a699b382f970e02f9d1378e14af9808d366d9ed7ec6 + languageName: node + linkType: hard + +"proc-log@npm:^4.0.0, proc-log@npm:^4.1.0, proc-log@npm:^4.2.0": + version: 4.2.0 + resolution: "proc-log@npm:4.2.0" + checksum: 10c0/17db4757c2a5c44c1e545170e6c70a26f7de58feb985091fb1763f5081cab3d01b181fb2dd240c9f4a4255a1d9227d163d5771b7e69c9e49a561692db865efb9 + languageName: node + linkType: hard + +"process-nextick-args@npm:~2.0.0": + version: 2.0.1 + resolution: "process-nextick-args@npm:2.0.1" + checksum: 10c0/bec089239487833d46b59d80327a1605e1c5287eaad770a291add7f45fda1bb5e28b38e0e061add0a1d0ee0984788ce74fa394d345eed1c420cacf392c554367 + languageName: node + linkType: hard + +"promise-inflight@npm:^1.0.1": + version: 1.0.1 + resolution: "promise-inflight@npm:1.0.1" + checksum: 10c0/d179d148d98fbff3d815752fa9a08a87d3190551d1420f17c4467f628214db12235ae068d98cd001f024453676d8985af8f28f002345646c4ece4600a79620bc + languageName: node + linkType: hard + +"promise-retry@npm:^2.0.1": + version: 2.0.1 + resolution: "promise-retry@npm:2.0.1" + dependencies: + err-code: "npm:^2.0.2" + retry: "npm:^0.12.0" + checksum: 10c0/9c7045a1a2928094b5b9b15336dcd2a7b1c052f674550df63cc3f36cd44028e5080448175b6f6ca32b642de81150f5e7b1a98b728f15cb069f2dd60ac2616b96 + languageName: node + linkType: hard + +"prompts@npm:^2.4.2": + version: 2.4.2 + resolution: "prompts@npm:2.4.2" + dependencies: + kleur: "npm:^3.0.3" + sisteransi: "npm:^1.0.5" + checksum: 10c0/16f1ac2977b19fe2cf53f8411cc98db7a3c8b115c479b2ca5c82b5527cd937aa405fa04f9a5960abeb9daef53191b53b4d13e35c1f5d50e8718c76917c5f1ea4 + languageName: node + linkType: hard + +"prop-types@npm:^15.0.0, prop-types@npm:^15.6.2, prop-types@npm:^15.7.2, prop-types@npm:^15.8.1": + version: 15.8.1 + resolution: "prop-types@npm:15.8.1" + dependencies: + loose-envify: "npm:^1.4.0" + object-assign: "npm:^4.1.1" + react-is: "npm:^16.13.1" + checksum: 
10c0/59ece7ca2fb9838031d73a48d4becb9a7cc1ed10e610517c7d8f19a1e02fa47f7c27d557d8a5702bec3cfeccddc853579832b43f449e54635803f277b1c78077 + languageName: node + linkType: hard + +"property-information@npm:^6.0.0": + version: 6.5.0 + resolution: "property-information@npm:6.5.0" + checksum: 10c0/981e0f9cc2e5acdb414a6fd48a99dd0fd3a4079e7a91ab41cf97a8534cf43e0e0bc1ffada6602a1b3d047a33db8b5fc2ef46d863507eda712d5ceedac443f0ef + languageName: node + linkType: hard + +"proto-list@npm:~1.2.1": + version: 1.2.4 + resolution: "proto-list@npm:1.2.4" + checksum: 10c0/b9179f99394ec8a68b8afc817690185f3b03933f7b46ce2e22c1930dc84b60d09f5ad222beab4e59e58c6c039c7f7fcf620397235ef441a356f31f9744010e12 + languageName: node + linkType: hard + +"proxy-addr@npm:~2.0.7": + version: 2.0.7 + resolution: "proxy-addr@npm:2.0.7" + dependencies: + forwarded: "npm:0.2.0" + ipaddr.js: "npm:1.9.1" + checksum: 10c0/c3eed999781a35f7fd935f398b6d8920b6fb00bbc14287bc6de78128ccc1a02c89b95b56742bf7cf0362cc333c61d138532049c7dedc7a328ef13343eff81210 + languageName: node + linkType: hard + +"pump@npm:^3.0.0": + version: 3.0.0 + resolution: "pump@npm:3.0.0" + dependencies: + end-of-stream: "npm:^1.1.0" + once: "npm:^1.3.1" + checksum: 10c0/bbdeda4f747cdf47db97428f3a135728669e56a0ae5f354a9ac5b74556556f5446a46f720a8f14ca2ece5be9b4d5d23c346db02b555f46739934cc6c093a5478 + languageName: node + linkType: hard + +"punycode@npm:^1.3.2": + version: 1.4.1 + resolution: "punycode@npm:1.4.1" + checksum: 10c0/354b743320518aef36f77013be6e15da4db24c2b4f62c5f1eb0529a6ed02fbaf1cb52925785f6ab85a962f2b590d9cd5ad730b70da72b5f180e2556b8bd3ca08 + languageName: node + linkType: hard + +"punycode@npm:^2.1.0": + version: 2.3.1 + resolution: "punycode@npm:2.3.1" + checksum: 10c0/14f76a8206bc3464f794fb2e3d3cc665ae416c01893ad7a02b23766eb07159144ee612ad67af5e84fa4479ccfe67678c4feb126b0485651b302babf66f04f9e9 + languageName: node + linkType: hard + +"pupa@npm:^3.1.0": + version: 3.1.0 + resolution: "pupa@npm:3.1.0" + dependencies: + escape-goat: "npm:^4.0.0" + checksum: 10c0/02afa6e4547a733484206aaa8f8eb3fbfb12d3dd17d7ca4fa1ea390a7da2cb8f381e38868bbf68009c4d372f8f6059f553171b6a712d8f2802c7cd43d513f06c + languageName: node + linkType: hard + +"qs@npm:6.13.0": + version: 6.13.0 + resolution: "qs@npm:6.13.0" + dependencies: + side-channel: "npm:^1.0.6" + checksum: 10c0/62372cdeec24dc83a9fb240b7533c0fdcf0c5f7e0b83343edd7310f0ab4c8205a5e7c56406531f2e47e1b4878a3821d652be4192c841de5b032ca83619d8f860 + languageName: node + linkType: hard + +"queue-microtask@npm:^1.2.2": + version: 1.2.3 + resolution: "queue-microtask@npm:1.2.3" + checksum: 10c0/900a93d3cdae3acd7d16f642c29a642aea32c2026446151f0778c62ac089d4b8e6c986811076e1ae180a694cedf077d453a11b58ff0a865629a4f82ab558e102 + languageName: node + linkType: hard + +"queue-tick@npm:^1.0.1": + version: 1.0.1 + resolution: "queue-tick@npm:1.0.1" + checksum: 10c0/0db998e2c9b15215317dbcf801e9b23e6bcde4044e115155dae34f8e7454b9a783f737c9a725528d677b7a66c775eb7a955cf144fe0b87f62b575ce5bfd515a9 + languageName: node + linkType: hard + +"queue@npm:6.0.2": + version: 6.0.2 + resolution: "queue@npm:6.0.2" + dependencies: + inherits: "npm:~2.0.3" + checksum: 10c0/cf987476cc72e7d3aaabe23ccefaab1cd757a2b5e0c8d80b67c9575a6b5e1198807ffd4f0948a3f118b149d1111d810ee773473530b77a5c606673cac2c9c996 + languageName: node + linkType: hard + +"quick-lru@npm:^5.1.1": + version: 5.1.1 + resolution: "quick-lru@npm:5.1.1" + checksum: 
10c0/a24cba5da8cec30d70d2484be37622580f64765fb6390a928b17f60cd69e8dbd32a954b3ff9176fa1b86d86ff2ba05252fae55dc4d40d0291c60412b0ad096da + languageName: node + linkType: hard + +"randombytes@npm:^2.1.0": + version: 2.1.0 + resolution: "randombytes@npm:2.1.0" + dependencies: + safe-buffer: "npm:^5.1.0" + checksum: 10c0/50395efda7a8c94f5dffab564f9ff89736064d32addf0cc7e8bf5e4166f09f8ded7a0849ca6c2d2a59478f7d90f78f20d8048bca3cdf8be09d8e8a10790388f3 + languageName: node + linkType: hard + +"range-parser@npm:1.2.0": + version: 1.2.0 + resolution: "range-parser@npm:1.2.0" + checksum: 10c0/c7aef4f6588eb974c475649c157f197d07437d8c6c8ff7e36280a141463fb5ab7a45918417334ebd7b665c6b8321cf31c763f7631dd5f5db9372249261b8b02a + languageName: node + linkType: hard + +"range-parser@npm:^1.2.1, range-parser@npm:~1.2.1": + version: 1.2.1 + resolution: "range-parser@npm:1.2.1" + checksum: 10c0/96c032ac2475c8027b7a4e9fe22dc0dfe0f6d90b85e496e0f016fbdb99d6d066de0112e680805075bd989905e2123b3b3d002765149294dce0c1f7f01fcc2ea0 + languageName: node + linkType: hard + +"raw-body@npm:2.5.2": + version: 2.5.2 + resolution: "raw-body@npm:2.5.2" + dependencies: + bytes: "npm:3.1.2" + http-errors: "npm:2.0.0" + iconv-lite: "npm:0.4.24" + unpipe: "npm:1.0.0" + checksum: 10c0/b201c4b66049369a60e766318caff5cb3cc5a900efd89bdac431463822d976ad0670912c931fdbdcf5543207daf6f6833bca57aa116e1661d2ea91e12ca692c4 + languageName: node + linkType: hard + +"raw-loader@npm:^4.0.2": + version: 4.0.2 + resolution: "raw-loader@npm:4.0.2" + dependencies: + loader-utils: "npm:^2.0.0" + schema-utils: "npm:^3.0.0" + peerDependencies: + webpack: ^4.0.0 || ^5.0.0 + checksum: 10c0/981ebe65e1cee7230300d21ba6dcd8bd23ea81ef4ad2b167c0f62d93deba347f27921d330be848634baab3831cf9f38900af6082d6416c2e937fe612fa6a74ff + languageName: node + linkType: hard + +"rc@npm:1.2.8, rc@npm:^1.2.7": + version: 1.2.8 + resolution: "rc@npm:1.2.8" + dependencies: + deep-extend: "npm:^0.6.0" + ini: "npm:~1.3.0" + minimist: "npm:^1.2.0" + strip-json-comments: "npm:~2.0.1" + bin: + rc: ./cli.js + checksum: 10c0/24a07653150f0d9ac7168e52943cc3cb4b7a22c0e43c7dff3219977c2fdca5a2760a304a029c20811a0e79d351f57d46c9bde216193a0f73978496afc2b85b15 + languageName: node + linkType: hard + +"react-dev-utils@npm:^12.0.1": + version: 12.0.1 + resolution: "react-dev-utils@npm:12.0.1" + dependencies: + "@babel/code-frame": "npm:^7.16.0" + address: "npm:^1.1.2" + browserslist: "npm:^4.18.1" + chalk: "npm:^4.1.2" + cross-spawn: "npm:^7.0.3" + detect-port-alt: "npm:^1.1.6" + escape-string-regexp: "npm:^4.0.0" + filesize: "npm:^8.0.6" + find-up: "npm:^5.0.0" + fork-ts-checker-webpack-plugin: "npm:^6.5.0" + global-modules: "npm:^2.0.0" + globby: "npm:^11.0.4" + gzip-size: "npm:^6.0.0" + immer: "npm:^9.0.7" + is-root: "npm:^2.1.0" + loader-utils: "npm:^3.2.0" + open: "npm:^8.4.0" + pkg-up: "npm:^3.1.0" + prompts: "npm:^2.4.2" + react-error-overlay: "npm:^6.0.11" + recursive-readdir: "npm:^2.2.2" + shell-quote: "npm:^1.7.3" + strip-ansi: "npm:^6.0.1" + text-table: "npm:^0.2.0" + checksum: 10c0/94bc4ee5014290ca47a025e53ab2205c5dc0299670724d46a0b1bacbdd48904827b5ae410842d0a3a92481509097ae032e4a9dc7ca70db437c726eaba6411e82 + languageName: node + linkType: hard + +"react-dom@npm:^18.3.1": + version: 18.3.1 + resolution: "react-dom@npm:18.3.1" + dependencies: + loose-envify: "npm:^1.1.0" + scheduler: "npm:^0.23.2" + peerDependencies: + react: ^18.3.1 + checksum: 10c0/a752496c1941f958f2e8ac56239172296fcddce1365ce45222d04a1947e0cc5547df3e8447f855a81d6d39f008d7c32eab43db3712077f09e3f67c4874973e85 + languageName: 
node + linkType: hard + +"react-error-overlay@npm:^6.0.11": + version: 6.0.11 + resolution: "react-error-overlay@npm:6.0.11" + checksum: 10c0/8fc93942976e0c704274aec87dbc8e21f62a2cc78d1c93f9bcfff9f7494b00c60f7a2f0bd48d832bcd3190627c0255a1df907373f61f820371373a65ec4b2d64 + languageName: node + linkType: hard + +"react-fast-compare@npm:^3.2.0, react-fast-compare@npm:^3.2.2": + version: 3.2.2 + resolution: "react-fast-compare@npm:3.2.2" + checksum: 10c0/0bbd2f3eb41ab2ff7380daaa55105db698d965c396df73e6874831dbafec8c4b5b08ba36ff09df01526caa3c61595247e3269558c284e37646241cba2b90a367 + languageName: node + linkType: hard + +"react-helmet-async@npm:*": + version: 2.0.5 + resolution: "react-helmet-async@npm:2.0.5" + dependencies: + invariant: "npm:^2.2.4" + react-fast-compare: "npm:^3.2.2" + shallowequal: "npm:^1.1.0" + peerDependencies: + react: ^16.6.0 || ^17.0.0 || ^18.0.0 + checksum: 10c0/f390ea8bf13c2681850e5f8eb5b73d8613f407c245a5fd23e9db9b2cc14a3700dd1ce992d3966632886d1d613083294c2aeee009193f49dfa7d145d9f13ea2b0 + languageName: node + linkType: hard + +"react-helmet-async@npm:^1.3.0": + version: 1.3.0 + resolution: "react-helmet-async@npm:1.3.0" + dependencies: + "@babel/runtime": "npm:^7.12.5" + invariant: "npm:^2.2.4" + prop-types: "npm:^15.7.2" + react-fast-compare: "npm:^3.2.0" + shallowequal: "npm:^1.1.0" + peerDependencies: + react: ^16.6.0 || ^17.0.0 || ^18.0.0 + react-dom: ^16.6.0 || ^17.0.0 || ^18.0.0 + checksum: 10c0/8f3e6d26beff61d2ed18f7b41561df3e4d83a7582914c7196aa65158c7f3cce939276547d7a0b8987952d9d44131406df74efba02d1f8fa8a3940b49e6ced70b + languageName: node + linkType: hard + +"react-is@npm:^16.13.1, react-is@npm:^16.6.0, react-is@npm:^16.7.0": + version: 16.13.1 + resolution: "react-is@npm:16.13.1" + checksum: 10c0/33977da7a5f1a287936a0c85639fec6ca74f4f15ef1e59a6bc20338fc73dc69555381e211f7a3529b8150a1f71e4225525b41b60b52965bda53ce7d47377ada1 + languageName: node + linkType: hard + +"react-is@npm:^17.0.1 || ^18.0.0, react-is@npm:^18.0.0": + version: 18.3.1 + resolution: "react-is@npm:18.3.1" + checksum: 10c0/f2f1e60010c683479e74c63f96b09fb41603527cd131a9959e2aee1e5a8b0caf270b365e5ca77d4a6b18aae659b60a86150bb3979073528877029b35aecd2072 + languageName: node + linkType: hard + +"react-json-view-lite@npm:^1.2.0": + version: 1.4.0 + resolution: "react-json-view-lite@npm:1.4.0" + peerDependencies: + react: ^16.13.1 || ^17.0.0 || ^18.0.0 + checksum: 10c0/80dd21b14f9dcd93b2f473084aaa934594834a98ae2ed5725c98fae34486226d2eaa69a0bc4233f89b7bab4825e2d393efd6f7d39d59aa37a5bb44a61785f7e5 + languageName: node + linkType: hard + +"react-loadable-ssr-addon-v5-slorber@npm:^1.0.1": + version: 1.0.1 + resolution: "react-loadable-ssr-addon-v5-slorber@npm:1.0.1" + dependencies: + "@babel/runtime": "npm:^7.10.3" + peerDependencies: + react-loadable: "*" + webpack: ">=4.41.1 || 5.x" + checksum: 10c0/7b0645f66adec56646f985ba8094c66a1c0a4627d96ad80eea32431d773ef1f79aa47d3247a8f21db3b064a0c6091653c5b5d3483b7046722eb64e55bffe635c + languageName: node + linkType: hard + +"react-loadable@npm:@docusaurus/react-loadable@6.0.0": + version: 6.0.0 + resolution: "@docusaurus/react-loadable@npm:6.0.0" + dependencies: + "@types/react": "npm:*" + peerDependencies: + react: "*" + checksum: 10c0/6b145d1a8d2e7342ceef58dd154aa990322f72a6cb98955ab8ce8e3f0dc7f0c5d00f9c2e4efa8d356c5effed72a130b5588857332b11faba0398f5429b484b04 + languageName: node + linkType: hard + +"react-router-config@npm:^5.1.1": + version: 5.1.1 + resolution: "react-router-config@npm:5.1.1" + dependencies: + "@babel/runtime": "npm:^7.1.2" + 
peerDependencies: + react: ">=15" + react-router: ">=5" + checksum: 10c0/1f8f4e55ca68b7b012293e663eb0ee4d670a3df929b78928f713ef98cd9d62c7f5c30a098d6668e64bbb11c7d6bb24e9e6b9c985a8b82465a1858dc7ba663f2b + languageName: node + linkType: hard + +"react-router-dom@npm:^5.3.4": + version: 5.3.4 + resolution: "react-router-dom@npm:5.3.4" + dependencies: + "@babel/runtime": "npm:^7.12.13" + history: "npm:^4.9.0" + loose-envify: "npm:^1.3.1" + prop-types: "npm:^15.6.2" + react-router: "npm:5.3.4" + tiny-invariant: "npm:^1.0.2" + tiny-warning: "npm:^1.0.0" + peerDependencies: + react: ">=15" + checksum: 10c0/f04f727e2ed2e9d1d3830af02cc61690ff67b1524c0d18690582bfba0f4d14142ccc88fb6da6befad644fddf086f5ae4c2eb7048c67da8a0b0929c19426421b0 + languageName: node + linkType: hard + +"react-router@npm:5.3.4, react-router@npm:^5.3.4": + version: 5.3.4 + resolution: "react-router@npm:5.3.4" + dependencies: + "@babel/runtime": "npm:^7.12.13" + history: "npm:^4.9.0" + hoist-non-react-statics: "npm:^3.1.0" + loose-envify: "npm:^1.3.1" + path-to-regexp: "npm:^1.7.0" + prop-types: "npm:^15.6.2" + react-is: "npm:^16.6.0" + tiny-invariant: "npm:^1.0.2" + tiny-warning: "npm:^1.0.0" + peerDependencies: + react: ">=15" + checksum: 10c0/e15c00dfef199249b4c6e6d98e5e76cc352ce66f3270f13df37cc069ddf7c05e43281e8c308fc407e4435d72924373baef1d2890e0f6b0b1eb423cf47315a053 + languageName: node + linkType: hard + +"react-waypoint@npm:^10.3.0": + version: 10.3.0 + resolution: "react-waypoint@npm:10.3.0" + dependencies: + "@babel/runtime": "npm:^7.12.5" + consolidated-events: "npm:^1.1.0 || ^2.0.0" + prop-types: "npm:^15.0.0" + react-is: "npm:^17.0.1 || ^18.0.0" + peerDependencies: + react: ^15.3.0 || ^16.0.0 || ^17.0.0 || ^18.0.0 + checksum: 10c0/872e8a04a0dca12d6405169ebdafe13f59f473c93f9d179ce833533e6126d4b7e25336a0bc00e044cb033d413aad7a87961a1d887639f27b26f662a774491ec1 + languageName: node + linkType: hard + +"react@npm:^18.3.1": + version: 18.3.1 + resolution: "react@npm:18.3.1" + dependencies: + loose-envify: "npm:^1.1.0" + checksum: 10c0/283e8c5efcf37802c9d1ce767f302dd569dd97a70d9bb8c7be79a789b9902451e0d16334b05d73299b20f048cbc3c7d288bbbde10b701fa194e2089c237dbea3 + languageName: node + linkType: hard + +"read-package-json-fast@npm:^3.0.0": + version: 3.0.2 + resolution: "read-package-json-fast@npm:3.0.2" + dependencies: + json-parse-even-better-errors: "npm:^3.0.0" + npm-normalize-package-bin: "npm:^3.0.0" + checksum: 10c0/37787e075f0260a92be0428687d9020eecad7ece3bda37461c2219e50d1ec183ab6ba1d9ada193691435dfe119a42c8a5b5b5463f08c8ddbc3d330800b265318 + languageName: node + linkType: hard + +"readable-stream@npm:^2.0.1": + version: 2.3.8 + resolution: "readable-stream@npm:2.3.8" + dependencies: + core-util-is: "npm:~1.0.0" + inherits: "npm:~2.0.3" + isarray: "npm:~1.0.0" + process-nextick-args: "npm:~2.0.0" + safe-buffer: "npm:~5.1.1" + string_decoder: "npm:~1.1.1" + util-deprecate: "npm:~1.0.1" + checksum: 10c0/7efdb01f3853bc35ac62ea25493567bf588773213f5f4a79f9c365e1ad13bab845ac0dae7bc946270dc40c3929483228415e92a3fc600cc7e4548992f41ee3fa + languageName: node + linkType: hard + +"readable-stream@npm:^3.0.2, readable-stream@npm:^3.0.6, readable-stream@npm:^3.1.1, readable-stream@npm:^3.4.0": + version: 3.6.2 + resolution: "readable-stream@npm:3.6.2" + dependencies: + inherits: "npm:^2.0.3" + string_decoder: "npm:^1.1.1" + util-deprecate: "npm:^1.0.1" + checksum: 10c0/e37be5c79c376fdd088a45fa31ea2e423e5d48854be7a22a58869b4e84d25047b193f6acb54f1012331e1bcd667ffb569c01b99d36b0bd59658fb33f513511b7 + languageName: node + linkType: 
hard + +"readdirp@npm:~3.6.0": + version: 3.6.0 + resolution: "readdirp@npm:3.6.0" + dependencies: + picomatch: "npm:^2.2.1" + checksum: 10c0/6fa848cf63d1b82ab4e985f4cf72bd55b7dcfd8e0a376905804e48c3634b7e749170940ba77b32804d5fe93b3cc521aa95a8d7e7d725f830da6d93f3669ce66b + languageName: node + linkType: hard + +"reading-time@npm:^1.5.0": + version: 1.5.0 + resolution: "reading-time@npm:1.5.0" + checksum: 10c0/0f730852fd4fb99e5f78c5b0cf36ab8c3fa15db96f87d9563843f6fd07a47864273ade539ebb184b785b728cde81a70283aa2d9b80cba5ca03b81868be03cabc + languageName: node + linkType: hard + +"rechoir@npm:^0.6.2": + version: 0.6.2 + resolution: "rechoir@npm:0.6.2" + dependencies: + resolve: "npm:^1.1.6" + checksum: 10c0/22c4bb32f4934a9468468b608417194f7e3ceba9a508512125b16082c64f161915a28467562368eeb15dc16058eb5b7c13a20b9eb29ff9927d1ebb3b5aa83e84 + languageName: node + linkType: hard + +"recursive-readdir@npm:^2.2.2": + version: 2.2.3 + resolution: "recursive-readdir@npm:2.2.3" + dependencies: + minimatch: "npm:^3.0.5" + checksum: 10c0/d0238f137b03af9cd645e1e0b40ae78b6cda13846e3ca57f626fcb58a66c79ae018a10e926b13b3a460f1285acc946a4e512ea8daa2e35df4b76a105709930d1 + languageName: node + linkType: hard + +"reflect.getprototypeof@npm:^1.0.4": + version: 1.0.6 + resolution: "reflect.getprototypeof@npm:1.0.6" + dependencies: + call-bind: "npm:^1.0.7" + define-properties: "npm:^1.2.1" + es-abstract: "npm:^1.23.1" + es-errors: "npm:^1.3.0" + get-intrinsic: "npm:^1.2.4" + globalthis: "npm:^1.0.3" + which-builtin-type: "npm:^1.1.3" + checksum: 10c0/baf4ef8ee6ff341600f4720b251cf5a6cb552d6a6ab0fdc036988c451bf16f920e5feb0d46bd4f530a5cce568f1f7aca2d77447ca798920749cfc52783c39b55 + languageName: node + linkType: hard + +"regenerate-unicode-properties@npm:^10.1.0": + version: 10.1.1 + resolution: "regenerate-unicode-properties@npm:10.1.1" + dependencies: + regenerate: "npm:^1.4.2" + checksum: 10c0/89adb5ee5ba081380c78f9057c02e156a8181969f6fcca72451efc45612e0c3df767b4333f8d8479c274d9c6fe52ec4854f0d8a22ef95dccbe87da8e5f2ac77d + languageName: node + linkType: hard + +"regenerate@npm:^1.4.2": + version: 1.4.2 + resolution: "regenerate@npm:1.4.2" + checksum: 10c0/f73c9eba5d398c818edc71d1c6979eaa05af7a808682749dd079f8df2a6d91a9b913db216c2c9b03e0a8ba2bba8701244a93f45211afbff691c32c7b275db1b8 + languageName: node + linkType: hard + +"regenerator-runtime@npm:^0.14.0": + version: 0.14.1 + resolution: "regenerator-runtime@npm:0.14.1" + checksum: 10c0/1b16eb2c4bceb1665c89de70dcb64126a22bc8eb958feef3cd68fe11ac6d2a4899b5cd1b80b0774c7c03591dc57d16631a7f69d2daa2ec98100e2f29f7ec4cc4 + languageName: node + linkType: hard + +"regenerator-transform@npm:^0.15.2": + version: 0.15.2 + resolution: "regenerator-transform@npm:0.15.2" + dependencies: + "@babel/runtime": "npm:^7.8.4" + checksum: 10c0/7cfe6931ec793269701994a93bab89c0cc95379191fad866270a7fea2adfec67ea62bb5b374db77058b60ba4509319d9b608664d0d288bd9989ca8dbd08fae90 + languageName: node + linkType: hard + +"regexp.prototype.flags@npm:^1.5.2": + version: 1.5.2 + resolution: "regexp.prototype.flags@npm:1.5.2" + dependencies: + call-bind: "npm:^1.0.6" + define-properties: "npm:^1.2.1" + es-errors: "npm:^1.3.0" + set-function-name: "npm:^2.0.1" + checksum: 10c0/0f3fc4f580d9c349f8b560b012725eb9c002f36daa0041b3fbf6f4238cb05932191a4d7d5db3b5e2caa336d5150ad0402ed2be81f711f9308fe7e1a9bf9bd552 + languageName: node + linkType: hard + +"regexpu-core@npm:^5.3.1": + version: 5.3.2 + resolution: "regexpu-core@npm:5.3.2" + dependencies: + "@babel/regjsgen": "npm:^0.8.0" + regenerate: "npm:^1.4.2" + 
+    regenerate-unicode-properties: "npm:^10.1.0"
+    regjsparser: "npm:^0.9.1"
+    unicode-match-property-ecmascript: "npm:^2.0.0"
+    unicode-match-property-value-ecmascript: "npm:^2.1.0"
+  checksum: 10c0/7945d5ab10c8bbed3ca383d4274687ea825aee4ab93a9c51c6e31e1365edd5ea807f6908f800ba017b66c462944ba68011164e7055207747ab651f8111ef3770
+  languageName: node
+  linkType: hard
+
+"registry-auth-token@npm:^5.0.1":
+  version: 5.0.2
+  resolution: "registry-auth-token@npm:5.0.2"
+  dependencies:
+    "@pnpm/npm-conf": "npm:^2.1.0"
+  checksum: 10c0/20fc2225681cc54ae7304b31ebad5a708063b1949593f02dfe5fb402bc1fc28890cecec6497ea396ba86d6cca8a8480715926dfef8cf1f2f11e6f6cc0a1b4bde
+  languageName: node
+  linkType: hard
+
+"registry-url@npm:^6.0.0":
+  version: 6.0.1
+  resolution: "registry-url@npm:6.0.1"
+  dependencies:
+    rc: "npm:1.2.8"
+  checksum: 10c0/66e2221c8113fc35ee9d23fe58cb516fc8d556a189fb8d6f1011a02efccc846c4c9b5075b4027b99a5d5c9ad1345ac37f297bea3c0ca30d607ec8084bf561b90
+  languageName: node
+  linkType: hard
+
+"regjsparser@npm:^0.9.1":
+  version: 0.9.1
+  resolution: "regjsparser@npm:0.9.1"
+  dependencies:
+    jsesc: "npm:~0.5.0"
+  bin:
+    regjsparser: bin/parser
+  checksum: 10c0/fe44fcf19a99fe4f92809b0b6179530e5ef313ff7f87df143b08ce9a2eb3c4b6189b43735d645be6e8f4033bfb015ed1ca54f0583bc7561bed53fd379feb8225
+  languageName: node
+  linkType: hard
+
+"rehype-raw@npm:^7.0.0":
+  version: 7.0.0
+  resolution: "rehype-raw@npm:7.0.0"
+  dependencies:
+    "@types/hast": "npm:^3.0.0"
+    hast-util-raw: "npm:^9.0.0"
+    vfile: "npm:^6.0.0"
+  checksum: 10c0/1435b4b6640a5bc3abe3b2133885c4dbff5ef2190ef9cfe09d6a63f74dd7d7ffd0cede70603278560ccf1acbfb9da9faae4b68065a28bc5aa88ad18e40f32d52
+  languageName: node
+  linkType: hard
+
+"relateurl@npm:^0.2.7":
+  version: 0.2.7
+  resolution: "relateurl@npm:0.2.7"
+  checksum: 10c0/c248b4e3b32474f116a804b537fa6343d731b80056fb506dffd91e737eef4cac6be47a65aae39b522b0db9d0b1011d1a12e288d82a109ecd94a5299d82f6573a
+  languageName: node
+  linkType: hard
+
+"remark-directive@npm:^3.0.0":
+  version: 3.0.0
+  resolution: "remark-directive@npm:3.0.0"
+  dependencies:
+    "@types/mdast": "npm:^4.0.0"
+    mdast-util-directive: "npm:^3.0.0"
+    micromark-extension-directive: "npm:^3.0.0"
+    unified: "npm:^11.0.0"
+  checksum: 10c0/eeec4d70501c5bce55b2528fa0c8f1e2a5c713c9f72a7d4678dd3868c425620ec409a719bb2656663296bc476c63f5d7bcacd5a9059146bfc89d40e4ce13a7f6
+  languageName: node
+  linkType: hard
+
+"remark-emoji@npm:^4.0.0":
+  version: 4.0.1
+  resolution: "remark-emoji@npm:4.0.1"
+  dependencies:
+    "@types/mdast": "npm:^4.0.2"
+    emoticon: "npm:^4.0.1"
+    mdast-util-find-and-replace: "npm:^3.0.1"
+    node-emoji: "npm:^2.1.0"
+    unified: "npm:^11.0.4"
+  checksum: 10c0/27f88892215f3efe8f25c43f226a82d70144a1ae5906d36f6e09390b893b2d5524d5949bd8ca6a02be0e3cb5cba908b35c4221f4e07f34e93d13d6ff9347dbb8
+  languageName: node
+  linkType: hard
+
+"remark-frontmatter@npm:^5.0.0":
+  version: 5.0.0
+  resolution: "remark-frontmatter@npm:5.0.0"
+  dependencies:
+    "@types/mdast": "npm:^4.0.0"
+    mdast-util-frontmatter: "npm:^2.0.0"
+    micromark-extension-frontmatter: "npm:^2.0.0"
+    unified: "npm:^11.0.0"
+  checksum: 10c0/102325d5edbcf30eaf74de8a0a6e03096cc2370dfef19080fd2dd208f368fbb2323388751ac9931a1aa38a4f2828fa4bad6c52dc5249dcadcd34861693b52bf9
+  languageName: node
+  linkType: hard
+
+"remark-gfm@npm:^4.0.0":
+  version: 4.0.0
+  resolution: "remark-gfm@npm:4.0.0"
+  dependencies:
+    "@types/mdast": "npm:^4.0.0"
+    mdast-util-gfm: "npm:^3.0.0"
+    micromark-extension-gfm: "npm:^3.0.0"
+    remark-parse: "npm:^11.0.0"
+    remark-stringify: "npm:^11.0.0"
+    unified: "npm:^11.0.0"
+  checksum: 10c0/db0aa85ab718d475c2596e27c95be9255d3b0fc730a4eda9af076b919f7dd812f7be3ac020611a8dbe5253fd29671d7b12750b56e529fdc32dfebad6dbf77403
+  languageName: node
+  linkType: hard
+
+"remark-mdx@npm:^3.0.0, remark-mdx@npm:^3.0.1":
+  version: 3.0.1
+  resolution: "remark-mdx@npm:3.0.1"
+  dependencies:
+    mdast-util-mdx: "npm:^3.0.0"
+    micromark-extension-mdxjs: "npm:^3.0.0"
+  checksum: 10c0/9e16cd5ff3b30620bd25351a2dd1701627fa5555785b35ee5fe07bd1e6793a9c825cc1f6af9e54a44351f74879f8b5ea2bce8e5a21379aeab58935e76a4d69ce
+  languageName: node
+  linkType: hard
+
+"remark-parse@npm:^11.0.0":
+  version: 11.0.0
+  resolution: "remark-parse@npm:11.0.0"
+  dependencies:
+    "@types/mdast": "npm:^4.0.0"
+    mdast-util-from-markdown: "npm:^2.0.0"
+    micromark-util-types: "npm:^2.0.0"
+    unified: "npm:^11.0.0"
+  checksum: 10c0/6eed15ddb8680eca93e04fcb2d1b8db65a743dcc0023f5007265dda558b09db595a087f622062ccad2630953cd5cddc1055ce491d25a81f3317c858348a8dd38
+  languageName: node
+  linkType: hard
+
+"remark-rehype@npm:^11.0.0":
+  version: 11.1.0
+  resolution: "remark-rehype@npm:11.1.0"
+  dependencies:
+    "@types/hast": "npm:^3.0.0"
+    "@types/mdast": "npm:^4.0.0"
+    mdast-util-to-hast: "npm:^13.0.0"
+    unified: "npm:^11.0.0"
+    vfile: "npm:^6.0.0"
+  checksum: 10c0/7a9534847ea70e78cf09227a4302af7e491f625fd092351a1b1ee27a2de0a369ac4acf069682e8a8ec0a55847b3e83f0be76b2028aa90e98e69e21420b9794c3
+  languageName: node
+  linkType: hard
+
+"remark-stringify@npm:^11.0.0":
+  version: 11.0.0
+  resolution: "remark-stringify@npm:11.0.0"
+  dependencies:
+    "@types/mdast": "npm:^4.0.0"
+    mdast-util-to-markdown: "npm:^2.0.0"
+    unified: "npm:^11.0.0"
+  checksum: 10c0/0cdb37ce1217578f6f847c7ec9f50cbab35df5b9e3903d543e74b405404e67c07defcb23cd260a567b41b769400f6de03c2c3d9cd6ae7a6707d5c8d89ead489f
+  languageName: node
+  linkType: hard
+
+"renderkid@npm:^3.0.0":
+  version: 3.0.0
+  resolution: "renderkid@npm:3.0.0"
+  dependencies:
+    css-select: "npm:^4.1.3"
+    dom-converter: "npm:^0.2.0"
+    htmlparser2: "npm:^6.1.0"
+    lodash: "npm:^4.17.21"
+    strip-ansi: "npm:^6.0.1"
+  checksum: 10c0/24a9fae4cc50e731d059742d1b3eec163dc9e3872b12010d120c3fcbd622765d9cda41f79a1bbb4bf63c1d3442f18a08f6e1642cb5d7ebf092a0ce3f7a3bd143
+  languageName: node
+  linkType: hard
+
+"require-from-string@npm:^2.0.2":
+  version: 2.0.2
+  resolution: "require-from-string@npm:2.0.2"
+  checksum: 10c0/aaa267e0c5b022fc5fd4eef49d8285086b15f2a1c54b28240fdf03599cbd9c26049fee3eab894f2e1f6ca65e513b030a7c264201e3f005601e80c49fb2937ce2
+  languageName: node
+  linkType: hard
+
+"require-like@npm:>= 0.1.1":
+  version: 0.1.2
+  resolution: "require-like@npm:0.1.2"
+  checksum: 10c0/9035ff6c4000a56ede6fc51dd5c56541fafa5a7dddc9b1c3a5f9148d95ee21c603c9bf5c6e37b19fc7de13d9294260842d8590b2ffd6c7c773e78603d1af8050
+  languageName: node
+  linkType: hard
+
+"require-relative@npm:^0.8.7":
+  version: 0.8.7
+  resolution: "require-relative@npm:0.8.7"
+  checksum: 10c0/b2d36d20cb849c26fb8134064048162e029ebbf373c915e6d31b2d5caa9e9b599c7b3e70700c019c28c9347369e85ecbcf139956788ece2774d8cb355d24c36f
+  languageName: node
+  linkType: hard
+
+"requires-port@npm:^1.0.0":
+  version: 1.0.0
+  resolution: "requires-port@npm:1.0.0"
+  checksum: 10c0/b2bfdd09db16c082c4326e573a82c0771daaf7b53b9ce8ad60ea46aa6e30aaf475fe9b164800b89f93b748d2c234d8abff945d2551ba47bf5698e04cd7713267
+  languageName: node
+  linkType: hard
+
+"resolve-alpn@npm:^1.2.0":
+  version: 1.2.1
+  resolution: "resolve-alpn@npm:1.2.1"
+  checksum: 10c0/b70b29c1843bc39781ef946c8cd4482e6d425976599c0f9c138cec8209e4e0736161bf39319b01676a847000085dfdaf63583c6fb4427bf751a10635bd2aa0c4
+  languageName: node
+  linkType: hard
+
+"resolve-from@npm:^4.0.0":
+  version: 4.0.0
+  resolution: "resolve-from@npm:4.0.0"
+  checksum: 10c0/8408eec31a3112ef96e3746c37be7d64020cda07c03a920f5024e77290a218ea758b26ca9529fd7b1ad283947f34b2291c1c0f6aa0ed34acfdda9c6014c8d190
+  languageName: node
+  linkType: hard
+
+"resolve-package-path@npm:^4.0.3":
+  version: 4.0.3
+  resolution: "resolve-package-path@npm:4.0.3"
+  dependencies:
+    path-root: "npm:^0.1.1"
+  checksum: 10c0/d2e7883a075b21fbf084f7615f9201e4d5aea6c22ba670dc66503a256c5eba5983d0822b9d51ef33303bfe9b0025916f622f6d780c42d7c020d826f8a9bc58fa
+  languageName: node
+  linkType: hard
+
+"resolve-pathname@npm:^3.0.0":
+  version: 3.0.0
+  resolution: "resolve-pathname@npm:3.0.0"
+  checksum: 10c0/c6ec49b670dc35b9a303c47fa83ba9348a71e92d64a4c4bb85e1b659a29b407aa1ac1cb14a9b5b502982132ca77482bd80534bca147439d66880d35a137fe723
+  languageName: node
+  linkType: hard
+
+"resolve@npm:^1.1.6, resolve@npm:^1.14.2":
+  version: 1.22.8
+  resolution: "resolve@npm:1.22.8"
+  dependencies:
+    is-core-module: "npm:^2.13.0"
+    path-parse: "npm:^1.0.7"
+    supports-preserve-symlinks-flag: "npm:^1.0.0"
+  bin:
+    resolve: bin/resolve
+  checksum: 10c0/07e179f4375e1fd072cfb72ad66d78547f86e6196c4014b31cb0b8bb1db5f7ca871f922d08da0fbc05b94e9fd42206f819648fa3b5b873ebbc8e1dc68fec433a
+  languageName: node
+  linkType: hard
+
+"resolve@npm:^2.0.0-next.5":
+  version: 2.0.0-next.5
+  resolution: "resolve@npm:2.0.0-next.5"
+  dependencies:
+    is-core-module: "npm:^2.13.0"
+    path-parse: "npm:^1.0.7"
+    supports-preserve-symlinks-flag: "npm:^1.0.0"
+  bin:
+    resolve: bin/resolve
+  checksum: 10c0/a6c33555e3482ea2ec4c6e3d3bf0d78128abf69dca99ae468e64f1e30acaa318fd267fb66c8836b04d558d3e2d6ed875fe388067e7d8e0de647d3c21af21c43a
+  languageName: node
+  linkType: hard
+
+"resolve@patch:resolve@npm%3A^1.1.6#optional!builtin, resolve@patch:resolve@npm%3A^1.14.2#optional!builtin":
+  version: 1.22.8
+  resolution: "resolve@patch:resolve@npm%3A1.22.8#optional!builtin::version=1.22.8&hash=c3c19d"
+  dependencies:
+    is-core-module: "npm:^2.13.0"
+    path-parse: "npm:^1.0.7"
+    supports-preserve-symlinks-flag: "npm:^1.0.0"
+  bin:
+    resolve: bin/resolve
+  checksum: 10c0/0446f024439cd2e50c6c8fa8ba77eaa8370b4180f401a96abf3d1ebc770ac51c1955e12764cde449fde3fff480a61f84388e3505ecdbab778f4bef5f8212c729
+  languageName: node
+  linkType: hard
+
+"resolve@patch:resolve@npm%3A^2.0.0-next.5#optional!builtin":
+  version: 2.0.0-next.5
+  resolution: "resolve@patch:resolve@npm%3A2.0.0-next.5#optional!builtin::version=2.0.0-next.5&hash=c3c19d"
+  dependencies:
+    is-core-module: "npm:^2.13.0"
+    path-parse: "npm:^1.0.7"
+    supports-preserve-symlinks-flag: "npm:^1.0.0"
+  bin:
+    resolve: bin/resolve
+  checksum: 10c0/78ad6edb8309a2bfb720c2c1898f7907a37f858866ce11a5974643af1203a6a6e05b2fa9c53d8064a673a447b83d42569260c306d43628bff5bb101969708355
+  languageName: node
+  linkType: hard
+
+"responselike@npm:^3.0.0":
+  version: 3.0.0
+  resolution: "responselike@npm:3.0.0"
+  dependencies:
+    lowercase-keys: "npm:^3.0.0"
+  checksum: 10c0/8af27153f7e47aa2c07a5f2d538cb1e5872995f0e9ff77def858ecce5c3fe677d42b824a62cde502e56d275ab832b0a8bd350d5cd6b467ac0425214ac12ae658
+  languageName: node
+  linkType: hard
+
+"retry@npm:^0.12.0":
+  version: 0.12.0
+  resolution: "retry@npm:0.12.0"
+  checksum: 10c0/59933e8501727ba13ad73ef4a04d5280b3717fd650408460c987392efe9d7be2040778ed8ebe933c5cbd63da3dcc37919c141ef8af0a54a6e4fca5a2af177bfe
+  languageName: node
+  linkType: hard
+
+"retry@npm:^0.13.1":
+  version: 0.13.1
+  resolution: "retry@npm:0.13.1"
+  checksum: 10c0/9ae822ee19db2163497e074ea919780b1efa00431d197c7afdb950e42bf109196774b92a49fc9821f0b8b328a98eea6017410bfc5e8a0fc19c85c6d11adb3772
+  languageName: node
+  linkType: hard
+
+"reusify@npm:^1.0.4":
+  version: 1.0.4
+  resolution: "reusify@npm:1.0.4"
+  checksum: 10c0/c19ef26e4e188f408922c46f7ff480d38e8dfc55d448310dfb518736b23ed2c4f547fb64a6ed5bdba92cd7e7ddc889d36ff78f794816d5e71498d645ef476107
+  languageName: node
+  linkType: hard
+
+"rimraf@npm:^3.0.2":
+  version: 3.0.2
+  resolution: "rimraf@npm:3.0.2"
+  dependencies:
+    glob: "npm:^7.1.3"
+  bin:
+    rimraf: bin.js
+  checksum: 10c0/9cb7757acb489bd83757ba1a274ab545eafd75598a9d817e0c3f8b164238dd90eba50d6b848bd4dcc5f3040912e882dc7ba71653e35af660d77b25c381d402e8
+  languageName: node
+  linkType: hard
+
+"robust-predicates@npm:^3.0.2":
+  version: 3.0.2
+  resolution: "robust-predicates@npm:3.0.2"
+  checksum: 10c0/4ecd53649f1c2d49529c85518f2fa69ffb2f7a4453f7fd19c042421c7b4d76c3efb48bc1c740c8f7049346d7cb58cf08ee0c9adaae595cc23564d360adb1fde4
+  languageName: node
+  linkType: hard
+
+"rtl-detect@npm:^1.0.4":
+  version: 1.1.2
+  resolution: "rtl-detect@npm:1.1.2"
+  checksum: 10c0/1b92888aafca1593314f837e83fdf02eb208faae3e713ab87c176804728efd3b1980d53b64f65f1fa593348087e852c5cd729b7b9372950f6e9b7be489afc0ca
+  languageName: node
+  linkType: hard
+
+"rtlcss@npm:^4.1.0":
+  version: 4.2.0
+  resolution: "rtlcss@npm:4.2.0"
+  dependencies:
+    escalade: "npm:^3.1.1"
+    picocolors: "npm:^1.0.0"
+    postcss: "npm:^8.4.21"
+    strip-json-comments: "npm:^3.1.1"
+  bin:
+    rtlcss: bin/rtlcss.js
+  checksum: 10c0/8d1512c36f426bc4f133bc14ab06f11f3f7880a88491ddab81733551465f72adace688653f13fbb6d343961c08503ede5b204bf224e8adf8941a045d5756f537
+  languageName: node
+  linkType: hard
+
+"run-parallel@npm:^1.1.9":
+  version: 1.2.0
+  resolution: "run-parallel@npm:1.2.0"
+  dependencies:
+    queue-microtask: "npm:^1.2.2"
+  checksum: 10c0/200b5ab25b5b8b7113f9901bfe3afc347e19bb7475b267d55ad0eb86a62a46d77510cb0f232507c9e5d497ebda569a08a9867d0d14f57a82ad5564d991588b39
+  languageName: node
+  linkType: hard
+
+"rw@npm:1":
+  version: 1.3.3
+  resolution: "rw@npm:1.3.3"
+  checksum: 10c0/b1e1ef37d1e79d9dc7050787866e30b6ddcb2625149276045c262c6b4d53075ddc35f387a856a8e76f0d0df59f4cd58fe24707e40797ebee66e542b840ed6a53
+  languageName: node
+  linkType: hard
+
+"sade@npm:^1.7.3":
+  version: 1.8.1
+  resolution: "sade@npm:1.8.1"
+  dependencies:
+    mri: "npm:^1.1.0"
+  checksum: 10c0/da8a3a5d667ad5ce3bf6d4f054bbb9f711103e5df21003c5a5c1a8a77ce12b640ed4017dd423b13c2307ea7e645adee7c2ae3afe8051b9db16a6f6d3da3f90b1
+  languageName: node
+  linkType: hard
+
+"safe-array-concat@npm:^1.1.2":
+  version: 1.1.2
+  resolution: "safe-array-concat@npm:1.1.2"
+  dependencies:
+    call-bind: "npm:^1.0.7"
+    get-intrinsic: "npm:^1.2.4"
+    has-symbols: "npm:^1.0.3"
+    isarray: "npm:^2.0.5"
+  checksum: 10c0/12f9fdb01c8585e199a347eacc3bae7b5164ae805cdc8c6707199dbad5b9e30001a50a43c4ee24dc9ea32dbb7279397850e9208a7e217f4d8b1cf5d90129dec9
+  languageName: node
+  linkType: hard
+
+"safe-buffer@npm:5.1.2, safe-buffer@npm:~5.1.0, safe-buffer@npm:~5.1.1":
+  version: 5.1.2
+  resolution: "safe-buffer@npm:5.1.2"
+  checksum: 10c0/780ba6b5d99cc9a40f7b951d47152297d0e260f0df01472a1b99d4889679a4b94a13d644f7dbc4f022572f09ae9005fa2fbb93bbbd83643316f365a3e9a45b21
+  languageName: node
+  linkType: hard
+
+"safe-buffer@npm:5.2.1, safe-buffer@npm:>=5.1.0, safe-buffer@npm:^5.0.1, safe-buffer@npm:^5.1.0, safe-buffer@npm:~5.2.0":
+  version: 5.2.1
+  resolution: "safe-buffer@npm:5.2.1"
+  checksum: 10c0/6501914237c0a86e9675d4e51d89ca3c21ffd6a31642efeba25ad65720bce6921c9e7e974e5be91a786b25aa058b5303285d3c15dbabf983a919f5f630d349f3
+  languageName: node
+  linkType: hard
+
+"safe-regex-test@npm:^1.0.3":
+  version: 1.0.3
+  resolution: "safe-regex-test@npm:1.0.3"
+  dependencies:
+    call-bind: "npm:^1.0.6"
+    es-errors: "npm:^1.3.0"
+    is-regex: "npm:^1.1.4"
+  checksum: 10c0/900bf7c98dc58f08d8523b7012b468e4eb757afa624f198902c0643d7008ba777b0bdc35810ba0b758671ce887617295fb742b3f3968991b178ceca54cb07603
+  languageName: node
+  linkType: hard
+
+"safer-buffer@npm:>= 2.1.2 < 3, safer-buffer@npm:>= 2.1.2 < 3.0.0":
+  version: 2.1.2
+  resolution: "safer-buffer@npm:2.1.2"
+  checksum: 10c0/7e3c8b2e88a1841c9671094bbaeebd94448111dd90a81a1f606f3f67708a6ec57763b3b47f06da09fc6054193e0e6709e77325415dc8422b04497a8070fa02d4
+  languageName: node
+  linkType: hard
+
+"sass-loader@npm:^10.1.1":
+  version: 10.5.2
+  resolution: "sass-loader@npm:10.5.2"
+  dependencies:
+    klona: "npm:^2.0.4"
+    loader-utils: "npm:^2.0.0"
+    neo-async: "npm:^2.6.2"
+    schema-utils: "npm:^3.0.0"
+    semver: "npm:^7.3.2"
+  peerDependencies:
+    fibers: ">= 3.1.0"
+    node-sass: ^4.0.0 || ^5.0.0 || ^6.0.0 || ^7.0.0 || ^8.0.0 || ^9.0.0
+    sass: ^1.3.0
+    webpack: ^4.36.0 || ^5.0.0
+  peerDependenciesMeta:
+    fibers:
+      optional: true
+    node-sass:
+      optional: true
+    sass:
+      optional: true
+  checksum: 10c0/5ba4a83459fbb50e21d4f4b1b59baf1ddf8dd404099b6d1f2ec887c6903659e505879915030dd9efb1c6dd5fde2d515a19f418487b73d1cc59f6aad60c79bcf5
+  languageName: node
+  linkType: hard
+
+"sass@npm:^1.78.0":
+  version: 1.78.0
+  resolution: "sass@npm:1.78.0"
+  dependencies:
+    chokidar: "npm:>=3.0.0 <4.0.0"
+    immutable: "npm:^4.0.0"
+    source-map-js: "npm:>=0.6.2 <2.0.0"
+  bin:
+    sass: sass.js
+  checksum: 10c0/6577a87c00b03a5a50f3a11b4b6592f28abce34e61812e381535a3b712151bd94db3ca06467d20395431e0f38a23f99e616d6859d771fb6d4617c359f590c48c
+  languageName: node
+  linkType: hard
+
+"sax@npm:^1.2.4":
+  version: 1.4.1
+  resolution: "sax@npm:1.4.1"
+  checksum: 10c0/6bf86318a254c5d898ede6bd3ded15daf68ae08a5495a2739564eb265cd13bcc64a07ab466fb204f67ce472bb534eb8612dac587435515169593f4fffa11de7c
+  languageName: node
+  linkType: hard
+
+"scheduler@npm:^0.23.2":
+  version: 0.23.2
+  resolution: "scheduler@npm:0.23.2"
+  dependencies:
+    loose-envify: "npm:^1.1.0"
+  checksum: 10c0/26383305e249651d4c58e6705d5f8425f153211aef95f15161c151f7b8de885f24751b377e4a0b3dd42cce09aad3f87a61dab7636859c0d89b7daf1a1e2a5c78
+  languageName: node
+  linkType: hard
+
+"schema-utils@npm:2.7.0":
+  version: 2.7.0
+  resolution: "schema-utils@npm:2.7.0"
+  dependencies:
+    "@types/json-schema": "npm:^7.0.4"
+    ajv: "npm:^6.12.2"
+    ajv-keywords: "npm:^3.4.1"
+  checksum: 10c0/723c3c856a0313a89aa81c5fb2c93d4b11225f5cdd442665fddd55d3c285ae72e079f5286a3a9a1a973affe888f6c33554a2cf47b79b24cd8de2f1f756a6fb1b
+  languageName: node
+  linkType: hard
+
+"schema-utils@npm:^3.0.0, schema-utils@npm:^3.1.1, schema-utils@npm:^3.2.0":
+  version: 3.3.0
+  resolution: "schema-utils@npm:3.3.0"
+  dependencies:
+    "@types/json-schema": "npm:^7.0.8"
+    ajv: "npm:^6.12.5"
+    ajv-keywords: "npm:^3.5.2"
+  checksum: 10c0/fafdbde91ad8aa1316bc543d4b61e65ea86970aebbfb750bfb6d8a6c287a23e415e0e926c2498696b242f63af1aab8e585252637fabe811fd37b604351da6500
+  languageName: node
+  linkType: hard
+
+"schema-utils@npm:^4.0.0, schema-utils@npm:^4.0.1":
+  version: 4.2.0
+  resolution: "schema-utils@npm:4.2.0"
+  dependencies:
+    "@types/json-schema": "npm:^7.0.9"
+    ajv: "npm:^8.9.0"
+    ajv-formats: "npm:^2.1.1"
+    ajv-keywords: "npm:^5.1.0"
+  checksum: 10c0/8dab7e7800316387fd8569870b4b668cfcecf95ac551e369ea799bbcbfb63fb0365366d4b59f64822c9f7904d8c5afcfaf5a6124a4b08783e558cd25f299a6b4
+  languageName: node
+  linkType: hard
+
+"section-matter@npm:^1.0.0":
+  version: 1.0.0
+  resolution: "section-matter@npm:1.0.0"
+  dependencies:
+    extend-shallow: "npm:^2.0.1"
+    kind-of: "npm:^6.0.0"
+  checksum: 10c0/8007f91780adc5aaa781a848eaae50b0f680bbf4043b90cf8a96778195b8fab690c87fe7a989e02394ce69890e330811ec8dab22397d384673ce59f7d750641d
+  languageName: node
+  linkType: hard
+
+"select-hose@npm:^2.0.0":
+  version: 2.0.0
+  resolution: "select-hose@npm:2.0.0"
+  checksum: 10c0/01cc52edd29feddaf379efb4328aededa633f0ac43c64b11a8abd075ff34f05b0d280882c4fbcbdf1a0658202c9cd2ea8d5985174dcf9a2dac7e3a4996fa9b67
+  languageName: node
+  linkType: hard
+
+"selfsigned@npm:^2.1.1":
+  version: 2.4.1
+  resolution: "selfsigned@npm:2.4.1"
+  dependencies:
+    "@types/node-forge": "npm:^1.3.0"
+    node-forge: "npm:^1"
+  checksum: 10c0/521829ec36ea042f7e9963bf1da2ed040a815cf774422544b112ec53b7edc0bc50a0f8cc2ae7aa6cc19afa967c641fd96a15de0fc650c68651e41277d2e1df09
+  languageName: node
+  linkType: hard
+
+"semver-diff@npm:^4.0.0":
+  version: 4.0.0
+  resolution: "semver-diff@npm:4.0.0"
+  dependencies:
+    semver: "npm:^7.3.5"
+  checksum: 10c0/3ed1bb22f39b4b6e98785bb066e821eabb9445d3b23e092866c50e7df8b9bd3eda617b242f81db4159586e0e39b0deb908dd160a24f783bd6f52095b22cd68ea
+  languageName: node
+  linkType: hard
+
+"semver@npm:^6.3.1":
+  version: 6.3.1
+  resolution: "semver@npm:6.3.1"
+  bin:
+    semver: bin/semver.js
+  checksum: 10c0/e3d79b609071caa78bcb6ce2ad81c7966a46a7431d9d58b8800cfa9cb6a63699b3899a0e4bcce36167a284578212d9ae6942b6929ba4aa5015c079a67751d42d
+  languageName: node
+  linkType: hard
+
+"semver@npm:^7.1.1, semver@npm:^7.3.2, semver@npm:^7.3.5, semver@npm:^7.3.6, semver@npm:^7.3.7, semver@npm:^7.3.8, semver@npm:^7.5.3, semver@npm:^7.5.4, semver@npm:^7.6.0":
+  version: 7.6.3
+  resolution: "semver@npm:7.6.3"
+  bin:
+    semver: bin/semver.js
+  checksum: 10c0/88f33e148b210c153873cb08cfe1e281d518aaa9a666d4d148add6560db5cd3c582f3a08ccb91f38d5f379ead256da9931234ed122057f40bb5766e65e58adaf
+  languageName: node
+  linkType: hard
+
+"send@npm:0.19.0":
+  version: 0.19.0
+  resolution: "send@npm:0.19.0"
+  dependencies:
+    debug: "npm:2.6.9"
+    depd: "npm:2.0.0"
+    destroy: "npm:1.2.0"
+    encodeurl: "npm:~1.0.2"
+    escape-html: "npm:~1.0.3"
+    etag: "npm:~1.8.1"
+    fresh: "npm:0.5.2"
+    http-errors: "npm:2.0.0"
+    mime: "npm:1.6.0"
+    ms: "npm:2.1.3"
+    on-finished: "npm:2.4.1"
+    range-parser: "npm:~1.2.1"
+    statuses: "npm:2.0.1"
+  checksum: 10c0/ea3f8a67a8f0be3d6bf9080f0baed6d2c51d11d4f7b4470de96a5029c598a7011c497511ccc28968b70ef05508675cebff27da9151dd2ceadd60be4e6cf845e3
+  languageName: node
+  linkType: hard
+
+"serialize-javascript@npm:^6.0.0, serialize-javascript@npm:^6.0.1":
+  version: 6.0.2
+  resolution: "serialize-javascript@npm:6.0.2"
+  dependencies:
+    randombytes: "npm:^2.1.0"
+  checksum: 10c0/2dd09ef4b65a1289ba24a788b1423a035581bef60817bea1f01eda8e3bda623f86357665fe7ac1b50f6d4f583f97db9615b3f07b2a2e8cbcb75033965f771dd2
+  languageName: node
+  linkType: hard
+
+"serve-handler@npm:^6.1.5":
+  version: 6.1.5
+  resolution: "serve-handler@npm:6.1.5"
+  dependencies:
+    bytes: "npm:3.0.0"
+    content-disposition: "npm:0.5.2"
+    fast-url-parser: "npm:1.1.3"
+    mime-types: "npm:2.1.18"
+    minimatch: "npm:3.1.2"
+    path-is-inside: "npm:1.0.2"
+    path-to-regexp: "npm:2.2.1"
+    range-parser: "npm:1.2.0"
+  checksum: 10c0/6fd393ae37a0305107e634ca545322b00605322189fe70d8f1a4a90a101c4e354768c610efe5a7ef1af3820cec5c33d97467c88151f35a3cb41d8ff2075ef802
+  languageName: node
+  linkType: hard
+
+"serve-index@npm:^1.9.1":
+  version: 1.9.1
+  resolution: "serve-index@npm:1.9.1"
+  dependencies:
+    accepts: "npm:~1.3.4"
+    batch: "npm:0.6.1"
+    debug: "npm:2.6.9"
+    escape-html: "npm:~1.0.3"
+    http-errors: "npm:~1.6.2"
+    mime-types: "npm:~2.1.17"
+    parseurl: "npm:~1.3.2"
+  checksum: 10c0/a666471a24196f74371edf2c3c7bcdd82adbac52f600804508754b5296c3567588bf694258b19e0cb23a567acfa20d9721bfdaed3286007b81f9741ada8a3a9c
+  languageName: node
+  linkType: hard
+
+"serve-static@npm:1.16.2":
+  version: 1.16.2
+  resolution: "serve-static@npm:1.16.2"
+  dependencies:
+    encodeurl: "npm:~2.0.0"
+    escape-html: "npm:~1.0.3"
+    parseurl: "npm:~1.3.3"
+    send: "npm:0.19.0"
+  checksum: 10c0/528fff6f5e12d0c5a391229ad893910709bc51b5705962b09404a1d813857578149b8815f35d3ee5752f44cd378d0f31669d4b1d7e2d11f41e08283d5134bd1f
+  languageName: node
+  linkType: hard
+
+"set-function-length@npm:^1.2.1":
+  version: 1.2.2
+  resolution: "set-function-length@npm:1.2.2"
+  dependencies:
+    define-data-property: "npm:^1.1.4"
+    es-errors: "npm:^1.3.0"
+    function-bind: "npm:^1.1.2"
+    get-intrinsic: "npm:^1.2.4"
+    gopd: "npm:^1.0.1"
+    has-property-descriptors: "npm:^1.0.2"
+  checksum: 10c0/82850e62f412a258b71e123d4ed3873fa9377c216809551192bb6769329340176f109c2eeae8c22a8d386c76739855f78e8716515c818bcaef384b51110f0f3c
+  languageName: node
+  linkType: hard
+
+"set-function-name@npm:^2.0.1, set-function-name@npm:^2.0.2":
+  version: 2.0.2
+  resolution: "set-function-name@npm:2.0.2"
+  dependencies:
+    define-data-property: "npm:^1.1.4"
+    es-errors: "npm:^1.3.0"
+    functions-have-names: "npm:^1.2.3"
+    has-property-descriptors: "npm:^1.0.2"
+  checksum: 10c0/fce59f90696c450a8523e754abb305e2b8c73586452619c2bad5f7bf38c7b6b4651895c9db895679c5bef9554339cf3ef1c329b66ece3eda7255785fbe299316
+  languageName: node
+  linkType: hard
+
+"setprototypeof@npm:1.1.0":
+  version: 1.1.0
+  resolution: "setprototypeof@npm:1.1.0"
+  checksum: 10c0/a77b20876689c6a89c3b42f0c3596a9cae02f90fc902570cbd97198e9e8240382086c9303ad043e88cee10f61eae19f1004e51d885395a1e9bf49f9ebed12872
+  languageName: node
+  linkType: hard
+
+"setprototypeof@npm:1.2.0":
+  version: 1.2.0
+  resolution: "setprototypeof@npm:1.2.0"
+  checksum: 10c0/68733173026766fa0d9ecaeb07f0483f4c2dc70ca376b3b7c40b7cda909f94b0918f6c5ad5ce27a9160bdfb475efaa9d5e705a11d8eaae18f9835d20976028bc
+  languageName: node
+  linkType: hard
+
+"shallow-clone@npm:^3.0.0":
+  version: 3.0.1
+  resolution: "shallow-clone@npm:3.0.1"
+  dependencies:
+    kind-of: "npm:^6.0.2"
+  checksum: 10c0/7bab09613a1b9f480c85a9823aebec533015579fa055ba6634aa56ba1f984380670eaf33b8217502931872aa1401c9fcadaa15f9f604d631536df475b05bcf1e
+  languageName: node
+  linkType: hard
+
+"shallowequal@npm:^1.1.0":
+  version: 1.1.0
+  resolution: "shallowequal@npm:1.1.0"
+  checksum: 10c0/b926efb51cd0f47aa9bc061add788a4a650550bbe50647962113a4579b60af2abe7b62f9b02314acc6f97151d4cf87033a2b15fc20852fae306d1a095215396c
+  languageName: node
+  linkType: hard
+
+"sharp@npm:^0.32.3":
+  version: 0.32.6
+  resolution: "sharp@npm:0.32.6"
+  dependencies:
+    color: "npm:^4.2.3"
+    detect-libc: "npm:^2.0.2"
+    node-addon-api: "npm:^6.1.0"
+    node-gyp: "npm:latest"
+    prebuild-install: "npm:^7.1.1"
+    semver: "npm:^7.5.4"
+    simple-get: "npm:^4.0.1"
+    tar-fs: "npm:^3.0.4"
+    tunnel-agent: "npm:^0.6.0"
+  checksum: 10c0/f6a756fec5051ef2f9341e0543cde1da4e822982dd5398010baad92e2262bd177e08b753eb19b2fbee30f2fcb0e8756f24088fafc48293a364e9a8f8dc65a300
+  languageName: node
+  linkType: hard
+
+"shebang-command@npm:^2.0.0":
+  version: 2.0.0
+  resolution: "shebang-command@npm:2.0.0"
+  dependencies:
+    shebang-regex: "npm:^3.0.0"
+  checksum: 10c0/a41692e7d89a553ef21d324a5cceb5f686d1f3c040759c50aab69688634688c5c327f26f3ecf7001ebfd78c01f3c7c0a11a7c8bfd0a8bc9f6240d4f40b224e4e
+  languageName: node
+  linkType: hard
+
+"shebang-regex@npm:^3.0.0":
+  version: 3.0.0
+  resolution: "shebang-regex@npm:3.0.0"
+  checksum: 10c0/1dbed0726dd0e1152a92696c76c7f06084eb32a90f0528d11acd764043aacf76994b2fb30aa1291a21bd019d6699164d048286309a278855ee7bec06cf6fb690
+  languageName: node
+  linkType: hard
+
+"shell-quote@npm:^1.7.3, shell-quote@npm:^1.8.1":
+  version: 1.8.1
+  resolution: "shell-quote@npm:1.8.1"
+  checksum: 10c0/8cec6fd827bad74d0a49347057d40dfea1e01f12a6123bf82c4649f3ef152fc2bc6d6176e6376bffcd205d9d0ccb4f1f9acae889384d20baff92186f01ea455a
+  languageName: node
+  linkType: hard
+
+"shelljs@npm:^0.8.5":
+  version: 0.8.5
+  resolution: "shelljs@npm:0.8.5"
+  dependencies:
+    glob: "npm:^7.0.0"
+    interpret: "npm:^1.0.0"
+    rechoir: "npm:^0.6.2"
+  bin:
+    shjs: bin/shjs
+  checksum: 10c0/feb25289a12e4bcd04c40ddfab51aff98a3729f5c2602d5b1a1b95f6819ec7804ac8147ebd8d9a85dfab69d501bcf92d7acef03247320f51c1552cec8d8e2382
+  languageName: node
+  linkType: hard
+
+"side-channel@npm:^1.0.4, side-channel@npm:^1.0.6":
+  version: 1.0.6
+  resolution: "side-channel@npm:1.0.6"
+  dependencies:
+    call-bind: "npm:^1.0.7"
+    es-errors: "npm:^1.3.0"
+    get-intrinsic: "npm:^1.2.4"
+    object-inspect: "npm:^1.13.1"
+  checksum: 10c0/d2afd163dc733cc0a39aa6f7e39bf0c436293510dbccbff446733daeaf295857dbccf94297092ec8c53e2503acac30f0b78830876f0485991d62a90e9cad305f
+  languageName: node
+  linkType: hard
+
+"signal-exit@npm:^3.0.2, signal-exit@npm:^3.0.3":
+  version: 3.0.7
+  resolution: "signal-exit@npm:3.0.7"
+  checksum: 10c0/25d272fa73e146048565e08f3309d5b942c1979a6f4a58a8c59d5fa299728e9c2fcd1a759ec870863b1fd38653670240cd420dad2ad9330c71f36608a6a1c912
+  languageName: node
+  linkType: hard
+
+"signal-exit@npm:^4.0.1":
+  version: 4.1.0
+  resolution: "signal-exit@npm:4.1.0"
+  checksum: 10c0/41602dce540e46d599edba9d9860193398d135f7ff72cab629db5171516cfae628d21e7bfccde1bbfdf11c48726bc2a6d1a8fb8701125852fbfda7cf19c6aa83
+  languageName: node
+  linkType: hard
+
+"simple-concat@npm:^1.0.0":
+  version: 1.0.1
+  resolution: "simple-concat@npm:1.0.1"
+  checksum: 10c0/62f7508e674414008910b5397c1811941d457dfa0db4fd5aa7fa0409eb02c3609608dfcd7508cace75b3a0bf67a2a77990711e32cd213d2c76f4fd12ee86d776
+  languageName: node
+  linkType: hard
+
+"simple-get@npm:^4.0.0, simple-get@npm:^4.0.1":
+  version: 4.0.1
+  resolution: "simple-get@npm:4.0.1"
+  dependencies:
+    decompress-response: "npm:^6.0.0"
+    once: "npm:^1.3.1"
+    simple-concat: "npm:^1.0.0"
+  checksum: 10c0/b0649a581dbca741babb960423248899203165769747142033479a7dc5e77d7b0fced0253c731cd57cf21e31e4d77c9157c3069f4448d558ebc96cf9e1eebcf0
+  languageName: node
+  linkType: hard
+
+"simple-swizzle@npm:^0.2.2":
+  version: 0.2.2
+  resolution: "simple-swizzle@npm:0.2.2"
+  dependencies:
+    is-arrayish: "npm:^0.3.1"
+  checksum: 10c0/df5e4662a8c750bdba69af4e8263c5d96fe4cd0f9fe4bdfa3cbdeb45d2e869dff640beaaeb1ef0e99db4d8d2ec92f85508c269f50c972174851bc1ae5bd64308
+  languageName: node
+  linkType: hard
+
+"sirv@npm:^2.0.3":
+  version: 2.0.4
+  resolution: "sirv@npm:2.0.4"
+  dependencies:
+    "@polka/url": "npm:^1.0.0-next.24"
+    mrmime: "npm:^2.0.0"
+    totalist: "npm:^3.0.0"
+  checksum: 10c0/68f8ee857f6a9415e9c07a1f31c7c561df8d5f1b1ba79bee3de583fa37da8718def5309f6b1c6e2c3ef77de45d74f5e49efc7959214443aa92d42e9c99180a4e
+  languageName: node
+  linkType: hard
+
+"sisteransi@npm:^1.0.5":
+  version: 1.0.5
+  resolution: "sisteransi@npm:1.0.5"
+  checksum: 10c0/230ac975cca485b7f6fe2b96a711aa62a6a26ead3e6fb8ba17c5a00d61b8bed0d7adc21f5626b70d7c33c62ff4e63933017a6462942c719d1980bb0b1207ad46
+  languageName: node
+  linkType: hard
+
+"sitemap@npm:^7.1.1":
+  version: 7.1.2
+  resolution: "sitemap@npm:7.1.2"
+  dependencies:
+    "@types/node": "npm:^17.0.5"
+    "@types/sax": "npm:^1.2.1"
+    arg: "npm:^5.0.0"
+    sax: "npm:^1.2.4"
+  bin:
+    sitemap: dist/cli.js
+  checksum: 10c0/01dd1268c0d4b89f8ef082bcb9ef18d0182d00d1622e9c54743474918169491e5360538f9a01a769262e0fe23d6e3822a90680eff0f076cf87b68d459014a34c
+  languageName: node
+  linkType: hard
+
+"skin-tone@npm:^2.0.0":
+  version: 2.0.0
+  resolution: "skin-tone@npm:2.0.0"
+  dependencies:
+    unicode-emoji-modifier-base: "npm:^1.0.0"
+  checksum: 10c0/82d4c2527864f9cbd6cb7f3c4abb31e2224752234d5013b881d3e34e9ab543545b05206df5a17d14b515459fcb265ce409f9cfe443903176b0360cd20e4e4ba5
+  languageName: node
+  linkType: hard
+
+"slash@npm:^3.0.0":
+  version: 3.0.0
+  resolution: "slash@npm:3.0.0"
+  checksum: 10c0/e18488c6a42bdfd4ac5be85b2ced3ccd0224773baae6ad42cfbb9ec74fc07f9fa8396bd35ee638084ead7a2a0818eb5e7151111544d4731ce843019dab4be47b
+  languageName: node
+  linkType: hard
+
+"slash@npm:^4.0.0":
+  version: 4.0.0
+  resolution: "slash@npm:4.0.0"
+  checksum: 10c0/b522ca75d80d107fd30d29df0549a7b2537c83c4c4ecd12cd7d4ea6c8aaca2ab17ada002e7a1d78a9d736a0261509f26ea5b489082ee443a3a810586ef8eff18
+  languageName: node
+  linkType: hard
+
+"smart-buffer@npm:^4.2.0":
+  version: 4.2.0
+  resolution: "smart-buffer@npm:4.2.0"
+  checksum: 10c0/a16775323e1404dd43fabafe7460be13a471e021637bc7889468eb45ce6a6b207261f454e4e530a19500cc962c4cc5348583520843b363f4193cee5c00e1e539
+  languageName: node
+  linkType: hard
+
+"snake-case@npm:^3.0.4":
+  version: 3.0.4
+  resolution: "snake-case@npm:3.0.4"
+  dependencies:
+    dot-case: "npm:^3.0.4"
+    tslib: "npm:^2.0.3"
+  checksum: 10c0/ab19a913969f58f4474fe9f6e8a026c8a2142a01f40b52b79368068343177f818cdfef0b0c6b9558f298782441d5ca8ed5932eb57822439fad791d866e62cecd
+  languageName: node
+  linkType: hard
+
+"sockjs@npm:^0.3.24":
+  version: 0.3.24
+  resolution: "sockjs@npm:0.3.24"
+  dependencies:
+    faye-websocket: "npm:^0.11.3"
+    uuid: "npm:^8.3.2"
+    websocket-driver: "npm:^0.7.4"
+  checksum: 10c0/aa102c7d921bf430215754511c81ea7248f2dcdf268fbdb18e4d8183493a86b8793b164c636c52f474a886f747447c962741df2373888823271efdb9d2594f33
+  languageName: node
+  linkType: hard
+
+"socks-proxy-agent@npm:^8.0.3":
+  version: 8.0.4
+  resolution: "socks-proxy-agent@npm:8.0.4"
+  dependencies:
+    agent-base: "npm:^7.1.1"
+    debug: "npm:^4.3.4"
+    socks: "npm:^2.8.3"
+  checksum: 10c0/345593bb21b95b0508e63e703c84da11549f0a2657d6b4e3ee3612c312cb3a907eac10e53b23ede3557c6601d63252103494caa306b66560f43af7b98f53957a
+  languageName: node
+  linkType: hard
+
+"socks@npm:^2.8.3":
+  version: 2.8.3
+  resolution: "socks@npm:2.8.3"
+  dependencies:
+    ip-address: "npm:^9.0.5"
+    smart-buffer: "npm:^4.2.0"
+  checksum: 10c0/d54a52bf9325165770b674a67241143a3d8b4e4c8884560c4e0e078aace2a728dffc7f70150660f51b85797c4e1a3b82f9b7aa25e0a0ceae1a243365da5c51a7
+  languageName: node
+  linkType: hard
+
+"sort-css-media-queries@npm:2.2.0":
+  version: 2.2.0
+  resolution: "sort-css-media-queries@npm:2.2.0"
+  checksum: 10c0/7478308c7ca93409f959ab993d41de2f0515ed5f51b671908ecb777aae0d63be97b454d59d80e14ee4874884618a2e825d4ae7ccb225779276904dd175f4e766
+  languageName: node
+  linkType: hard
+
+"source-map-js@npm:>=0.6.2 <2.0.0, source-map-js@npm:^1.0.1, source-map-js@npm:^1.2.0":
+  version: 1.2.0
+  resolution: "source-map-js@npm:1.2.0"
+  checksum: 10c0/7e5f896ac10a3a50fe2898e5009c58ff0dc102dcb056ed27a354623a0ece8954d4b2649e1a1b2b52ef2e161d26f8859c7710350930751640e71e374fe2d321a4
+  languageName: node
+  linkType: hard
+
+"source-map-support@npm:~0.5.20":
+  version: 0.5.21
+  resolution: "source-map-support@npm:0.5.21"
+  dependencies:
+    buffer-from: "npm:^1.0.0"
+    source-map: "npm:^0.6.0"
+  checksum: 10c0/9ee09942f415e0f721d6daad3917ec1516af746a8120bba7bb56278707a37f1eb8642bde456e98454b8a885023af81a16e646869975f06afc1a711fb90484e7d
+  languageName: node
+  linkType: hard
+
+"source-map@npm:^0.6.0, source-map@npm:~0.6.0":
+  version: 0.6.1
+  resolution: "source-map@npm:0.6.1"
+  checksum: 10c0/ab55398007c5e5532957cb0beee2368529618ac0ab372d789806f5718123cc4367d57de3904b4e6a4170eb5a0b0f41373066d02ca0735a0c4d75c7d328d3e011
+  languageName: node
+  linkType: hard
+
+"source-map@npm:^0.7.0":
+  version: 0.7.4
+  resolution: "source-map@npm:0.7.4"
+  checksum: 10c0/dc0cf3768fe23c345ea8760487f8c97ef6fca8a73c83cd7c9bf2fde8bc2c34adb9c0824d6feb14bc4f9e37fb522e18af621543f1289038a66ac7586da29aa7dc
+  languageName: node
+  linkType: hard
+
+"space-separated-tokens@npm:^2.0.0":
+  version: 2.0.2
+  resolution: "space-separated-tokens@npm:2.0.2"
+  checksum: 10c0/6173e1d903dca41dcab6a2deed8b4caf61bd13b6d7af8374713500570aa929ff9414ae09a0519f4f8772df993300305a395d4871f35bc4ca72b6db57e1f30af8
+  languageName: node
+  linkType: hard
+
+"spdx-correct@npm:^3.0.0":
+  version: 3.2.0
+  resolution: "spdx-correct@npm:3.2.0"
+  dependencies:
+    spdx-expression-parse: "npm:^3.0.0"
+    spdx-license-ids: "npm:^3.0.0"
+  checksum: 10c0/49208f008618b9119208b0dadc9208a3a55053f4fd6a0ae8116861bd22696fc50f4142a35ebfdb389e05ccf2de8ad142573fefc9e26f670522d899f7b2fe7386
+  languageName: node
+  linkType: hard
+
+"spdx-exceptions@npm:^2.1.0":
+  version: 2.5.0
+  resolution: "spdx-exceptions@npm:2.5.0"
+  checksum: 10c0/37217b7762ee0ea0d8b7d0c29fd48b7e4dfb94096b109d6255b589c561f57da93bf4e328c0290046115961b9209a8051ad9f525e48d433082fc79f496a4ea940
+  languageName: node
+  linkType: hard
+
+"spdx-expression-parse@npm:^3.0.0":
+  version: 3.0.1
+  resolution: "spdx-expression-parse@npm:3.0.1"
+  dependencies:
+    spdx-exceptions: "npm:^2.1.0"
+    spdx-license-ids: "npm:^3.0.0"
+  checksum: 10c0/6f8a41c87759fa184a58713b86c6a8b028250f158159f1d03ed9d1b6ee4d9eefdc74181c8ddc581a341aa971c3e7b79e30b59c23b05d2436d5de1c30bdef7171
+  languageName: node
+  linkType: hard
+
+"spdx-license-ids@npm:^3.0.0":
+  version: 3.0.20
+  resolution: "spdx-license-ids@npm:3.0.20"
+  checksum: 10c0/bdff7534fad6ef59be49becda1edc3fb7f5b3d6f296a715516ab9d972b8ad59af2c34b2003e01db8970d4c673d185ff696ba74c6b61d3bf327e2b3eac22c297c
+  languageName: node
+  linkType: hard
+
+"spdy-transport@npm:^3.0.0":
+  version: 3.0.0
+  resolution: "spdy-transport@npm:3.0.0"
+  dependencies:
+    debug: "npm:^4.1.0"
+    detect-node: "npm:^2.0.4"
+    hpack.js: "npm:^2.1.6"
+    obuf: "npm:^1.1.2"
+    readable-stream: "npm:^3.0.6"
+    wbuf: "npm:^1.7.3"
+  checksum: 10c0/eaf7440fa90724fffc813c386d4a8a7427d967d6e46d7c51d8f8a533d1a6911b9823ea9218703debbae755337e85f110185d7a00ae22ec5c847077b908ce71bb
+  languageName: node
+  linkType: hard
+
+"spdy@npm:^4.0.2":
+  version: 4.0.2
+  resolution: "spdy@npm:4.0.2"
+  dependencies:
+    debug: "npm:^4.1.0"
+    handle-thing: "npm:^2.0.0"
+    http-deceiver: "npm:^1.2.7"
+    select-hose: "npm:^2.0.0"
+    spdy-transport: "npm:^3.0.0"
+  checksum: 10c0/983509c0be9d06fd00bb9dff713c5b5d35d3ffd720db869acdd5ad7aa6fc0e02c2318b58f75328957d8ff772acdf1f7d19382b6047df342044ff3e2d6805ccdf
+  languageName: node
+  linkType: hard
+
+"sprintf-js@npm:^1.1.3":
+  version: 1.1.3
+  resolution: "sprintf-js@npm:1.1.3"
+  checksum: 10c0/09270dc4f30d479e666aee820eacd9e464215cdff53848b443964202bf4051490538e5dd1b42e1a65cf7296916ca17640aebf63dae9812749c7542ee5f288dec
+  languageName: node
+  linkType: hard
+
+"sprintf-js@npm:~1.0.2":
+  version: 1.0.3
+  resolution: "sprintf-js@npm:1.0.3"
+  checksum: 10c0/ecadcfe4c771890140da5023d43e190b7566d9cf8b2d238600f31bec0fc653f328da4450eb04bd59a431771a8e9cc0e118f0aa3974b683a4981b4e07abc2a5bb
+  languageName: node
+  linkType: hard
+
+"srcset@npm:^4.0.0":
+  version: 4.0.0
+  resolution: "srcset@npm:4.0.0"
+  checksum: 10c0/0685c3bd2423b33831734fb71560cd8784f024895e70ee2ac2c392e30047c27ffd9481e001950fb0503f4906bc3fe963145935604edad77944d09c9800990660
+  languageName: node
+  linkType: hard
+
+"ssri@npm:^10.0.0":
+  version: 10.0.6
+  resolution: "ssri@npm:10.0.6"
+  dependencies:
+    minipass: "npm:^7.0.3"
+  checksum: 10c0/e5a1e23a4057a86a97971465418f22ea89bd439ac36ade88812dd920e4e61873e8abd6a9b72a03a67ef50faa00a2daf1ab745c5a15b46d03e0544a0296354227
+  languageName: node
+  linkType: hard
+
+"statuses@npm:2.0.1":
+  version: 2.0.1
+  resolution: "statuses@npm:2.0.1"
+  checksum: 10c0/34378b207a1620a24804ce8b5d230fea0c279f00b18a7209646d5d47e419d1cc23e7cbf33a25a1e51ac38973dc2ac2e1e9c647a8e481ef365f77668d72becfd0
+  languageName: node
+  linkType: hard
+
+"statuses@npm:>= 1.4.0 < 2":
+  version: 1.5.0
+  resolution: "statuses@npm:1.5.0"
+  checksum: 10c0/e433900956357b3efd79b1c547da4d291799ac836960c016d10a98f6a810b1b5c0dcc13b5a7aa609a58239b5190e1ea176ad9221c2157d2fd1c747393e6b2940
+  languageName: node
+  linkType: hard
+
+"std-env@npm:^3.0.1":
+  version: 3.7.0
+  resolution: "std-env@npm:3.7.0"
+  checksum: 10c0/60edf2d130a4feb7002974af3d5a5f3343558d1ccf8d9b9934d225c638606884db4a20d2fe6440a09605bca282af6b042ae8070a10490c0800d69e82e478f41e
+  languageName: node
+  linkType: hard
+
+"streamx@npm:^2.15.0, streamx@npm:^2.18.0":
+  version: 2.18.0
+  resolution: "streamx@npm:2.18.0"
+  dependencies:
+    bare-events: "npm:^2.2.0"
+    fast-fifo: "npm:^1.3.2"
+    queue-tick: "npm:^1.0.1"
+    text-decoder: "npm:^1.1.0"
+  dependenciesMeta:
+    bare-events:
+      optional: true
+  checksum: 10c0/ef50f419252a73dd35abcde72329eafbf5ad9cd2e27f0cc3abebeff6e0dbea124ac6d3e16acbdf081cce41b4125393ac22f9848fcfa19e640830734883e622ba
+  languageName: node
+  linkType: hard
+
+"string-width-cjs@npm:string-width@^4.2.0, string-width@npm:^4.1.0, string-width@npm:^4.2.0":
+  version: 4.2.3
+  resolution: "string-width@npm:4.2.3"
+  dependencies:
+    emoji-regex: "npm:^8.0.0"
+    is-fullwidth-code-point: "npm:^3.0.0"
+    strip-ansi: "npm:^6.0.1"
+  checksum: 10c0/1e525e92e5eae0afd7454086eed9c818ee84374bb80328fc41217ae72ff5f065ef1c9d7f72da41de40c75fa8bb3dee63d92373fd492c84260a552c636392a47b
+  languageName: node
+  linkType: hard
+
+"string-width@npm:^5.0.1, string-width@npm:^5.1.2":
+  version: 5.1.2
+  resolution: "string-width@npm:5.1.2"
+  dependencies:
+    eastasianwidth: "npm:^0.2.0"
+    emoji-regex: "npm:^9.2.2"
+    strip-ansi: "npm:^7.0.1"
+  checksum: 10c0/ab9c4264443d35b8b923cbdd513a089a60de339216d3b0ed3be3ba57d6880e1a192b70ae17225f764d7adbf5994e9bb8df253a944736c15a0240eff553c678ca
+  languageName: node
+  linkType: hard
+
+"string-width@npm:^6.0.0":
+  version: 6.1.0
+  resolution: "string-width@npm:6.1.0"
+  dependencies:
+    eastasianwidth: "npm:^0.2.0"
+    emoji-regex: "npm:^10.2.1"
+    strip-ansi: "npm:^7.0.1"
+  checksum: 10c0/7b2991ea7c946a43042070787b85af454079116dfd6d853aab4ff8a6d4ac717cdc18656cfee15b7a7a78286669202a4a56385728f0740cb1e15001c71807b361
+  languageName: node
+  linkType: hard
+
+"string.prototype.matchall@npm:^4.0.11":
+  version: 4.0.11
+  resolution: "string.prototype.matchall@npm:4.0.11"
+  dependencies:
+    call-bind: "npm:^1.0.7"
+    define-properties: "npm:^1.2.1"
+    es-abstract: "npm:^1.23.2"
+    es-errors: "npm:^1.3.0"
+    es-object-atoms: "npm:^1.0.0"
+    get-intrinsic: "npm:^1.2.4"
+    gopd: "npm:^1.0.1"
+    has-symbols: "npm:^1.0.3"
+    internal-slot: "npm:^1.0.7"
+    regexp.prototype.flags: "npm:^1.5.2"
+    set-function-name: "npm:^2.0.2"
+    side-channel: "npm:^1.0.6"
+  checksum: 10c0/915a2562ac9ab5e01b7be6fd8baa0b2b233a0a9aa975fcb2ec13cc26f08fb9a3e85d5abdaa533c99c6fc4c5b65b914eba3d80c4aff9792a4c9fed403f28f7d9d
+  languageName: node
+  linkType: hard
+
+"string.prototype.repeat@npm:^1.0.0":
+  version: 1.0.0
+  resolution: "string.prototype.repeat@npm:1.0.0"
+  dependencies:
+    define-properties: "npm:^1.1.3"
+    es-abstract: "npm:^1.17.5"
+  checksum: 10c0/94c7978566cffa1327d470fd924366438af9b04b497c43a9805e476e2e908aa37a1fd34cc0911156c17556dab62159d12c7b92b3cc304c3e1281fe4c8e668f40
+  languageName: node
+  linkType: hard
+
+"string.prototype.trim@npm:^1.2.9":
+  version: 1.2.9
+  resolution: "string.prototype.trim@npm:1.2.9"
+  dependencies:
+    call-bind: "npm:^1.0.7"
+    define-properties: "npm:^1.2.1"
+    es-abstract: "npm:^1.23.0"
+    es-object-atoms: "npm:^1.0.0"
+  checksum: 10c0/dcef1a0fb61d255778155006b372dff8cc6c4394bc39869117e4241f41a2c52899c0d263ffc7738a1f9e61488c490b05c0427faa15151efad721e1a9fb2663c2
+  languageName: node
+  linkType: hard
+
+"string.prototype.trimend@npm:^1.0.8":
+  version: 1.0.8
+  resolution: "string.prototype.trimend@npm:1.0.8"
+  dependencies:
+    call-bind: "npm:^1.0.7"
+    define-properties: "npm:^1.2.1"
+    es-object-atoms: "npm:^1.0.0"
+  checksum: 10c0/0a0b54c17c070551b38e756ae271865ac6cc5f60dabf2e7e343cceae7d9b02e1a1120a824e090e79da1b041a74464e8477e2da43e2775c85392be30a6f60963c
+  languageName: node
+  linkType: hard
+
+"string.prototype.trimstart@npm:^1.0.8":
+  version: 1.0.8
+  resolution: "string.prototype.trimstart@npm:1.0.8"
+  dependencies:
+    call-bind: "npm:^1.0.7"
+    define-properties: "npm:^1.2.1"
+    es-object-atoms: "npm:^1.0.0"
+  checksum: 10c0/d53af1899959e53c83b64a5fd120be93e067da740e7e75acb433849aa640782fb6c7d4cd5b84c954c84413745a3764df135a8afeb22908b86a835290788d8366
+  languageName: node
+  linkType: hard
+
+"string_decoder@npm:^1.1.1":
+  version: 1.3.0
+  resolution: "string_decoder@npm:1.3.0"
+  dependencies:
+    safe-buffer: "npm:~5.2.0"
+  checksum: 10c0/810614ddb030e271cd591935dcd5956b2410dd079d64ff92a1844d6b7588bf992b3e1b69b0f4d34a3e06e0bd73046ac646b5264c1987b20d0601f81ef35d731d
+  languageName: node
+  linkType: hard
+
+"string_decoder@npm:~1.1.1":
+  version: 1.1.1
+  resolution: "string_decoder@npm:1.1.1"
+  dependencies:
+    safe-buffer: "npm:~5.1.0"
+  checksum: 10c0/b4f89f3a92fd101b5653ca3c99550e07bdf9e13b35037e9e2a1c7b47cec4e55e06ff3fc468e314a0b5e80bfbaf65c1ca5a84978764884ae9413bec1fc6ca924e
+  languageName: node
+  linkType: hard
+
+"stringify-entities@npm:^4.0.0":
+  version: 4.0.4
+  resolution: "stringify-entities@npm:4.0.4"
+  dependencies:
+    character-entities-html4: "npm:^2.0.0"
+    character-entities-legacy: "npm:^3.0.0"
+  checksum: 10c0/537c7e656354192406bdd08157d759cd615724e9d0873602d2c9b2f6a5c0a8d0b1d73a0a08677848105c5eebac6db037b57c0b3a4ec86331117fa7319ed50448
+  languageName: node
+  linkType: hard
+
+"stringify-object@npm:^3.3.0":
+  version: 3.3.0
+  resolution: "stringify-object@npm:3.3.0"
+  dependencies:
+    get-own-enumerable-property-symbols: "npm:^3.0.0"
+    is-obj: "npm:^1.0.1"
+    is-regexp: "npm:^1.0.0"
+  checksum: 10c0/ba8078f84128979ee24b3de9a083489cbd3c62cb8572a061b47d4d82601a8ae4b4d86fa8c54dd955593da56bb7c16a6de51c27221fdc6b7139bb4f29d815f35b
+  languageName: node
+  linkType: hard
+
+"strip-ansi-cjs@npm:strip-ansi@^6.0.1, strip-ansi@npm:^6.0.0, strip-ansi@npm:^6.0.1":
+  version: 6.0.1
+  resolution: "strip-ansi@npm:6.0.1"
+  dependencies:
+    ansi-regex: "npm:^5.0.1"
+  checksum: 10c0/1ae5f212a126fe5b167707f716942490e3933085a5ff6c008ab97ab2f272c8025d3aa218b7bd6ab25729ca20cc81cddb252102f8751e13482a5199e873680952
+  languageName: node
+  linkType: hard
+
+"strip-ansi@npm:^3.0.0":
+  version: 3.0.1
+  resolution: "strip-ansi@npm:3.0.1"
+  dependencies:
+    ansi-regex: "npm:^2.0.0"
+  checksum: 10c0/f6e7fbe8e700105dccf7102eae20e4f03477537c74b286fd22cfc970f139002ed6f0d9c10d0e21aa9ed9245e0fa3c9275930e8795c5b947da136e4ecb644a70f
+  languageName: node
+  linkType: hard
+
+"strip-ansi@npm:^7.0.1":
+  version: 7.1.0
+  resolution: "strip-ansi@npm:7.1.0"
+  dependencies:
+    ansi-regex: "npm:^6.0.1"
+  checksum: 10c0/a198c3762e8832505328cbf9e8c8381de14a4fa50a4f9b2160138158ea88c0f5549fb50cb13c651c3088f47e63a108b34622ec18c0499b6c8c3a5ddf6b305ac4
+  languageName: node
+  linkType: hard
+
+"strip-bom-string@npm:^1.0.0":
+  version: 1.0.0
+  resolution: "strip-bom-string@npm:1.0.0"
+  checksum: 10c0/5c5717e2643225aa6a6d659d34176ab2657037f1fe2423ac6fcdb488f135e14fef1022030e426d8b4d0989e09adbd5c3288d5d3b9c632abeefd2358dfc512bca
+  languageName: node
+  linkType: hard
+
+"strip-final-newline@npm:^2.0.0":
+  version: 2.0.0
+  resolution: "strip-final-newline@npm:2.0.0"
+  checksum: 10c0/bddf8ccd47acd85c0e09ad7375409d81653f645fda13227a9d459642277c253d877b68f2e5e4d819fe75733b0e626bac7e954c04f3236f6d196f79c94fa4a96f
+  languageName: node
+  linkType: hard
+
+"strip-json-comments@npm:^3.1.1":
+  version: 3.1.1
+  resolution: "strip-json-comments@npm:3.1.1"
+  checksum: 10c0/9681a6257b925a7fa0f285851c0e613cc934a50661fa7bb41ca9cbbff89686bb4a0ee366e6ecedc4daafd01e83eee0720111ab294366fe7c185e935475ebcecd
+  languageName: node
+  linkType: hard
+
+"strip-json-comments@npm:~2.0.1":
+  version: 2.0.1
+  resolution: "strip-json-comments@npm:2.0.1"
+  checksum: 10c0/b509231cbdee45064ff4f9fd73609e2bcc4e84a4d508e9dd0f31f70356473fde18abfb5838c17d56fb236f5a06b102ef115438de0600b749e818a35fbbc48c43
+  languageName: node
+  linkType: hard
+
+"style-to-object@npm:^0.4.0":
+  version: 0.4.4
+  resolution: "style-to-object@npm:0.4.4"
+  dependencies:
+    inline-style-parser: "npm:0.1.1"
+  checksum: 10c0/3a733080da66952881175b17d65f92985cf94c1ca358a92cf21b114b1260d49b94a404ed79476047fb95698d64c7e366ca7443f0225939e2fb34c38bbc9c7639
+  languageName: node
+  linkType: hard
+
+"style-to-object@npm:^1.0.0":
+  version: 1.0.6
+  resolution: "style-to-object@npm:1.0.6"
+  dependencies:
+    inline-style-parser: "npm:0.2.3"
+  checksum: 10c0/be5e8e3f0e35c0338de4112b9d861db576a52ebbd97f2501f1fb2c900d05c8fc42c5114407fa3a7f8b39301146cd8ca03a661bf52212394125a9629d5b771aba
+  languageName: node
+  linkType: hard
+
+"stylehacks@npm:^6.1.1":
+  version: 6.1.1
+  resolution: "stylehacks@npm:6.1.1"
+  dependencies:
+    browserslist: "npm:^4.23.0"
+    postcss-selector-parser: "npm:^6.0.16"
+  peerDependencies:
+    postcss: ^8.4.31
+  checksum: 10c0/2dd2bccfd8311ff71492e63a7b8b86c3d7b1fff55d4ba5a2357aff97743e633d351cdc2f5ae3c0057637d00dab4ef5fc5b218a1b370e4585a41df22b5a5128be
+  languageName: node
+  linkType: hard
+
+"stylis@npm:^4.1.3":
+  version: 4.3.3
+  resolution: "stylis@npm:4.3.3"
+  checksum: 10c0/07b0461ff32d8c805ad22dc9da37f1c0cf613102f4b8a1e9ddd6b5952a1669f9edfbe5a81c28698f5f46f0d0ab93a489e25b5c8d9e0d3e5b2a118a657bc19a60
+  languageName: node
+  linkType: hard
+
+"supports-color@npm:^2.0.0":
+  version: 2.0.0
+  resolution: "supports-color@npm:2.0.0"
+  checksum: 10c0/570e0b63be36cccdd25186350a6cb2eaad332a95ff162fa06d9499982315f2fe4217e69dd98e862fbcd9c81eaff300a825a1fe7bf5cc752e5b84dfed042b0dda
+  languageName: node
+  linkType: hard
+
+"supports-color@npm:^5.3.0":
+  version: 5.5.0
+  resolution: "supports-color@npm:5.5.0"
+  dependencies:
+    has-flag: "npm:^3.0.0"
+  checksum: 10c0/6ae5ff319bfbb021f8a86da8ea1f8db52fac8bd4d499492e30ec17095b58af11f0c55f8577390a749b1c4dde691b6a0315dab78f5f54c9b3d83f8fb5905c1c05
+  languageName: node
+  linkType: hard
+
+"supports-color@npm:^7.1.0":
+  version: 7.2.0
+  resolution: "supports-color@npm:7.2.0"
+  dependencies:
+    has-flag: "npm:^4.0.0"
+  checksum: 10c0/afb4c88521b8b136b5f5f95160c98dee7243dc79d5432db7efc27efb219385bbc7d9427398e43dd6cc730a0f87d5085ce1652af7efbe391327bc0a7d0f7fc124
+  languageName: node
+  linkType: hard
+
+"supports-color@npm:^8.0.0":
+  version: 8.1.1
+  resolution: "supports-color@npm:8.1.1"
+  dependencies:
+    has-flag: "npm:^4.0.0"
+  checksum: 10c0/ea1d3c275dd604c974670f63943ed9bd83623edc102430c05adb8efc56ba492746b6e95386e7831b872ec3807fd89dd8eb43f735195f37b5ec343e4234cc7e89
+  languageName: node
+  linkType: hard
+
+"supports-color@npm:^9.0.0":
+  version: 9.4.0
+  resolution: "supports-color@npm:9.4.0"
+  checksum: 10c0/6c24e6b2b64c6a60e5248490cfa50de5924da32cf09ae357ad8ebbf305cc5d2717ba705a9d4cb397d80bbf39417e8fdc8d7a0ce18bd0041bf7b5b456229164e4
+  languageName: node
+  linkType: hard
+
+"supports-preserve-symlinks-flag@npm:^1.0.0":
+  version: 1.0.0
+  resolution: "supports-preserve-symlinks-flag@npm:1.0.0"
+  checksum: 10c0/6c4032340701a9950865f7ae8ef38578d8d7053f5e10518076e6554a9381fa91bd9c6850193695c141f32b21f979c985db07265a758867bac95de05f7d8aeb39
+  languageName: node
+  linkType: hard
+
+"svg-parser@npm:^2.0.4":
+  version: 2.0.4
+  resolution: "svg-parser@npm:2.0.4"
+  checksum: 10c0/02f6cb155dd7b63ebc2f44f36365bc294543bebb81b614b7628f1af3c54ab64f7e1cec20f06e252bf95bdde78441ae295a412c68ad1678f16a6907d924512b7a
+  languageName: node
+  linkType: hard
+
+"svgo@npm:^3.0.2, svgo@npm:^3.2.0":
+  version: 3.3.2
+  resolution: "svgo@npm:3.3.2"
+  dependencies:
+    "@trysound/sax": "npm:0.2.0"
+    commander: "npm:^7.2.0"
+    css-select: "npm:^5.1.0"
+    css-tree: "npm:^2.3.1"
+    css-what: "npm:^6.1.0"
+    csso: "npm:^5.0.5"
+    picocolors: "npm:^1.0.0"
+  bin:
+    svgo: ./bin/svgo
+  checksum: 10c0/a6badbd3d1d6dbb177f872787699ab34320b990d12e20798ecae915f0008796a0f3c69164f1485c9def399e0ce0a5683eb4a8045e51a5e1c364bb13a0d9f79e1
+  languageName: node
+  linkType: hard
+
+"synckit@npm:^0.9.0, synckit@npm:^0.9.1":
+  version: 0.9.1
+  resolution: "synckit@npm:0.9.1"
+  dependencies:
+    "@pkgr/core": "npm:^0.1.0"
+    tslib: "npm:^2.6.2"
+  checksum: 10c0/d8b89e1bf30ba3ffb469d8418c836ad9c0c062bf47028406b4d06548bc66af97155ea2303b96c93bf5c7c0f0d66153a6fbd6924c76521b434e6a9898982abc2e
+  languageName: node
+  linkType: hard
+
+"tapable@npm:^1.0.0":
+  version: 1.1.3
+  resolution: "tapable@npm:1.1.3"
+  checksum: 10c0/c9f0265e55e45821ec672b9b9ee8a35d95bf3ea6b352199f8606a2799018e89cfe4433c554d424b31fc67c4be26b05d4f36dc3c607def416fdb2514cd63dba50
+  languageName: node
+  linkType: hard
+
+"tapable@npm:^2.0.0, tapable@npm:^2.1.1, tapable@npm:^2.2.0, tapable@npm:^2.2.1":
+  version: 2.2.1
+  resolution: "tapable@npm:2.2.1"
+  checksum: 10c0/bc40e6efe1e554d075469cedaba69a30eeb373552aaf41caeaaa45bf56ffacc2674261b106245bd566b35d8f3329b52d838e851ee0a852120acae26e622925c9
+  languageName: node
+  linkType: hard
+
+"tar-fs@npm:^2.0.0":
+  version: 2.1.1
+  resolution: "tar-fs@npm:2.1.1"
+  dependencies:
+    chownr: "npm:^1.1.1"
+    mkdirp-classic: "npm:^0.5.2"
+    pump: "npm:^3.0.0"
+    tar-stream: "npm:^2.1.4"
+  checksum: 10c0/871d26a934bfb7beeae4c4d8a09689f530b565f79bd0cf489823ff0efa3705da01278160da10bb006d1a793fa0425cf316cec029b32a9159eacbeaff4965fb6d
+  languageName: node
+  linkType: hard
+
+"tar-fs@npm:^3.0.4":
+  version: 3.0.6
+  resolution: "tar-fs@npm:3.0.6"
+  dependencies:
+    bare-fs: "npm:^2.1.1"
+    bare-path: "npm:^2.1.0"
+    pump: "npm:^3.0.0"
+    tar-stream: "npm:^3.1.5"
+  dependenciesMeta:
+    bare-fs:
+      optional: true
+    bare-path:
+      optional: true
+  checksum: 10c0/207b7c0f193495668bd9dbad09a0108ce4ffcfec5bce2133f90988cdda5c81fad83c99f963d01e47b565196594f7a17dbd063ae55b97b36268fcc843975278ee
+  languageName: node
+  linkType: hard
+
+"tar-stream@npm:^2.1.4":
+  version: 2.2.0
+  resolution: "tar-stream@npm:2.2.0"
+  dependencies:
+    bl: "npm:^4.0.3"
+    end-of-stream: "npm:^1.4.1"
+    fs-constants: "npm:^1.0.0"
+    inherits: "npm:^2.0.3"
+    readable-stream: "npm:^3.1.1"
+  checksum: 10c0/2f4c910b3ee7196502e1ff015a7ba321ec6ea837667220d7bcb8d0852d51cb04b87f7ae471008a6fb8f5b1a1b5078f62f3a82d30c706f20ada1238ac797e7692
+  languageName: node
+  linkType: hard
+
+"tar-stream@npm:^3.1.5":
+  version: 3.1.7
+  resolution: "tar-stream@npm:3.1.7"
+  dependencies:
+    b4a: "npm:^1.6.4"
+    fast-fifo: "npm:^1.2.0"
+    streamx: "npm:^2.15.0"
+  checksum: 10c0/a09199d21f8714bd729993ac49b6c8efcb808b544b89f23378ad6ffff6d1cb540878614ba9d4cfec11a64ef39e1a6f009a5398371491eb1fda606ffc7f70f718
+  languageName: node
+  linkType: hard
+
+"tar@npm:^6.1.11, tar@npm:^6.2.1":
+  version: 6.2.1
+  resolution: "tar@npm:6.2.1"
+  dependencies:
+    chownr: "npm:^2.0.0"
+    fs-minipass: "npm:^2.0.0"
+    minipass: "npm:^5.0.0"
+    minizlib: "npm:^2.1.1"
+    mkdirp: "npm:^1.0.3"
+    yallist: "npm:^4.0.0"
+  checksum: 10c0/a5eca3eb50bc11552d453488344e6507156b9193efd7635e98e867fab275d527af53d8866e2370cd09dfe74378a18111622ace35af6a608e5223a7d27fe99537
+  languageName: node
+  linkType: hard
+
+"terser-webpack-plugin@npm:^5.3.10, terser-webpack-plugin@npm:^5.3.9":
+  version: 5.3.10
+  resolution: "terser-webpack-plugin@npm:5.3.10"
+  dependencies:
+    "@jridgewell/trace-mapping": "npm:^0.3.20"
+    jest-worker: "npm:^27.4.5"
+    schema-utils: "npm:^3.1.1"
+    serialize-javascript: "npm:^6.0.1"
+    terser: "npm:^5.26.0"
+  peerDependencies:
+    webpack: ^5.1.0
+  peerDependenciesMeta:
+    "@swc/core":
+      optional: true
+    esbuild:
+      optional: true
+    uglify-js:
+      optional: true
+  checksum: 10c0/66d1ed3174542560911cf96f4716aeea8d60e7caab212291705d50072b6ba844c7391442541b13c848684044042bea9ec87512b8506528c12854943da05faf91
+  languageName: node
+  linkType: hard
+
+"terser@npm:^5.10.0, terser@npm:^5.15.1, terser@npm:^5.26.0":
+  version: 5.31.6
+  resolution: "terser@npm:5.31.6"
+  dependencies:
+    "@jridgewell/source-map": "npm:^0.3.3"
+    acorn: "npm:^8.8.2"
+    commander: "npm:^2.20.0"
+    source-map-support: "npm:~0.5.20"
+  bin:
+    terser: bin/terser
+  checksum: 10c0/b17d02b65a52a5041430572b3c514475820f5e7590fa93773c0f5b4be601ccf3f6d745bf5a79f3ee58187cf85edf61c24ddf4345783839fccb44c9c8fa9b427e
+  languageName: node
+  linkType: hard
+
+"text-decoder@npm:^1.1.0":
+  version: 1.1.1
+  resolution: "text-decoder@npm:1.1.1"
+  dependencies:
+    b4a: "npm:^1.6.4"
+  checksum: 10c0/e527d05454b59c0fa77456495de68c88e560a122de3dd28b3ebdbf81828aabeaa7e9bb8054b9eb52bc5029ccb5899ad04f466cbba3c53b2685270599d1710cee
+  languageName: node
+  linkType: hard
+
+"text-table@npm:^0.2.0":
+  version: 0.2.0
+  resolution: "text-table@npm:0.2.0"
+  checksum: 10c0/02805740c12851ea5982686810702e2f14369a5f4c5c40a836821e3eefc65ffeec3131ba324692a37608294b0fd8c1e55a2dd571ffed4909822787668ddbee5c
+  languageName: node
+  linkType: hard
+
+"thunky@npm:^1.0.2":
+  version: 1.1.0
+  resolution: "thunky@npm:1.1.0"
+  checksum: 10c0/369764f39de1ce1de2ba2fa922db4a3f92e9c7f33bcc9a713241bc1f4a5238b484c17e0d36d1d533c625efb00e9e82c3e45f80b47586945557b45abb890156d2
+  languageName: node
+  linkType: hard
+
+"tiny-invariant@npm:^1.0.2":
+  version: 1.3.3
+  resolution: "tiny-invariant@npm:1.3.3"
+  checksum: 10c0/65af4a07324b591a059b35269cd696aba21bef2107f29b9f5894d83cc143159a204b299553435b03874ebb5b94d019afa8b8eff241c8a4cfee95872c2e1c1c4a
+  languageName: node
+  linkType: hard
+
+"tiny-warning@npm:^1.0.0":
+  version: 1.0.3
+  resolution: "tiny-warning@npm:1.0.3"
+  checksum: 10c0/ef8531f581b30342f29670cb41ca248001c6fd7975ce22122bd59b8d62b4fc84ad4207ee7faa95cde982fa3357cd8f4be650142abc22805538c3b1392d7084fa
+  languageName: node
+  linkType: hard
+
+"to-fast-properties@npm:^2.0.0":
+  version: 2.0.0
+  resolution: "to-fast-properties@npm:2.0.0"
+  checksum: 10c0/b214d21dbfb4bce3452b6244b336806ffea9c05297148d32ebb428d5c43ce7545bdfc65a1ceb58c9ef4376a65c0cb2854d645f33961658b3e3b4f84910ddcdd7
+  languageName: node
+  linkType: hard
+
+"to-regex-range@npm:^5.0.1":
+  version: 5.0.1
+  resolution: "to-regex-range@npm:5.0.1"
+  dependencies:
+    is-number: "npm:^7.0.0"
+  checksum: 10c0/487988b0a19c654ff3e1961b87f471702e708fa8a8dd02a298ef16da7206692e8552a0250e8b3e8759270f62e9d8314616f6da274734d3b558b1fc7b7724e892
+  languageName: node
+  linkType: hard
+
+"toidentifier@npm:1.0.1":
+  version: 1.0.1
+  resolution: "toidentifier@npm:1.0.1"
+  checksum: 10c0/93937279934bd66cc3270016dd8d0afec14fb7c94a05c72dc57321f8bd1fa97e5bea6d1f7c89e728d077ca31ea125b78320a616a6c6cd0e6b9cb94cb864381c1
+  languageName: node
+  linkType: hard
+
+"totalist@npm:^3.0.0":
+  version: 3.0.1
+  resolution: "totalist@npm:3.0.1"
+  checksum: 10c0/4bb1fadb69c3edbef91c73ebef9d25b33bbf69afe1e37ce544d5f7d13854cda15e47132f3e0dc4cafe300ddb8578c77c50a65004d8b6e97e77934a69aa924863
+  languageName: node
+  linkType: hard
+
+"trim-lines@npm:^3.0.0":
+  version: 3.0.1
+  resolution: "trim-lines@npm:3.0.1"
+  checksum: 10c0/3a1611fa9e52aa56a94c69951a9ea15b8aaad760eaa26c56a65330dc8adf99cb282fc07cc9d94968b7d4d88003beba220a7278bbe2063328eb23fb56f9509e94
+  languageName: node
+  linkType: hard
+
+"trough@npm:^2.0.0":
+  version: 2.2.0
+  resolution: "trough@npm:2.2.0"
+  checksum: 10c0/58b671fc970e7867a48514168894396dd94e6d9d6456aca427cc299c004fe67f35ed7172a36449086b2edde10e78a71a284ec0076809add6834fb8f857ccb9b0
+  languageName: node
+  linkType: hard
+
+"ts-api-utils@npm:^1.0.1, ts-api-utils@npm:^1.3.0":
+  version: 1.3.0
+  resolution: "ts-api-utils@npm:1.3.0"
+  peerDependencies:
+    typescript: ">=4.2.0"
+  checksum: 10c0/f54a0ba9ed56ce66baea90a3fa087a484002e807f28a8ccb2d070c75e76bde64bd0f6dce98b3802834156306050871b67eec325cb4e918015a360a3f0868c77c
+  languageName: node
+  linkType: hard
+
+"ts-dedent@npm:^2.2.0":
+  version: 2.2.0
+  resolution: "ts-dedent@npm:2.2.0"
+  checksum: 10c0/175adea838468cc2ff7d5e97f970dcb798bbcb623f29c6088cb21aa2880d207c5784be81ab1741f56b9ac37840cbaba0c0d79f7f8b67ffe61c02634cafa5c303
+  languageName: node
+  linkType: hard
+
+"tslib@npm:^1.8.1":
+  version: 1.14.1
+  resolution: "tslib@npm:1.14.1"
+  checksum: 10c0/69ae09c49eea644bc5ebe1bca4fa4cc2c82b7b3e02f43b84bd891504edf66dbc6b2ec0eef31a957042de2269139e4acff911e6d186a258fb14069cd7f6febce2
+  languageName: node
+  linkType: hard
+
+"tslib@npm:^2.0.3, tslib@npm:^2.6.0, tslib@npm:^2.6.2":
+  version: 2.6.3
+  resolution: "tslib@npm:2.6.3"
+  checksum: 10c0/2598aef53d9dbe711af75522464b2104724d6467b26a60f2bdac8297d2b5f1f6b86a71f61717384aa8fd897240467aaa7bcc36a0700a0faf751293d1331db39a
+  languageName: node
+  linkType: hard
+
+"tsutils@npm:^3.21.0":
+  version: 3.21.0
+  resolution: "tsutils@npm:3.21.0"
+  dependencies:
+    tslib: "npm:^1.8.1"
+  peerDependencies:
+    typescript: ">=2.8.0 || >= 3.2.0-dev || >= 3.3.0-dev || >= 3.4.0-dev || >= 3.5.0-dev || >= 3.6.0-dev || >= 3.6.0-beta || >= 3.7.0-dev || >= 3.7.0-beta"
+  checksum: 10c0/02f19e458ec78ead8fffbf711f834ad8ecd2cc6ade4ec0320790713dccc0a412b99e7fd907c4cda2a1dc602c75db6f12e0108e87a5afad4b2f9e90a24cabd5a2
+  languageName: node
+  linkType: hard
+
+"tunnel-agent@npm:^0.6.0":
+  version: 0.6.0
+  resolution: "tunnel-agent@npm:0.6.0"
+  dependencies:
+    safe-buffer: "npm:^5.0.1"
+  checksum: 10c0/4c7a1b813e7beae66fdbf567a65ec6d46313643753d0beefb3c7973d66fcec3a1e7f39759f0a0b4465883499c6dc8b0750ab8b287399af2e583823e40410a17a
+  languageName: node
+  linkType: hard
+
+"type-check@npm:^0.4.0, type-check@npm:~0.4.0":
+  version: 0.4.0
+  resolution: "type-check@npm:0.4.0"
+  dependencies:
+    prelude-ls: "npm:^1.2.1"
+  checksum: 10c0/7b3fd0ed43891e2080bf0c5c504b418fbb3e5c7b9708d3d015037ba2e6323a28152ec163bcb65212741fa5d2022e3075ac3c76440dbd344c9035f818e8ecee58
+  languageName: node
+  linkType: hard
+
+"type-fest@npm:^0.20.2":
+  version: 0.20.2
+  resolution: "type-fest@npm:0.20.2"
+  checksum: 10c0/dea9df45ea1f0aaa4e2d3bed3f9a0bfe9e5b2592bddb92eb1bf06e50bcf98dbb78189668cd8bc31a0511d3fc25539b4cd5c704497e53e93e2d40ca764b10bfc3
+  languageName: node
+  linkType: hard
+
+"type-fest@npm:^1.0.1":
+  version: 1.4.0
+  resolution: "type-fest@npm:1.4.0"
+  checksum: 10c0/a3c0f4ee28ff6ddf800d769eafafcdeab32efa38763c1a1b8daeae681920f6e345d7920bf277245235561d8117dab765cb5f829c76b713b4c9de0998a5397141
+  languageName: node
+  linkType: hard
+
+"type-fest@npm:^2.13.0, type-fest@npm:^2.5.0":
+  version: 2.19.0
+  resolution: "type-fest@npm:2.19.0"
+  checksum: 10c0/a5a7ecf2e654251613218c215c7493574594951c08e52ab9881c9df6a6da0aeca7528c213c622bc374b4e0cb5c443aa3ab758da4e3c959783ce884c3194e12cb
+  languageName: node
+  linkType: hard
+
+"type-fest@npm:^3.8.0":
+  version: 3.13.1
+  resolution: "type-fest@npm:3.13.1"
+  checksum: 10c0/547d22186f73a8c04590b70dcf63baff390078c75ea8acd366bbd510fd0646e348bd1970e47ecf795b7cff0b41d26e9c475c1fedd6ef5c45c82075fbf916b629
+  languageName: node
+  linkType: hard
+
+"type-is@npm:~1.6.18":
+  version: 1.6.18
+  resolution: "type-is@npm:1.6.18"
+  dependencies:
+    media-typer: "npm:0.3.0"
+    mime-types: "npm:~2.1.24"
+  checksum: 10c0/a23daeb538591b7efbd61ecf06b6feb2501b683ffdc9a19c74ef5baba362b4347e42f1b4ed81f5882a8c96a3bfff7f93ce3ffaf0cbbc879b532b04c97a55db9d
+  languageName: node
+  linkType: hard
+
+"typed-array-buffer@npm:^1.0.2":
+  version: 1.0.2
+  resolution: "typed-array-buffer@npm:1.0.2"
+  dependencies:
+    call-bind: "npm:^1.0.7"
+    es-errors: "npm:^1.3.0"
+    is-typed-array: "npm:^1.1.13"
+  checksum: 10c0/9e043eb38e1b4df4ddf9dde1aa64919ae8bb909571c1cc4490ba777d55d23a0c74c7d73afcdd29ec98616d91bb3ae0f705fad4421ea147e1daf9528200b562da
+  languageName: node
+  linkType: hard
+
+"typed-array-byte-length@npm:^1.0.1":
+  version: 1.0.1
+  resolution: "typed-array-byte-length@npm:1.0.1"
+  dependencies:
+    call-bind: "npm:^1.0.7"
+    for-each: "npm:^0.3.3"
+    gopd: "npm:^1.0.1"
+    has-proto: "npm:^1.0.3"
+    is-typed-array: "npm:^1.1.13"
+  checksum: 10c0/fcebeffb2436c9f355e91bd19e2368273b88c11d1acc0948a2a306792f1ab672bce4cfe524ab9f51a0505c9d7cd1c98eff4235c4f6bfef6a198f6cfc4ff3d4f3
+  languageName: node
+  linkType: hard
+
+"typed-array-byte-offset@npm:^1.0.2":
+  version: 1.0.2
+  resolution: "typed-array-byte-offset@npm:1.0.2"
+  dependencies:
+    available-typed-arrays: "npm:^1.0.7"
+    call-bind: "npm:^1.0.7"
+    for-each: "npm:^0.3.3"
+    gopd: "npm:^1.0.1"
+    has-proto: "npm:^1.0.3"
+    is-typed-array: "npm:^1.1.13"
+  checksum: 10c0/d2628bc739732072e39269389a758025f75339de2ed40c4f91357023c5512d237f255b633e3106c461ced41907c1bf9a533c7e8578066b0163690ca8bc61b22f
+  languageName: node
+  linkType: hard
+
+"typed-array-length@npm:^1.0.6":
+  version: 1.0.6
+  resolution: "typed-array-length@npm:1.0.6"
+  dependencies:
+    call-bind: "npm:^1.0.7"
+    for-each: "npm:^0.3.3"
+    gopd: "npm:^1.0.1"
+    has-proto: "npm:^1.0.3"
+    is-typed-array: "npm:^1.1.13"
+    possible-typed-array-names: "npm:^1.0.0"
+  checksum: 10c0/74253d7dc488eb28b6b2711cf31f5a9dcefc9c41b0681fd1c178ed0a1681b4468581a3626d39cd4df7aee3d3927ab62be06aa9ca74e5baf81827f61641445b77
+  languageName: node
+  linkType: hard
+
+"typedarray-to-buffer@npm:^3.1.5":
+  version: 3.1.5
+  resolution: "typedarray-to-buffer@npm:3.1.5"
+  dependencies:
+    is-typedarray: "npm:^1.0.0"
+  checksum: 10c0/4ac5b7a93d604edabf3ac58d3a2f7e07487e9f6e98195a080e81dbffdc4127817f470f219d794a843b87052cedef102b53ac9b539855380b8c2172054b7d5027
+  languageName: node
+  linkType: hard
+
+"typedarray@npm:^0.0.6":
+  version: 0.0.6
+  resolution: "typedarray@npm:0.0.6"
+  checksum: 10c0/6005cb31df50eef8b1f3c780eb71a17925f3038a100d82f9406ac2ad1de5eb59f8e6decbdc145b3a1f8e5836e17b0c0002fb698b9fe2516b8f9f9ff602d36412
+  languageName: node
+  linkType: hard
+
+"typescript-eslint@npm:^8.4.0":
+  version: 8.4.0
+  resolution: "typescript-eslint@npm:8.4.0"
+  dependencies:
+    "@typescript-eslint/eslint-plugin": "npm:8.4.0"
+    "@typescript-eslint/parser": "npm:8.4.0"
+    "@typescript-eslint/utils": "npm:8.4.0"
+  peerDependenciesMeta:
+    typescript:
+      optional: true
+  checksum: 10c0/266ef73fdc1f7fa19228b8653d61ad143261ccd35f7d5d647092ed0e1512de2d4e3d1b9e1f2520658708cc0c1d7925c4ec97f23440c180a3bf1716e81d65123f
+  languageName: node
+  linkType: hard
+
+"typescript@npm:^5.2.2, typescript@npm:~5.5.4":
+  version: 5.5.4
+  resolution: "typescript@npm:5.5.4"
+  bin:
+    tsc: bin/tsc
+    tsserver: bin/tsserver
+  checksum: 10c0/422be60f89e661eab29ac488c974b6cc0a660fb2228003b297c3d10c32c90f3bcffc1009b43876a082515a3c376b1eefcce823d6e78982e6878408b9a923199c
+  languageName: node
+  linkType: hard
+
+"typescript@patch:typescript@npm%3A^5.2.2#optional!builtin, typescript@patch:typescript@npm%3A~5.5.4#optional!builtin":
+  version: 5.5.4
+  resolution: "typescript@patch:typescript@npm%3A5.5.4#optional!builtin::version=5.5.4&hash=379a07"
+  bin:
+    tsc: bin/tsc
+    tsserver: bin/tsserver
+  checksum: 10c0/73409d7b9196a5a1217b3aaad929bf76294d3ce7d6e9766dd880ece296ee91cf7d7db6b16c6c6c630ee5096eccde726c0ef17c7dfa52b01a243e57ae1f09ef07
+  languageName: node
+  linkType: hard
+
+"unbox-primitive@npm:^1.0.2":
+  version: 1.0.2
+  resolution: "unbox-primitive@npm:1.0.2"
+  dependencies:
+    call-bind: "npm:^1.0.2"
+    has-bigints: "npm:^1.0.2"
+    has-symbols: "npm:^1.0.3"
+    which-boxed-primitive: "npm:^1.0.2"
+  checksum: 10c0/81ca2e81134167cc8f75fa79fbcc8a94379d6c61de67090986a2273850989dd3bae8440c163121b77434b68263e34787a675cbdcb34bb2f764c6b9c843a11b66
+  languageName: node
+  linkType: hard
+
+"undici-types@npm:~6.19.2":
+  version: 6.19.8
+  resolution: "undici-types@npm:6.19.8"
+  checksum: 10c0/078afa5990fba110f6824823ace86073b4638f1d5112ee26e790155f481f2a868cc3e0615505b6f4282bdf74a3d8caad715fd809e870c2bb0704e3ea6082f344
+  languageName: node
+  linkType: hard
+
+"unicode-canonical-property-names-ecmascript@npm:^2.0.0":
+  version: 2.0.0
+  resolution: "unicode-canonical-property-names-ecmascript@npm:2.0.0"
+  checksum: 10c0/0fe812641bcfa3ae433025178a64afb5d9afebc21a922dafa7cba971deebb5e4a37350423890750132a85c936c290fb988146d0b1bd86838ad4897f4fc5bd0de
+  languageName: node
+  linkType: hard
+
+"unicode-emoji-modifier-base@npm:^1.0.0":
+  version: 1.0.0
+  resolution: "unicode-emoji-modifier-base@npm:1.0.0"
+  checksum: 10c0/b37623fcf0162186debd20f116483e035a2d5b905b932a2c472459d9143d446ebcbefb2a494e2fe4fa7434355396e2a95ec3fc1f0c29a3bc8f2c827220e79c66
+  languageName: node
+  linkType: hard
+
+"unicode-match-property-ecmascript@npm:^2.0.0":
+  version: 2.0.0
+  resolution: "unicode-match-property-ecmascript@npm:2.0.0"
+  dependencies:
+    unicode-canonical-property-names-ecmascript: "npm:^2.0.0"
+    unicode-property-aliases-ecmascript: "npm:^2.0.0"
+  checksum: 10c0/4d05252cecaf5c8e36d78dc5332e03b334c6242faf7cf16b3658525441386c0a03b5f603d42cbec0f09bb63b9fd25c9b3b09667aee75463cac3efadae2cd17ec
+  languageName: node
+  linkType: hard
+
+"unicode-match-property-value-ecmascript@npm:^2.1.0":
+  version: 2.1.0
+  resolution: "unicode-match-property-value-ecmascript@npm:2.1.0"
+  checksum: 10c0/f5b9499b9e0ffdc6027b744d528f17ec27dd7c15da03254ed06851feec47e0531f20d410910c8a49af4a6a190f4978413794c8d75ce112950b56d583b5d5c7f2
+  languageName: node
+  linkType: hard
+
+"unicode-property-aliases-ecmascript@npm:^2.0.0":
+  version: 2.1.0
+  resolution: "unicode-property-aliases-ecmascript@npm:2.1.0"
+  checksum: 10c0/50ded3f8c963c7785e48c510a3b7c6bc4e08a579551489aa0349680a35b1ceceec122e33b2b6c1b579d0be2250f34bb163ac35f5f8695fe10bbc67fb757f0af8
+  languageName: node
+  linkType: hard
+
+"unified-engine@npm:^11.2.0":
+  version: 11.2.1
+  resolution: "unified-engine@npm:11.2.1"
+  dependencies:
+    "@types/concat-stream": "npm:^2.0.0"
+    "@types/debug": "npm:^4.0.0"
+    "@types/is-empty": "npm:^1.0.0"
+    "@types/node": "npm:^20.0.0"
+    "@types/unist": "npm:^3.0.0"
+    concat-stream: "npm:^2.0.0"
+    debug: "npm:^4.0.0"
+    extend: "npm:^3.0.0"
+    glob: "npm:^10.0.0"
+    ignore: "npm:^5.0.0"
+    is-empty: "npm:^1.0.0"
+    is-plain-obj: "npm:^4.0.0"
+    load-plugin: "npm:^6.0.0"
+    parse-json: "npm:^7.0.0"
+    trough: "npm:^2.0.0"
+    unist-util-inspect: "npm:^8.0.0"
+    vfile: "npm:^6.0.0"
+    vfile-message: "npm:^4.0.0"
+    vfile-reporter: "npm:^8.0.0"
+    vfile-statistics: "npm:^3.0.0"
+    yaml: "npm:^2.0.0"
+  checksum: 10c0/bd5f13c79ad6c279780a6a3461ac46a63191c7237b7e8c09bbe945e75302d021db773a16c70fbbb2bdd5d231feb3bc4b0d4bd74499eb3f71e4d91c678f33669b
+  languageName: node
+  linkType: hard
+
+"unified@npm:^11.0.0, unified@npm:^11.0.3, unified@npm:^11.0.4":
+  version: 11.0.5
+  resolution: "unified@npm:11.0.5"
+  dependencies:
+    "@types/unist": "npm:^3.0.0"
+    bail: "npm:^2.0.0"
+    devlop: "npm:^1.0.0"
+    extend: "npm:^3.0.0"
+    is-plain-obj: "npm:^4.0.0"
+    trough: "npm:^2.0.0"
+    vfile: "npm:^6.0.0"
+  checksum: 10c0/53c8e685f56d11d9d458a43e0e74328a4d6386af51c8ac37a3dcabec74ce5026da21250590d4aff6733ccd7dc203116aae2b0769abc18cdf9639a54ae528dfc9
+  languageName: node
+  linkType: hard
+
+"unique-filename@npm:^3.0.0":
+  version: 3.0.0
+  resolution: "unique-filename@npm:3.0.0"
+  dependencies:
+    unique-slug: "npm:^4.0.0"
+  checksum: 10c0/6363e40b2fa758eb5ec5e21b3c7fb83e5da8dcfbd866cc0c199d5534c42f03b9ea9ab069769cc388e1d7ab93b4eeef28ef506ab5f18d910ef29617715101884f
+  languageName: node
+  linkType: hard
+
+"unique-slug@npm:^4.0.0":
+  version: 4.0.0
+  resolution: "unique-slug@npm:4.0.0"
+  dependencies:
+    imurmurhash: "npm:^0.1.4"
+  checksum: 10c0/cb811d9d54eb5821b81b18205750be84cb015c20a4a44280794e915f5a0a70223ce39066781a354e872df3572e8155c228f43ff0cce94c7cbf4da2cc7cbdd635
+  languageName: node
+  linkType: hard
+
+"unique-string@npm:^3.0.0":
+  version: 3.0.0
+  resolution: "unique-string@npm:3.0.0"
+  dependencies:
+    crypto-random-string: "npm:^4.0.0"
+  checksum: 10c0/b35ea034b161b2a573666ec16c93076b4b6106b8b16c2415808d747ab3a0566b5db0c4be231d4b11cfbc16d7fd915c9d8a45884bff0e2db11b799775b2e1e017
+  languageName: node
+  linkType: hard
+
+"unist-util-inspect@npm:^8.0.0":
+  version: 8.1.0
+  resolution: "unist-util-inspect@npm:8.1.0"
+  dependencies:
+    "@types/unist": "npm:^3.0.0"
+  checksum: 10c0/d3dff256ffd77a1e8dd583be89070dc1ab124d424794fcc1105a38c2f0bb0538afc686e592699807c7d9fa612821961033fe38e26c11ba0bb51d19e8ae7c4119
+  languageName: node
+  linkType: hard
+
+"unist-util-is@npm:^6.0.0":
+  version: 6.0.0
+  resolution: "unist-util-is@npm:6.0.0"
+  dependencies:
+    "@types/unist": "npm:^3.0.0"
+  checksum: 10c0/9419352181eaa1da35eca9490634a6df70d2217815bb5938a04af3a662c12c5607a2f1014197ec9c426fbef18834f6371bfdb6f033040fa8aa3e965300d70e7e
+  languageName: node
+  linkType: hard
+
+"unist-util-position-from-estree@npm:^2.0.0":
+  version: 2.0.0
+  resolution: "unist-util-position-from-estree@npm:2.0.0"
+  dependencies:
+    "@types/unist": "npm:^3.0.0"
+  checksum: 10c0/39127bf5f0594e0a76d9241dec4f7aa26323517120ce1edd5ed91c8c1b9df7d6fb18af556e4b6250f1c7368825720ed892e2b6923be5cdc08a9bb16536dc37b3
+  languageName: node
+  linkType: hard
+
+"unist-util-position@npm:^5.0.0":
+  version: 5.0.0
+  resolution: "unist-util-position@npm:5.0.0"
+  dependencies:
+    "@types/unist": "npm:^3.0.0"
+  checksum: 10c0/dde3b31e314c98f12b4dc6402f9722b2bf35e96a4f2d463233dd90d7cde2d4928074a7a11eff0a5eb1f4e200f27fc1557e0a64a7e8e4da6558542f251b1b7400
+  languageName: node
+  linkType: hard
+
+"unist-util-remove-position@npm:^5.0.0":
+  version: 5.0.0
+  resolution: "unist-util-remove-position@npm:5.0.0"
+  dependencies:
+    "@types/unist": "npm:^3.0.0"
+    unist-util-visit: "npm:^5.0.0"
+  checksum: 10c0/e8c76da4399446b3da2d1c84a97c607b37d03d1d92561e14838cbe4fdcb485bfc06c06cfadbb808ccb72105a80643976d0660d1fe222ca372203075be9d71105
+  languageName: node
+  linkType: hard
+
+"unist-util-stringify-position@npm:^2.0.0":
+  version: 2.0.3
+  resolution: "unist-util-stringify-position@npm:2.0.3"
+  dependencies:
+    "@types/unist": "npm:^2.0.2"
+  checksum: 10c0/46fa03f840df173b7f032cbfffdb502fb05b79b3fb5451681c796cf4985d9087a537833f5afb75d55e79b46bbbe4b3d81dd75a1062f9289091c526aebe201d5d
+  languageName: node
+  linkType: hard
+
+"unist-util-stringify-position@npm:^3.0.0":
+  version: 3.0.3
+  resolution: "unist-util-stringify-position@npm:3.0.3"
+  dependencies:
+    "@types/unist": "npm:^2.0.0"
+  checksum: 10c0/14550027825230528f6437dad7f2579a841780318569851291be6c8a970bae6f65a7feb24dabbcfce0e5e68cacae85bf12cbda3f360f7c873b4db602bdf7bb21
+  languageName: node
+  linkType: hard
+
+"unist-util-stringify-position@npm:^4.0.0": + version: 4.0.0 + resolution: "unist-util-stringify-position@npm:4.0.0" + dependencies: + "@types/unist": "npm:^3.0.0" + checksum: 10c0/dfe1dbe79ba31f589108cb35e523f14029b6675d741a79dea7e5f3d098785045d556d5650ec6a8338af11e9e78d2a30df12b1ee86529cded1098da3f17ee999e + languageName: node + linkType: hard + +"unist-util-visit-parents@npm:^6.0.0": + version: 6.0.1 + resolution: "unist-util-visit-parents@npm:6.0.1" + dependencies: + "@types/unist": "npm:^3.0.0" + unist-util-is: "npm:^6.0.0" + checksum: 10c0/51b1a5b0aa23c97d3e03e7288f0cdf136974df2217d0999d3de573c05001ef04cccd246f51d2ebdfb9e8b0ed2704451ad90ba85ae3f3177cf9772cef67f56206 + languageName: node + linkType: hard + +"unist-util-visit@npm:^5.0.0": + version: 5.0.0 + resolution: "unist-util-visit@npm:5.0.0" + dependencies: + "@types/unist": "npm:^3.0.0" + unist-util-is: "npm:^6.0.0" + unist-util-visit-parents: "npm:^6.0.0" + checksum: 10c0/51434a1d80252c1540cce6271a90fd1a106dbe624997c09ed8879279667fb0b2d3a685e02e92bf66598dcbe6cdffa7a5f5fb363af8fdf90dda6c855449ae39a5 + languageName: node + linkType: hard + +"universalify@npm:^2.0.0": + version: 2.0.1 + resolution: "universalify@npm:2.0.1" + checksum: 10c0/73e8ee3809041ca8b818efb141801a1004e3fc0002727f1531f4de613ea281b494a40909596dae4a042a4fb6cd385af5d4db2e137b1362e0e91384b828effd3a + languageName: node + linkType: hard + +"unpipe@npm:1.0.0, unpipe@npm:~1.0.0": + version: 1.0.0 + resolution: "unpipe@npm:1.0.0" + checksum: 10c0/193400255bd48968e5c5383730344fbb4fa114cdedfab26e329e50dd2d81b134244bb8a72c6ac1b10ab0281a58b363d06405632c9d49ca9dfd5e90cbd7d0f32c + languageName: node + linkType: hard + +"update-browserslist-db@npm:^1.1.0": + version: 1.1.0 + resolution: "update-browserslist-db@npm:1.1.0" + dependencies: + escalade: "npm:^3.1.2" + picocolors: "npm:^1.0.1" + peerDependencies: + browserslist: ">= 4.21.0" + bin: + update-browserslist-db: cli.js + checksum: 10c0/a7452de47785842736fb71547651c5bbe5b4dc1e3722ccf48a704b7b34e4dcf633991eaa8e4a6a517ffb738b3252eede3773bef673ef9021baa26b056d63a5b9 + languageName: node + linkType: hard + +"update-notifier@npm:^6.0.2": + version: 6.0.2 + resolution: "update-notifier@npm:6.0.2" + dependencies: + boxen: "npm:^7.0.0" + chalk: "npm:^5.0.1" + configstore: "npm:^6.0.0" + has-yarn: "npm:^3.0.0" + import-lazy: "npm:^4.0.0" + is-ci: "npm:^3.0.1" + is-installed-globally: "npm:^0.4.0" + is-npm: "npm:^6.0.0" + is-yarn-global: "npm:^0.4.0" + latest-version: "npm:^7.0.0" + pupa: "npm:^3.1.0" + semver: "npm:^7.3.7" + semver-diff: "npm:^4.0.0" + xdg-basedir: "npm:^5.1.0" + checksum: 10c0/ad3980073312df904133a6e6c554a7f9d0832ed6275e55f5a546313fe77a0f20f23a7b1b4aeb409e20a78afb06f4d3b2b28b332d9cfb55745b5d1ea155810bcc + languageName: node + linkType: hard + +"uri-js@npm:^4.2.2": + version: 4.4.1 + resolution: "uri-js@npm:4.4.1" + dependencies: + punycode: "npm:^2.1.0" + checksum: 10c0/4ef57b45aa820d7ac6496e9208559986c665e49447cb072744c13b66925a362d96dd5a46c4530a6b8e203e5db5fe849369444440cb22ecfc26c679359e5dfa3c + languageName: node + linkType: hard + +"url-loader@npm:^4.1.1": + version: 4.1.1 + resolution: "url-loader@npm:4.1.1" + dependencies: + loader-utils: "npm:^2.0.0" + mime-types: "npm:^2.1.27" + schema-utils: "npm:^3.0.0" + peerDependencies: + file-loader: "*" + webpack: ^4.0.0 || ^5.0.0 + peerDependenciesMeta: + file-loader: + optional: true + checksum: 10c0/71b6300e02ce26c70625eae1a2297c0737635038c62691bb3007ac33e85c0130efc74bfb444baf5c6b3bad5953491159d31d66498967d1417865d0c7e7cd1a64 + languageName: node + linkType: 
hard + +"util-deprecate@npm:^1.0.1, util-deprecate@npm:^1.0.2, util-deprecate@npm:~1.0.1": + version: 1.0.2 + resolution: "util-deprecate@npm:1.0.2" + checksum: 10c0/41a5bdd214df2f6c3ecf8622745e4a366c4adced864bc3c833739791aeeeb1838119af7daed4ba36428114b5c67dcda034a79c882e97e43c03e66a4dd7389942 + languageName: node + linkType: hard + +"utila@npm:~0.4": + version: 0.4.0 + resolution: "utila@npm:0.4.0" + checksum: 10c0/2791604e09ca4f77ae314df83e80d1805f867eb5c7e13e7413caee01273c278cf2c9a3670d8d25c889a877f7b149d892fe61b0181a81654b425e9622ab23d42e + languageName: node + linkType: hard + +"utility-types@npm:^3.10.0": + version: 3.11.0 + resolution: "utility-types@npm:3.11.0" + checksum: 10c0/2f1580137b0c3e6cf5405f37aaa8f5249961a76d26f1ca8efc0ff49a2fc0e0b2db56de8e521a174d075758e0c7eb3e590edec0832eb44478b958f09914920f19 + languageName: node + linkType: hard + +"utils-merge@npm:1.0.1": + version: 1.0.1 + resolution: "utils-merge@npm:1.0.1" + checksum: 10c0/02ba649de1b7ca8854bfe20a82f1dfbdda3fb57a22ab4a8972a63a34553cf7aa51bc9081cf7e001b035b88186d23689d69e71b510e610a09a4c66f68aa95b672 + languageName: node + linkType: hard + +"uuid@npm:^8.3.2": + version: 8.3.2 + resolution: "uuid@npm:8.3.2" + bin: + uuid: dist/bin/uuid + checksum: 10c0/bcbb807a917d374a49f475fae2e87fdca7da5e5530820ef53f65ba1d12131bd81a92ecf259cc7ce317cbe0f289e7d79fdfebcef9bfa3087c8c8a2fa304c9be54 + languageName: node + linkType: hard + +"uuid@npm:^9.0.0": + version: 9.0.1 + resolution: "uuid@npm:9.0.1" + bin: + uuid: dist/bin/uuid + checksum: 10c0/1607dd32ac7fc22f2d8f77051e6a64845c9bce5cd3dd8aa0070c074ec73e666a1f63c7b4e0f4bf2bc8b9d59dc85a15e17807446d9d2b17c8485fbc2147b27f9b + languageName: node + linkType: hard + +"uvu@npm:^0.5.0, uvu@npm:^0.5.6": + version: 0.5.6 + resolution: "uvu@npm:0.5.6" + dependencies: + dequal: "npm:^2.0.0" + diff: "npm:^5.0.0" + kleur: "npm:^4.0.3" + sade: "npm:^1.7.3" + bin: + uvu: bin.js + checksum: 10c0/ad32eb5f7d94bdeb71f80d073003f0138e24f61ed68cecc8e15d2f30838f44c9670577bb1775c8fac894bf93d1bc1583d470a9195e49bfa6efa14cc6f4942bff + languageName: node + linkType: hard + +"validate-npm-package-license@npm:^3.0.4": + version: 3.0.4 + resolution: "validate-npm-package-license@npm:3.0.4" + dependencies: + spdx-correct: "npm:^3.0.0" + spdx-expression-parse: "npm:^3.0.0" + checksum: 10c0/7b91e455a8de9a0beaa9fe961e536b677da7f48c9a493edf4d4d4a87fd80a7a10267d438723364e432c2fcd00b5650b5378275cded362383ef570276e6312f4f + languageName: node + linkType: hard + +"validate-npm-package-name@npm:^5.0.0": + version: 5.0.1 + resolution: "validate-npm-package-name@npm:5.0.1" + checksum: 10c0/903e738f7387404bb72f7ac34e45d7010c877abd2803dc2d614612527927a40a6d024420033132e667b1bade94544b8a1f65c9431a4eb30d0ce0d80093cd1f74 + languageName: node + linkType: hard + +"validate-peer-dependencies@npm:^2.2.0": + version: 2.2.0 + resolution: "validate-peer-dependencies@npm:2.2.0" + dependencies: + resolve-package-path: "npm:^4.0.3" + semver: "npm:^7.3.8" + checksum: 10c0/0728592b335dbd5d1444019a4e36e34b4de3a8ade5a4970a5f87b40f881dedeff1a00eb3d79add155a4ab3b97c9990d11ed21d8a3d7dccadd129a5bdf5b02a5a + languageName: node + linkType: hard + +"value-equal@npm:^1.0.1": + version: 1.0.1 + resolution: "value-equal@npm:1.0.1" + checksum: 10c0/79068098355483ef29f4d3753999ad880875b87625d7e9055cad9346ea4b7662aad3a66f87976801b0dd7a6f828ba973d28b1669ebcd37eaf88cc5f687c1a691 + languageName: node + linkType: hard + +"vary@npm:~1.1.2": + version: 1.1.2 + resolution: "vary@npm:1.1.2" + checksum: 
10c0/f15d588d79f3675135ba783c91a4083dcd290a2a5be9fcb6514220a1634e23df116847b1cc51f66bfb0644cf9353b2abb7815ae499bab06e46dd33c1a6bf1f4f + languageName: node + linkType: hard + +"vfile-location@npm:^5.0.0": + version: 5.0.3 + resolution: "vfile-location@npm:5.0.3" + dependencies: + "@types/unist": "npm:^3.0.0" + vfile: "npm:^6.0.0" + checksum: 10c0/1711f67802a5bc175ea69750d59863343ed43d1b1bb25c0a9063e4c70595e673e53e2ed5cdbb6dcdc370059b31605144d95e8c061b9361bcc2b036b8f63a4966 + languageName: node + linkType: hard + +"vfile-message@npm:^4.0.0": + version: 4.0.2 + resolution: "vfile-message@npm:4.0.2" + dependencies: + "@types/unist": "npm:^3.0.0" + unist-util-stringify-position: "npm:^4.0.0" + checksum: 10c0/07671d239a075f888b78f318bc1d54de02799db4e9dce322474e67c35d75ac4a5ac0aaf37b18801d91c9f8152974ea39678aa72d7198758b07f3ba04fb7d7514 + languageName: node + linkType: hard + +"vfile-reporter@npm:^8.0.0": + version: 8.1.1 + resolution: "vfile-reporter@npm:8.1.1" + dependencies: + "@types/supports-color": "npm:^8.0.0" + string-width: "npm:^6.0.0" + supports-color: "npm:^9.0.0" + unist-util-stringify-position: "npm:^4.0.0" + vfile: "npm:^6.0.0" + vfile-message: "npm:^4.0.0" + vfile-sort: "npm:^4.0.0" + vfile-statistics: "npm:^3.0.0" + checksum: 10c0/5da85c67e4a26762d64d65d0aac5ef339a413cc051470d970eea7352f07afd24577d42780c3af93c109177078df1bbbdbcc3e82adcc34e1bb96d2665f3f0c2a1 + languageName: node + linkType: hard + +"vfile-sort@npm:^4.0.0": + version: 4.0.0 + resolution: "vfile-sort@npm:4.0.0" + dependencies: + vfile: "npm:^6.0.0" + vfile-message: "npm:^4.0.0" + checksum: 10c0/fe1a4cbe24d03b81a7e7486be107eb029ac2631a3575e55a3f1d25cf54bcf2d60b3f76694dedf8a2f60793877e1d192234157cdfd50d1a0d18b9a4c1487cdf65 + languageName: node + linkType: hard + +"vfile-statistics@npm:^3.0.0": + version: 3.0.0 + resolution: "vfile-statistics@npm:3.0.0" + dependencies: + vfile: "npm:^6.0.0" + vfile-message: "npm:^4.0.0" + checksum: 10c0/3de51670329701e2cff75d979564087578844444d9b9d8619a2fdd2a904bc970bf4d05b58e7cee71e0f6f34087f1f7f2ea85cdfa5bf58f572c777432c156bd8f + languageName: node + linkType: hard + +"vfile@npm:^6.0.0, vfile@npm:^6.0.1": + version: 6.0.2 + resolution: "vfile@npm:6.0.2" + dependencies: + "@types/unist": "npm:^3.0.0" + unist-util-stringify-position: "npm:^4.0.0" + vfile-message: "npm:^4.0.0" + checksum: 10c0/96b7e060b332ff1b05462053bd9b0f39062c00c5eabb78fc75603cc808d5f77c4379857fffca3e30a28e0aad2d51c065dfcd4a43fbe15b1fc9c2aaa9ac1be8e1 + languageName: node + linkType: hard + +"vue-eslint-parser@npm:^9.1.0": + version: 9.4.3 + resolution: "vue-eslint-parser@npm:9.4.3" + dependencies: + debug: "npm:^4.3.4" + eslint-scope: "npm:^7.1.1" + eslint-visitor-keys: "npm:^3.3.0" + espree: "npm:^9.3.1" + esquery: "npm:^1.4.0" + lodash: "npm:^4.17.21" + semver: "npm:^7.3.6" + peerDependencies: + eslint: ">=6.0.0" + checksum: 10c0/128be5988de025b5abd676a91c3e92af68288a5da1c20b2ff848fe90e040c04b2222a03b5d8048cf4a5e0b667a8addfb6f6e6565860d4afb5190c4cc42d05578 + languageName: node + linkType: hard + +"walk-up-path@npm:^3.0.1": + version: 3.0.1 + resolution: "walk-up-path@npm:3.0.1" + checksum: 10c0/3184738e0cf33698dd58b0ee4418285b9c811e58698f52c1f025435a85c25cbc5a63fee599f1a79cb29ca7ef09a44ec9417b16bfd906b1a37c305f7aa20ee5bc + languageName: node + linkType: hard + +"watchpack@npm:^2.4.1": + version: 2.4.2 + resolution: "watchpack@npm:2.4.2" + dependencies: + glob-to-regexp: "npm:^0.4.1" + graceful-fs: "npm:^4.1.2" + checksum: 
10c0/ec60a5f0e9efaeca0102fd9126346b3b2d523e01c34030d3fddf5813a7125765121ebdc2552981136dcd2c852deb1af0b39340f2fcc235f292db5399d0283577 + languageName: node + linkType: hard + +"wbuf@npm:^1.1.0, wbuf@npm:^1.7.3": + version: 1.7.3 + resolution: "wbuf@npm:1.7.3" + dependencies: + minimalistic-assert: "npm:^1.0.0" + checksum: 10c0/56edcc5ef2b3d30913ba8f1f5cccc364d180670b24d5f3f8849c1e6fb514e5c7e3a87548ae61227a82859eba6269c11393ae24ce12a2ea1ecb9b465718ddced7 + languageName: node + linkType: hard + +"web-namespaces@npm:^2.0.0": + version: 2.0.1 + resolution: "web-namespaces@npm:2.0.1" + checksum: 10c0/df245f466ad83bd5cd80bfffc1674c7f64b7b84d1de0e4d2c0934fb0782e0a599164e7197a4bce310ee3342fd61817b8047ff04f076a1ce12dd470584142a4bd + languageName: node + linkType: hard + +"web-worker@npm:^1.2.0": + version: 1.3.0 + resolution: "web-worker@npm:1.3.0" + checksum: 10c0/bca341b421f07c2d33aa205d463e6a2d3d376fb0628a01052dc343fd88a1d688df58d1c7fe36f631d0d860bbd3060f5014cca67d6f8781634b6c2fae25d1fc70 + languageName: node + linkType: hard + +"webpack-bundle-analyzer@npm:^4.9.0": + version: 4.10.2 + resolution: "webpack-bundle-analyzer@npm:4.10.2" + dependencies: + "@discoveryjs/json-ext": "npm:0.5.7" + acorn: "npm:^8.0.4" + acorn-walk: "npm:^8.0.0" + commander: "npm:^7.2.0" + debounce: "npm:^1.2.1" + escape-string-regexp: "npm:^4.0.0" + gzip-size: "npm:^6.0.0" + html-escaper: "npm:^2.0.2" + opener: "npm:^1.5.2" + picocolors: "npm:^1.0.0" + sirv: "npm:^2.0.3" + ws: "npm:^7.3.1" + bin: + webpack-bundle-analyzer: lib/bin/analyzer.js + checksum: 10c0/00603040e244ead15b2d92981f0559fa14216381349412a30070a7358eb3994cd61a8221d34a3b3fb8202dc3d1c5ee1fbbe94c5c52da536e5b410aa1cf279a48 + languageName: node + linkType: hard + +"webpack-dev-middleware@npm:^5.3.4": + version: 5.3.4 + resolution: "webpack-dev-middleware@npm:5.3.4" + dependencies: + colorette: "npm:^2.0.10" + memfs: "npm:^3.4.3" + mime-types: "npm:^2.1.31" + range-parser: "npm:^1.2.1" + schema-utils: "npm:^4.0.0" + peerDependencies: + webpack: ^4.0.0 || ^5.0.0 + checksum: 10c0/257df7d6bc5494d1d3cb66bba70fbdf5a6e0423e39b6420f7631aeb52435afbfbff8410a62146dcdf3d2f945c62e03193aae2ac1194a2f7d5a2523b9d194e9e1 + languageName: node + linkType: hard + +"webpack-dev-server@npm:^4.15.1": + version: 4.15.2 + resolution: "webpack-dev-server@npm:4.15.2" + dependencies: + "@types/bonjour": "npm:^3.5.9" + "@types/connect-history-api-fallback": "npm:^1.3.5" + "@types/express": "npm:^4.17.13" + "@types/serve-index": "npm:^1.9.1" + "@types/serve-static": "npm:^1.13.10" + "@types/sockjs": "npm:^0.3.33" + "@types/ws": "npm:^8.5.5" + ansi-html-community: "npm:^0.0.8" + bonjour-service: "npm:^1.0.11" + chokidar: "npm:^3.5.3" + colorette: "npm:^2.0.10" + compression: "npm:^1.7.4" + connect-history-api-fallback: "npm:^2.0.0" + default-gateway: "npm:^6.0.3" + express: "npm:^4.17.3" + graceful-fs: "npm:^4.2.6" + html-entities: "npm:^2.3.2" + http-proxy-middleware: "npm:^2.0.3" + ipaddr.js: "npm:^2.0.1" + launch-editor: "npm:^2.6.0" + open: "npm:^8.0.9" + p-retry: "npm:^4.5.0" + rimraf: "npm:^3.0.2" + schema-utils: "npm:^4.0.0" + selfsigned: "npm:^2.1.1" + serve-index: "npm:^1.9.1" + sockjs: "npm:^0.3.24" + spdy: "npm:^4.0.2" + webpack-dev-middleware: "npm:^5.3.4" + ws: "npm:^8.13.0" + peerDependencies: + webpack: ^4.37.0 || ^5.0.0 + peerDependenciesMeta: + webpack: + optional: true + webpack-cli: + optional: true + bin: + webpack-dev-server: bin/webpack-dev-server.js + checksum: 
10c0/625bd5b79360afcf98782c8b1fd710b180bb0e96d96b989defff550c546890010ceea82ffbecb2a0a23f7f018bc72f2dee7b3070f7b448fb0110df6657fb2904 + languageName: node + linkType: hard + +"webpack-merge@npm:^5.9.0": + version: 5.10.0 + resolution: "webpack-merge@npm:5.10.0" + dependencies: + clone-deep: "npm:^4.0.1" + flat: "npm:^5.0.2" + wildcard: "npm:^2.0.0" + checksum: 10c0/b607c84cabaf74689f965420051a55a08722d897bdd6c29cb0b2263b451c090f962d41ecf8c9bf56b0ab3de56e65476ace0a8ecda4f4a4663684243d90e0512b + languageName: node + linkType: hard + +"webpack-sources@npm:^3.2.3": + version: 3.2.3 + resolution: "webpack-sources@npm:3.2.3" + checksum: 10c0/2ef63d77c4fad39de4a6db17323d75eb92897b32674e97d76f0a1e87c003882fc038571266ad0ef581ac734cbe20952912aaa26155f1905e96ce251adbb1eb4e + languageName: node + linkType: hard + +"webpack@npm:^5.88.1, webpack@npm:^5.94.0": + version: 5.94.0 + resolution: "webpack@npm:5.94.0" + dependencies: + "@types/estree": "npm:^1.0.5" + "@webassemblyjs/ast": "npm:^1.12.1" + "@webassemblyjs/wasm-edit": "npm:^1.12.1" + "@webassemblyjs/wasm-parser": "npm:^1.12.1" + acorn: "npm:^8.7.1" + acorn-import-attributes: "npm:^1.9.5" + browserslist: "npm:^4.21.10" + chrome-trace-event: "npm:^1.0.2" + enhanced-resolve: "npm:^5.17.1" + es-module-lexer: "npm:^1.2.1" + eslint-scope: "npm:5.1.1" + events: "npm:^3.2.0" + glob-to-regexp: "npm:^0.4.1" + graceful-fs: "npm:^4.2.11" + json-parse-even-better-errors: "npm:^2.3.1" + loader-runner: "npm:^4.2.0" + mime-types: "npm:^2.1.27" + neo-async: "npm:^2.6.2" + schema-utils: "npm:^3.2.0" + tapable: "npm:^2.1.1" + terser-webpack-plugin: "npm:^5.3.10" + watchpack: "npm:^2.4.1" + webpack-sources: "npm:^3.2.3" + peerDependenciesMeta: + webpack-cli: + optional: true + bin: + webpack: bin/webpack.js + checksum: 10c0/b4d1b751f634079bd177a89eef84d80fa5bb8d6fc15d72ab40fc2b9ca5167a79b56585e1a849e9e27e259803ee5c4365cb719e54af70a43c06358ec268ff4ebf + languageName: node + linkType: hard + +"webpackbar@npm:^5.0.2": + version: 5.0.2 + resolution: "webpackbar@npm:5.0.2" + dependencies: + chalk: "npm:^4.1.0" + consola: "npm:^2.15.3" + pretty-time: "npm:^1.1.0" + std-env: "npm:^3.0.1" + peerDependencies: + webpack: 3 || 4 || 5 + checksum: 10c0/336568a6ed1c1ad743c8d20a5cab5875a7ebe1e96181f49ae0a1a897f1a59d1661d837574a25d8ba9dfa4f2f705bd46ca0cd037ff60286ff70fb8d9db2b0c123 + languageName: node + linkType: hard + +"websocket-driver@npm:>=0.5.1, websocket-driver@npm:^0.7.4": + version: 0.7.4 + resolution: "websocket-driver@npm:0.7.4" + dependencies: + http-parser-js: "npm:>=0.5.1" + safe-buffer: "npm:>=5.1.0" + websocket-extensions: "npm:>=0.1.1" + checksum: 10c0/5f09547912b27bdc57bac17b7b6527d8993aa4ac8a2d10588bb74aebaf785fdcf64fea034aae0c359b7adff2044dd66f3d03866e4685571f81b13e548f9021f1 + languageName: node + linkType: hard + +"websocket-extensions@npm:>=0.1.1": + version: 0.1.4 + resolution: "websocket-extensions@npm:0.1.4" + checksum: 10c0/bbc8c233388a0eb8a40786ee2e30d35935cacbfe26ab188b3e020987e85d519c2009fe07cfc37b7f718b85afdba7e54654c9153e6697301f72561bfe429177e0 + languageName: node + linkType: hard + +"which-boxed-primitive@npm:^1.0.2": + version: 1.0.2 + resolution: "which-boxed-primitive@npm:1.0.2" + dependencies: + is-bigint: "npm:^1.0.1" + is-boolean-object: "npm:^1.1.0" + is-number-object: "npm:^1.0.4" + is-string: "npm:^1.0.5" + is-symbol: "npm:^1.0.3" + checksum: 10c0/0a62a03c00c91dd4fb1035b2f0733c341d805753b027eebd3a304b9cb70e8ce33e25317add2fe9b5fea6f53a175c0633ae701ff812e604410ddd049777cd435e + languageName: node + linkType: hard + 
+"which-builtin-type@npm:^1.1.3": + version: 1.1.4 + resolution: "which-builtin-type@npm:1.1.4" + dependencies: + function.prototype.name: "npm:^1.1.6" + has-tostringtag: "npm:^1.0.2" + is-async-function: "npm:^2.0.0" + is-date-object: "npm:^1.0.5" + is-finalizationregistry: "npm:^1.0.2" + is-generator-function: "npm:^1.0.10" + is-regex: "npm:^1.1.4" + is-weakref: "npm:^1.0.2" + isarray: "npm:^2.0.5" + which-boxed-primitive: "npm:^1.0.2" + which-collection: "npm:^1.0.2" + which-typed-array: "npm:^1.1.15" + checksum: 10c0/a4a76d20d869a81b1dbb4adea31edc7e6c1a4466d3ab7c2cd757c9219d48d3723b04076c85583257b0f0f8e3ebe5af337248b8ceed57b9051cb97bce5bd881d1 + languageName: node + linkType: hard + +"which-collection@npm:^1.0.2": + version: 1.0.2 + resolution: "which-collection@npm:1.0.2" + dependencies: + is-map: "npm:^2.0.3" + is-set: "npm:^2.0.3" + is-weakmap: "npm:^2.0.2" + is-weakset: "npm:^2.0.3" + checksum: 10c0/3345fde20964525a04cdf7c4a96821f85f0cc198f1b2ecb4576e08096746d129eb133571998fe121c77782ac8f21cbd67745a3d35ce100d26d4e684c142ea1f2 + languageName: node + linkType: hard + +"which-typed-array@npm:^1.1.14, which-typed-array@npm:^1.1.15": + version: 1.1.15 + resolution: "which-typed-array@npm:1.1.15" + dependencies: + available-typed-arrays: "npm:^1.0.7" + call-bind: "npm:^1.0.7" + for-each: "npm:^0.3.3" + gopd: "npm:^1.0.1" + has-tostringtag: "npm:^1.0.2" + checksum: 10c0/4465d5348c044032032251be54d8988270e69c6b7154f8fcb2a47ff706fe36f7624b3a24246b8d9089435a8f4ec48c1c1025c5d6b499456b9e5eff4f48212983 + languageName: node + linkType: hard + +"which@npm:^1.3.1": + version: 1.3.1 + resolution: "which@npm:1.3.1" + dependencies: + isexe: "npm:^2.0.0" + bin: + which: ./bin/which + checksum: 10c0/e945a8b6bbf6821aaaef7f6e0c309d4b615ef35699576d5489b4261da9539f70393c6b2ce700ee4321c18f914ebe5644bc4631b15466ffbaad37d83151f6af59 + languageName: node + linkType: hard + +"which@npm:^2.0.1": + version: 2.0.2 + resolution: "which@npm:2.0.2" + dependencies: + isexe: "npm:^2.0.0" + bin: + node-which: ./bin/node-which + checksum: 10c0/66522872a768b60c2a65a57e8ad184e5372f5b6a9ca6d5f033d4b0dc98aff63995655a7503b9c0a2598936f532120e81dd8cc155e2e92ed662a2b9377cc4374f + languageName: node + linkType: hard + +"which@npm:^4.0.0": + version: 4.0.0 + resolution: "which@npm:4.0.0" + dependencies: + isexe: "npm:^3.1.1" + bin: + node-which: bin/which.js + checksum: 10c0/449fa5c44ed120ccecfe18c433296a4978a7583bf2391c50abce13f76878d2476defde04d0f79db8165bdf432853c1f8389d0485ca6e8ebce3bbcded513d5e6a + languageName: node + linkType: hard + +"widest-line@npm:^4.0.1": + version: 4.0.1 + resolution: "widest-line@npm:4.0.1" + dependencies: + string-width: "npm:^5.0.1" + checksum: 10c0/7da9525ba45eaf3e4ed1a20f3dcb9b85bd9443962450694dae950f4bdd752839747bbc14713522b0b93080007de8e8af677a61a8c2114aa553ad52bde72d0f9c + languageName: node + linkType: hard + +"wildcard@npm:^2.0.0": + version: 2.0.1 + resolution: "wildcard@npm:2.0.1" + checksum: 10c0/08f70cd97dd9a20aea280847a1fe8148e17cae7d231640e41eb26d2388697cbe65b67fd9e68715251c39b080c5ae4f76d71a9a69fa101d897273efdfb1b58bf7 + languageName: node + linkType: hard + +"word-wrap@npm:^1.2.5": + version: 1.2.5 + resolution: "word-wrap@npm:1.2.5" + checksum: 10c0/e0e4a1ca27599c92a6ca4c32260e8a92e8a44f4ef6ef93f803f8ed823f486e0889fc0b93be4db59c8d51b3064951d25e43d434e95dc8c960cc3a63d65d00ba20 + languageName: node + linkType: hard + +"wrap-ansi-cjs@npm:wrap-ansi@^7.0.0": + version: 7.0.0 + resolution: "wrap-ansi@npm:7.0.0" + dependencies: + ansi-styles: "npm:^4.0.0" + string-width: "npm:^4.1.0" + 
strip-ansi: "npm:^6.0.0" + checksum: 10c0/d15fc12c11e4cbc4044a552129ebc75ee3f57aa9c1958373a4db0292d72282f54373b536103987a4a7594db1ef6a4f10acf92978f79b98c49306a4b58c77d4da + languageName: node + linkType: hard + +"wrap-ansi@npm:^8.0.1, wrap-ansi@npm:^8.1.0": + version: 8.1.0 + resolution: "wrap-ansi@npm:8.1.0" + dependencies: + ansi-styles: "npm:^6.1.0" + string-width: "npm:^5.0.1" + strip-ansi: "npm:^7.0.1" + checksum: 10c0/138ff58a41d2f877eae87e3282c0630fc2789012fc1af4d6bd626eeb9a2f9a65ca92005e6e69a75c7b85a68479fe7443c7dbe1eb8fbaa681a4491364b7c55c60 + languageName: node + linkType: hard + +"wrappy@npm:1": + version: 1.0.2 + resolution: "wrappy@npm:1.0.2" + checksum: 10c0/56fece1a4018c6a6c8e28fbc88c87e0fbf4ea8fd64fc6c63b18f4acc4bd13e0ad2515189786dd2c30d3eec9663d70f4ecf699330002f8ccb547e4a18231fc9f0 + languageName: node + linkType: hard + +"write-file-atomic@npm:^3.0.3": + version: 3.0.3 + resolution: "write-file-atomic@npm:3.0.3" + dependencies: + imurmurhash: "npm:^0.1.4" + is-typedarray: "npm:^1.0.0" + signal-exit: "npm:^3.0.2" + typedarray-to-buffer: "npm:^3.1.5" + checksum: 10c0/7fb67affd811c7a1221bed0c905c26e28f0041e138fb19ccf02db57a0ef93ea69220959af3906b920f9b0411d1914474cdd90b93a96e5cd9e8368d9777caac0e + languageName: node + linkType: hard + +"ws@npm:^7.3.1": + version: 7.5.10 + resolution: "ws@npm:7.5.10" + peerDependencies: + bufferutil: ^4.0.1 + utf-8-validate: ^5.0.2 + peerDependenciesMeta: + bufferutil: + optional: true + utf-8-validate: + optional: true + checksum: 10c0/bd7d5f4aaf04fae7960c23dcb6c6375d525e00f795dd20b9385902bd008c40a94d3db3ce97d878acc7573df852056ca546328b27b39f47609f80fb22a0a9b61d + languageName: node + linkType: hard + +"ws@npm:^8.13.0": + version: 8.18.0 + resolution: "ws@npm:8.18.0" + peerDependencies: + bufferutil: ^4.0.1 + utf-8-validate: ">=5.0.2" + peerDependenciesMeta: + bufferutil: + optional: true + utf-8-validate: + optional: true + checksum: 10c0/25eb33aff17edcb90721ed6b0eb250976328533ad3cd1a28a274bd263682e7296a6591ff1436d6cbc50fa67463158b062f9d1122013b361cec99a05f84680e06 + languageName: node + linkType: hard + +"xdg-basedir@npm:^5.0.1, xdg-basedir@npm:^5.1.0": + version: 5.1.0 + resolution: "xdg-basedir@npm:5.1.0" + checksum: 10c0/c88efabc71ffd996ba9ad8923a8cc1c7c020a03e2c59f0ffa72e06be9e724ad2a0fccef488757bc6ed3d8849d753dd25082d1035d95cb179e79eae4d034d0b80 + languageName: node + linkType: hard + +"xml-js@npm:^1.6.11": + version: 1.6.11 + resolution: "xml-js@npm:1.6.11" + dependencies: + sax: "npm:^1.2.4" + bin: + xml-js: ./bin/cli.js + checksum: 10c0/c83631057f10bf90ea785cee434a8a1a0030c7314fe737ad9bf568a281083b565b28b14c9e9ba82f11fc9dc582a3a907904956af60beb725be1c9ad4b030bc5a + languageName: node + linkType: hard + +"yallist@npm:^3.0.2": + version: 3.1.1 + resolution: "yallist@npm:3.1.1" + checksum: 10c0/c66a5c46bc89af1625476f7f0f2ec3653c1a1791d2f9407cfb4c2ba812a1e1c9941416d71ba9719876530e3340a99925f697142989371b72d93b9ee628afd8c1 + languageName: node + linkType: hard + +"yallist@npm:^4.0.0": + version: 4.0.0 + resolution: "yallist@npm:4.0.0" + checksum: 10c0/2286b5e8dbfe22204ab66e2ef5cc9bbb1e55dfc873bbe0d568aa943eb255d131890dfd5bf243637273d31119b870f49c18fcde2c6ffbb7a7a092b870dc90625a + languageName: node + linkType: hard + +"yaml@npm:^1.7.2": + version: 1.10.2 + resolution: "yaml@npm:1.10.2" + checksum: 10c0/5c28b9eb7adc46544f28d9a8d20c5b3cb1215a886609a2fd41f51628d8aaa5878ccd628b755dbcd29f6bb4921bd04ffbc6dcc370689bb96e594e2f9813d2605f + languageName: node + linkType: hard + +"yaml@npm:^2.0.0": + version: 2.5.0 + resolution: "yaml@npm:2.5.0" + 
bin: + yaml: bin.mjs + checksum: 10c0/771a1df083c8217cf04ef49f87244ae2dd7d7457094425e793b8f056159f167602ce172aa32d6bca21f787d24ec724aee3cecde938f6643564117bd151452631 + languageName: node + linkType: hard + +"yocto-queue@npm:^0.1.0": + version: 0.1.0 + resolution: "yocto-queue@npm:0.1.0" + checksum: 10c0/dceb44c28578b31641e13695d200d34ec4ab3966a5729814d5445b194933c096b7ced71494ce53a0e8820685d1d010df8b2422e5bf2cdea7e469d97ffbea306f + languageName: node + linkType: hard + +"yocto-queue@npm:^1.0.0": + version: 1.1.1 + resolution: "yocto-queue@npm:1.1.1" + checksum: 10c0/cb287fe5e6acfa82690acb43c283de34e945c571a78a939774f6eaba7c285bacdf6c90fbc16ce530060863984c906d2b4c6ceb069c94d1e0a06d5f2b458e2a92 + languageName: node + linkType: hard + +"zwitch@npm:^2.0.0": + version: 2.0.4 + resolution: "zwitch@npm:2.0.4" + checksum: 10c0/3c7830cdd3378667e058ffdb4cf2bb78ac5711214e2725900873accb23f3dfe5f9e7e5a06dcdc5f29605da976fc45c26d9a13ca334d6eea2245a15e77b8fc06e + languageName: node + linkType: hard diff --git a/docs/markdoc-component-documentation.md b/docs/markdoc-component-documentation.md new file mode 100644 index 0000000000000..aeb6738bb2b38 --- /dev/null +++ b/docs/markdoc-component-documentation.md @@ -0,0 +1,433 @@ +--- +title: Authoring Component Examples +description: This page shows off what each of the components we're using in our docs looks like. +--- + + + +This page contains examples of all of the native Markdoc nodes and the custom Markdoc tags that we are using in our docs. + +To see the rendered version of these tags, move this file into the `next/pages/getting-started` directory, run the local server for the site, and navigate to the `getting-started/markdoc-component-documentation` page in your browser. + +# Header 1 + +## Header 2 + +### Header 3 + +#### Header 4 + +##### Header 5 + +## Regular text + +When the wire was despatched he had a cup of tea; over it he told me of a diary kept by Jonathan Harker when abroad, and gave me a typewritten copy of it, as also of Mrs. Harker's diary at Whitby. "Take these," he said, "and study them well. When I have returned you will be master of all the facts, and we can then better enter on our inquisition. Keep them safe, for there is in them much of treasure. You will need all your faith, even you who have had such an experience as that of to-day. What is here told," he laid his hand heavily and gravely on the packet of papers as he spoke, "may be the beginning of the end to you and me and many another; or it may sound the knell of the Un-Dead who walk the earth. Read all, I pray you, with the open mind; and if you can add in any way to the story here told do so, for it is all-important. You have kept diary of all these so strange things; is it not so? Yes! Then we shall go through all these together when we meet." He then made ready for his departure, and shortly after drove off to Liverpool Street. I took my way to Paddington, where I arrived about fifteen minutes before the train came in. + +## Complex Header Where You Definitely Want to Use an Anchor Link {% #complex-header %} + +``` +## Complex Header Where You Definitely Want to Use an +Anchor Link {\% #complex-header %} +``` + +# Vanilla Markdown Nodes + +**Bold** + +_Italic_ + +[Links](/docs/nodes) + +Vanilla markdown images work, but we should really be using the image tag because it optimizes the images, makes them more accessible, and handles resizing them for us.
+![Images](/images/concepts/assets/asset-activity-observation.png) + +Lists +- Item 1 +- Item 1 +- Item 1 + +> Quotes + +`Inline code` + +``` +Code fences +``` + +Markdoc doesn't have syntax highlighting out of the box. We're going to need to add that in; a sketch of one way to wire it in follows the examples below. When it gets added in, this Python code should be beautifully highlighted. + +### BASH +```bash +python --version +pip --version +pip install dagster dagster-webserver +``` + +### Python +```python +import random + +def magic_8_ball(): + responses = [ + "As I see it, yes.", + "Ask again later.", + "Better not tell you now.", + "Cannot predict now.", + "Don’t count on it.", + "My reply is no.", + "My sources say no.", + "Outlook not so good.", + "Outlook good.", + "Reply hazy, try again.", + "Yes – definitely.", + "You may rely on it.", + "Absolutely not!", + "Go for it!", + "No way, José!", + "Oh, hell no!", + "Hell yes!", + "Yes, if you believe hard enough.", + "No, but that shouldn't stop you.", + "Why would you even ask that?" + ] + + while True: + question = input("Ask the Magic 8 Ball a question (type 'exit' to quit): ") + if question.lower() == 'exit': + print("Magic 8 Ball has left the chat.") + break + else: + print(random.choice(responses)) + +if __name__ == "__main__": + magic_8_ball() +``` + +### JSON +```json +{ + "name": "Dagster", + "description": "A data orchestrator for machine learning, analytics, and ETL", + "version": "0.13.0", + "license": "Apache-2.0", + "repository": { + "type": "git" + } +} +``` + +### YAML +```yaml +name: Dagster +description: A data orchestrator for machine learning, analytics, and ETL +version: 0.13.0 +license: Apache-2.0 +repository: + type: git +``` + +### TOML +```toml +[package] +name = "dagster" +version = "0.13.0" +description = "A data orchestrator for machine learning, analytics, and ETL" +license = "Apache-2.0" +repository = { type = "git" } +```
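+
+For the curious, here is the promised sketch of one way to wire highlighting in. Treat it as illustrative only: it assumes `prism-react-renderer` (not currently a dependency of the docs site) and a hypothetical `CodeBlock` component, so the names are placeholders rather than a committed design.
+
+```tsx
+// Sketch only: route Markdoc's built-in `fence` node (which already exposes
+// `content` and `language` attributes) to a React component that highlights.
+import {nodes} from '@markdoc/markdoc';
+import Highlight, {defaultProps, Language} from 'prism-react-renderer';
+import * as React from 'react';
+
+// Markdoc node config: reuse the stock fence behavior, swap the renderer.
+export const fence = {
+  ...nodes.fence,
+  render: 'CodeBlock',
+};
+
+export const CodeBlock = ({content, language}: {content: string; language?: string}) => (
+  <Highlight {...defaultProps} code={content.trimEnd()} language={(language ?? 'markup') as Language}>
+    {({className, style, tokens, getLineProps, getTokenProps}) => (
+      <pre className={className} style={style}>
+        {tokens.map((line, i) => (
+          <div key={i} {...getLineProps({line})}>
+            {line.map((token, key) => (
+              <span key={key} {...getTokenProps({token})} />
+            ))}
+          </div>
+        ))}
+      </pre>
+    )}
+  </Highlight>
+);
+```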
+ + +# Custom Markdoc Tags +We've extended markdown with custom tags that let us make the docs richer and more interactive. + +{% warning %} +There are two types of tags. +- Inline tags can be used in the middle of a line of text and look like this: `{% inlineTag %} Inline tag contents {% /inlineTag %}` +- Block tags can be used to wrap around a block of content and look like this: + +`{% blockTag %}` + +`Block tag contents` + +`{% /blockTag %}` + +The docs for each tag call out whether it can be used inline, as a block, or both. + +{% /warning %} + +## Admonitions : Block + +Two types of admonitions are available: warnings and notes. + +`{% warning %} This is a warning {% /warning %}` + +{% warning %} +This is a warning +{% /warning %} + +`{% note %} This is a note {% /note %}` + +{% note %} +This is a note +{% /note %} + +## Button : Block +Buttons are just links styled to stand out, which makes them useful when you want a link to catch the reader's eye. + +We have a few different styles of buttons that all do the same thing. + +### Default + +`{% button link="https://dog.ceo/" %} Click Me! {% /button %}` + +{% button link="https://dog.ceo/" %} +Click Me! +{% /button %} + +### Primary + +`{% button link="https://dog.ceo/" style="primary" %} Click Me! {% /button %}` + +{% button link="https://dog.ceo/" style="primary" %} +Click Me! +{% /button %} + +### Secondary + +`{% button link="https://dog.ceo/" style="secondary" %} Click Me! {% /button %}` + +{% button link="https://dog.ceo/" style="secondary" %} +Click Me! +{% /button %} + +### Blurple + +`{% button link="https://dog.ceo/" style="blurple" %} Click Me! {% /button %}` + +{% button link="https://dog.ceo/" style="blurple" %} +Click Me! +{% /button %} + +### Button Container : Block +The main use case I've seen for buttons in the docs is setting up multiple styled links in a row. Doing this requires putting the buttons into a `ButtonContainer`. + +{% buttonContainer %} +{% button link="https://dog.ceo/" style="primary" %} +Click Me! +{% /button %} +{% button link="https://dog.ceo/" style="secondary" %} +Click Me! +{% /button %} +{% button link="https://dog.ceo/" style="blurple" %} +Click Me! +{% /button %} +{% /buttonContainer %} + +## Crosses and Checks : Inline +You can invoke a cross with this tag `{% cross /%}` and it looks like this {% cross /%}. You can invoke checks with this tag `{% check /%}` and it looks like this {% check /%}. + +Crosses and checks can also be used in lists: +- {% check /%} Completed task +- {% cross /%} Incomplete task +- {% cross /%} Super Incomplete task + +You can also put crosses and checks into headers. + +### This is a header with a check {% check /%} +Which is pretty neat. + +### This is a header with a cross {% cross /%} +Which is also pretty neat. + +## Images +In the old MDX way of doing things, images were a bit of a pain. You had to either manually specify the height and width of the image or run a script (`make MDX-format`) to do it for you. + +Document preprocessors like this have a number of issues: they are slow, error-prone, and make edits to the docs more difficult. + +To that end, I've opted to extend the default Markdoc image node to automatically determine the height and width of the image during the transform step and use that to instantiate the image using the `Next/Image` component. Doing this allows us to retain the benefits of our current make script approach while also preserving the easy authoring experience of vanilla markdown. + +So, the default way to use images is like this: + +`![Alt Text Goes Here](/images/concepts/assets/asset-activity-observation.png)` + +and it outputs like this: + +![Alt Text Goes Here](/images/concepts/assets/asset-activity-observation.png) + +The default markdown syntax is effectively the same as using this tag manually: `{% image src="/images/concepts/assets/asset-activity-observation.png" alt="Text go here" /%}` + +Which yields this: + +{% image src="/images/concepts/assets/asset-activity-observation.png" alt="Text go here" /%} + +You can also specify the width and height of the image like this: + +`{% image src="/images/concepts/assets/asset-activity-observation.png" width=1758 height=1146 alt="Text go here" /%}` + +Which yields this: + +{% image src="/images/concepts/assets/asset-activity-observation.png" width=1758 height=1146 alt="Text go here" /%} + +The cool part about all of this is that it removes the need to run `make MDX-format` for images, since sizing happens as part of page rendering rather than as a batch text preprocess performed on the docs.
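+
+For anyone who ends up maintaining this, here is a rough sketch of what that extended image node can look like. The specifics are assumptions made for illustration: it presumes the `image-size` package, images resolved from the `public/` directory, and an `Image` component that wraps `next/image`; it is not necessarily the code as shipped.
+
+```tsx
+// Sketch only: measure the image on disk at transform time so authors never
+// have to hand-maintain width/height. Paths and names here are assumptions.
+import path from 'path';
+
+import {Tag, nodes} from '@markdoc/markdoc';
+import type {Config, Node} from '@markdoc/markdoc';
+import sizeOf from 'image-size';
+
+export const image = {
+  ...nodes.image, // keep the built-in `src`, `alt`, and `title` attributes
+  render: 'Image',
+  transform(node: Node, config: Config) {
+    const attributes = node.transformAttributes(config);
+    const file = path.join(process.cwd(), 'public', String(attributes.src));
+    const {width, height} = sizeOf(file);
+    // Measured dimensions are spread first so explicit attributes still win.
+    return new Tag('Image', {width, height, ...attributes});
+  },
+};
+```
+
+Because the measured dimensions are spread first, an author-supplied `width`/`height` takes precedence, which is what would keep the manual form shown above working.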
+ +## Badges : Inline + +`{% badge text="Badgey the Badger" /%}` lets you put a custom badge onto the page, like this: {% badge text="Badgey the Badger" /%} + +### API Status Badges +We've also got a bunch of badges that you can use to indicate the level of support an API or feature has. + +There are three types of badges: + +- `{% experimental /%}` {% experimental /%} +- `{% deprecated /%}` {% deprecated /%} +- `{% legacy /%}` {% legacy /%} + +## Code Reference Links : Block + +Code reference links let you point to a specific file in the codebase. They're useful for linking to examples or reference implementations. + +`{% codeReferenceLink filePath="examples/deploy_ecs" /%}` + +{% codeReferenceLink filePath="examples/deploy_ecs" /%} + +## Reference Tables : Block + +{% referenceTable %} +{% referenceTableItem propertyName="isolated_agents.enabled" %} +The cool thing about these tables, and the thing that will spare people working with them a lot of grief, is that they support vanilla markdown and custom tags, while the MDX implementation only supported HTML. + +They still aren't the best thing to work with, but this is a night-and-day difference from having to keep track of raw HTML while authoring. +{% /referenceTableItem %} + +{% referenceTableItem propertyName="Item 2" %} + +**Bold** + +_Italic_ + +[Links](/docs/nodes) + +Vanilla markdown images work, but we should really be using the image tag because it optimizes the images, makes them more accessible, and handles resizing them for us. + + +Lists +- Item 1 +- Item 1 +- Item 1 + +> Quotes + +`Inline code` + +``` +Code fences +``` +{% /referenceTableItem %} + +{% /referenceTable %} + +## Article Lists : Block + +Authors can use article lists. + +They work like this: +`{% articleList %}` + + `{% articleListItem title="Software-defined Assets" href="/concepts/assets/software-defined-assets" /%}` + + `{% articleListItem title="Graph-backed Assets" href="/concepts/assets/graph-backed-assets" /%}` + + `{% articleListItem title="Multi-assets" href="/concepts/assets/multi-assets" /%}` + + `{% articleListItem title="Asset jobs" href="/concepts/assets/asset-jobs" /%}` + + `{% articleListItem title="Asset observations" href="/concepts/assets/asset-observations" /%}` + + `{% articleListItem title="Asset selection syntax" href="/concepts/assets/asset-selection-syntax" /%}` + + `{% articleListItem title="Asset checks" href="/concepts/assets/asset-checks" /%}` + + `{% articleListItem title="External assets (Experimental)" href="/concepts/assets/external-assets" /%}` + +`{% /articleList %}` + +{% articleList %} + {% articleListItem title="Software-defined Assets" href="/concepts/assets/software-defined-assets" /%} + {% articleListItem title="Graph-backed Assets" href="/concepts/assets/graph-backed-assets" /%} + {% articleListItem title="Multi-assets" href="/concepts/assets/multi-assets" /%} + {% articleListItem title="Asset jobs" href="/concepts/assets/asset-jobs" /%} + {% articleListItem title="Asset observations" href="/concepts/assets/asset-observations" /%} + {% articleListItem title="Asset selection syntax" href="/concepts/assets/asset-selection-syntax" /%} + {% articleListItem title="Asset checks" href="/concepts/assets/asset-checks" /%} + {% articleListItem title="External assets (Experimental)" href="/concepts/assets/external-assets" /%} +{% /articleList %} + +## CodeSnippets : Block + +The `CodeSnippet` component allows you to easily include code snippets from your project files into your documentation. Here are various ways to use it, along with examples: + +### Basic File Retrieval +This example shows how to include an entire file as a code snippet. It's useful when you want to showcase a complete file without any modifications. + +`{% codeSnippet file="concepts/assets/asset_group_argument.py" lang="python" /%}` + +{% codeSnippet file="concepts/assets/asset_group_argument.py" lang="python" /%} + +### Specific Line Range +You can specify exact lines to include from a file. This is helpful when you want to focus on a particular section of code without showing the entire file.
+ +`{% codeSnippet file="concepts/assets/asset_group_argument.py" lang="python" lines="5-15" /%}` + +{% codeSnippet file="concepts/assets/asset_group_argument.py" lang="python" lines="5-15" /%} + +### Multiple Line Ranges +For more complex scenarios, you can include multiple, non-contiguous line ranges. This allows you to showcase different parts of a file while skipping irrelevant sections. + +`{% codeSnippet file="concepts/assets/asset_group_argument.py" lang="python" lines="1-5,10-15" /%}` + +{% codeSnippet file="concepts/assets/asset_group_argument.py" lang="python" lines="1-5,10-15" /%} + +### Start After a Specific String +This option lets you start the snippet after a specific string in the file. It's useful for beginning your snippet at a particular point, like after a comment or function definition. + +`{% codeSnippet file="concepts/assets/asset_group_argument.py" lang="python" startafter="# start_example" /%}` + +{% codeSnippet file="concepts/assets/asset_group_argument.py" lang="python" startafter="# start_example" /%} + +### End Before a Specific String +Similar to `startafter`, this option lets you end the snippet before a specific string. It's helpful for showing code up to a certain point. + +`{% codeSnippet file="concepts/assets/asset_group_argument.py" lang="python" endbefore="# end_example" /%}` + +{% codeSnippet file="concepts/assets/asset_group_argument.py" lang="python" endbefore="# end_example" /%} + +### Combine Start and End +You can use both `startafter` and `endbefore` to extract a specific section of code between two markers. This is great for showcasing a particular function or code block. + +`{% codeSnippet file="concepts/assets/asset_group_argument.py" lang="python" startafter="# start_example" endbefore="# end_example" /%}` + +{% codeSnippet file="concepts/assets/asset_group_argument.py" lang="python" startafter="# start_example" endbefore="# end_example" /%} + +### Dedenting +The `dedent` option allows you to remove a specified number of leading spaces from each line. This is useful for adjusting the indentation of your snippet to match your documentation's formatting. + +`{% codeSnippet file="concepts/assets/asset_group_argument.py" lang="python" dedent=4 /%}` + +{% codeSnippet file="concepts/assets/asset_group_argument.py" lang="python" dedent=4 /%} + +### Disable Trimming +By default, the component trims whitespace from the beginning and end of the snippet. You can disable this behavior if you need to preserve exact whitespace. + +`{% codeSnippet file="concepts/assets/asset_group_argument.py" lang="python" trim=false /%}` + +{% codeSnippet file="concepts/assets/asset_group_argument.py" lang="python" trim=false /%} + +### Combining Multiple Parameters +You can combine multiple parameters for fine-grained control over your snippets. This example shows how to select specific lines, start after a marker, dedent, and trim the result. + +`{% codeSnippet file="concepts/assets/asset_group_argument.py" lang="python" lines="5-15" startafter="# start_example" dedent=4 trim=true /%}` + +{% codeSnippet file="concepts/assets/asset_group_argument.py" lang="python" lines="5-15" startafter="# start_example" dedent=4 trim=true /%} + +By using these options, you can flexibly include and format code snippets to best suit your needs. 
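+
+Since several of these options interact, here is an illustrative sketch of the extraction logic a component like this might run. Everything in it, from the function name to the step ordering, is an assumption made for the example, not the component's actual source.
+
+```tsx
+// Hypothetical sketch of how the CodeSnippet props could be applied in order.
+type SnippetOptions = {
+  lines?: string; // e.g. "1-5,10-15" (1-indexed, inclusive)
+  startafter?: string;
+  endbefore?: string;
+  dedent?: number;
+  trim?: boolean; // defaults to true
+};
+
+export function extractSnippet(source: string, opts: SnippetOptions): string {
+  let selected = source.split('\n');
+
+  // `startafter` / `endbefore` narrow the window to the text between markers.
+  if (opts.startafter) {
+    const start = selected.findIndex((line) => line.includes(opts.startafter!));
+    selected = selected.slice(start + 1); // if the marker is missing, keep everything
+  }
+  if (opts.endbefore) {
+    const end = selected.findIndex((line) => line.includes(opts.endbefore!));
+    if (end !== -1) {
+      selected = selected.slice(0, end);
+    }
+  }
+
+  // `lines` keeps only the requested ranges, relative to the current window.
+  if (opts.lines) {
+    const window = selected;
+    selected = opts.lines.split(',').flatMap((range) => {
+      const [first, last = first] = range.split('-').map(Number);
+      return window.slice(first - 1, last);
+    });
+  }
+
+  // `dedent` strips a fixed number of leading characters (assumed to be spaces).
+  if (opts.dedent) {
+    selected = selected.map((line) => line.slice(opts.dedent));
+  }
+
+  const text = selected.join('\n');
+  return opts.trim === false ? text : text.trim();
+}
+```
+
+Under this reading, `startafter`/`endbefore` narrow the file first and `lines` then applies to the narrowed window, which is one consistent way to make the "Combining Multiple Parameters" example above behave; whether the real component orders things the same way is worth checking against its source.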
\ No newline at end of file diff --git a/docs/next/.versioned_content/_versions_with_static_links.json b/docs/next/.versioned_content/_versions_with_static_links.json index cd40b1ddcb750..723c22d4f86de 100644 --- a/docs/next/.versioned_content/_versions_with_static_links.json +++ b/docs/next/.versioned_content/_versions_with_static_links.json @@ -414,5 +414,161 @@ { "url": "https://release-1-6-2.dagster.dagster-docs.io/", "version": "1.6.2" + }, + { + "url": "https://release-1-6-3.dagster.dagster-docs.io/", + "version": "1.6.3" + }, + { + "url": "https://release-1-6-4.dagster.dagster-docs.io/", + "version": "1.6.4" + }, + { + "url": "https://release-1-6-5.dagster.dagster-docs.io/", + "version": "1.6.5" + }, + { + "url": "https://release-1-6-6.dagster.dagster-docs.io/", + "version": "1.6.6" + }, + { + "url": "https://release-1-6-7.dagster.dagster-docs.io/", + "version": "1.6.7" + }, + { + "url": "https://release-1-6-8.dagster.dagster-docs.io/", + "version": "1.6.8" + }, + { + "url": "https://release-1-6-9.dagster.dagster-docs.io/", + "version": "1.6.9" + }, + { + "url": "https://release-1-6-10.dagster.dagster-docs.io/", + "version": "1.6.10" + }, + { + "url": "https://release-1-6-11.dagster.dagster-docs.io/", + "version": "1.6.11" + }, + { + "url": "https://release-1-6-12.dagster.dagster-docs.io/", + "version": "1.6.12" + }, + { + "url": "https://release-1-6-13.dagster.dagster-docs.io/", + "version": "1.6.13" + }, + { + "url": "https://release-1-6-14.dagster.dagster-docs.io/", + "version": "1.6.14" + }, + { + "url": "https://release-1-7-0.dagster.dagster-docs.io/", + "version": "1.7.0" + }, + { + "url": "https://release-1-7-1.dagster.dagster-docs.io/", + "version": "1.7.1" + }, + { + "url": "https://release-1-7-2.dagster.dagster-docs.io/", + "version": "1.7.2" + }, + { + "url": "https://release-1-7-3.dagster.dagster-docs.io/", + "version": "1.7.3" + }, + { + "url": "https://release-1-7-4.dagster.dagster-docs.io/", + "version": "1.7.4" + }, + { + "url": "https://release-1-7-5.dagster.dagster-docs.io/", + "version": "1.7.5" + }, + { + "url": "https://release-1-7-6.dagster.dagster-docs.io/", + "version": "1.7.6" + }, + { + "url": "https://release-1-7-7.dagster.dagster-docs.io/", + "version": "1.7.7" + }, + { + "url": "https://release-1-7-8.dagster.dagster-docs.io/", + "version": "1.7.8" + }, + { + "url": "https://release-1-7-9.dagster.dagster-docs.io/", + "version": "1.7.9" + }, + { + "url": "https://release-1-7-10.dagster.dagster-docs.io/", + "version": "1.7.10" + }, + { + "url": "https://release-1-7-11.dagster.dagster-docs.io/", + "version": "1.7.11" + }, + { + "url": "https://release-1-7-12.dagster.dagster-docs.io/", + "version": "1.7.12" + }, + { + "url": "https://release-1-7-13.dagster.dagster-docs.io/", + "version": "1.7.13" + }, + { + "url": "https://release-1-7-14.dagster.dagster-docs.io/", + "version": "1.7.14" + }, + { + "url": "https://release-1-7-15.dagster.dagster-docs.io/", + "version": "1.7.15" + }, + { + "url": "https://release-1-7-16.dagster.dagster-docs.io/", + "version": "1.7.16" + }, + { + "url": "https://release-1-8-0.dagster.dagster-docs.io/", + "version": "1.8.0" + }, + { + "url": "https://release-1-8-1.dagster.dagster-docs.io/", + "version": "1.8.1" + }, + { + "url": "https://release-1-8-2.dagster.dagster-docs.io/", + "version": "1.8.2" + }, + { + "url": "https://release-1-8-3.dagster.dagster-docs.io/", + "version": "1.8.3" + }, + { + "url": "https://release-1-8-4.dagster.dagster-docs.io/", + "version": "1.8.4" + }, + { + "url": 
"https://release-1-8-5.dagster.dagster-docs.io/", + "version": "1.8.5" + }, + { + "url": "https://release-1-8-6.dagster.dagster-docs.io/", + "version": "1.8.6" + }, + { + "url": "https://release-1-8-7.dagster.dagster-docs.io/", + "version": "1.8.7" + }, + { + "url": "https://release-1-8-8.dagster.dagster-docs.io/", + "version": "1.8.8" + }, + { + "url": "https://release-1-8-9.dagster.dagster-docs.io/", + "version": "1.8.9" } ] \ No newline at end of file diff --git a/docs/next/components/FeedbackModal.tsx b/docs/next/components/FeedbackModal.tsx index 53ac38d78b897..222dd1186757e 100644 --- a/docs/next/components/FeedbackModal.tsx +++ b/docs/next/components/FeedbackModal.tsx @@ -40,7 +40,7 @@ const FeedbackModal = ({isOpen, closeFeedback}: {isOpen: boolean; closeFeedback: return (
      { } lastScrollY.current = window.scrollY; - - if (window.scrollY > 0) { - setIsCollapsed(true); - } else { - setIsCollapsed(false); - } + setIsCollapsed(window.scrollY > 0); }; document.addEventListener('scroll', handler); @@ -78,7 +74,9 @@ const Header = ({openMobileDocsMenu}) => { href="https://dagster.io" className="flex-shrink-0 flex items-center z-50 w-9/12 justify-center lg:justify-start lg:w-3/12" > - { )} > - Platform - - - Cloud{' '} -
      - New -
      -
      - - Pricing + Docs Blog Community - Docs + University
      { )} > - - - +
      @@ -165,7 +156,7 @@ const Header = ({openMobileDocsMenu}) => { href="https://dagster.cloud/signup" className="hidden 3xl:inline-flex px-4 py-1 border border-blurple bg-blurple cursor-pointer hover:bg-blurple-darker text-white gap-2 leading-8 items-center rounded-full whitespace-nowrap transition" > - Try Dagster Cloud + Try Dagster+
      @@ -211,46 +202,34 @@ const Header = ({openMobileDocsMenu}) => { > diff --git a/docs/next/components/Icons.tsx b/docs/next/components/Icons.tsx index 215c26e31cbb4..ba3be9b61e3d6 100644 --- a/docs/next/components/Icons.tsx +++ b/docs/next/components/Icons.tsx @@ -408,9 +408,21 @@ const AboutIcon = ( ); +const PlusIcon = ( + + + +); + const Icons = { // This should be an append only list. Do not remove an icon here, since - // an older version of the site coul dbe depending on it. + // an older version of the site could be depending on it. BookOpen: BookOpenIcon, Menu: MenuIcon, AcademicCap: AcademicCapIcon, @@ -438,6 +450,7 @@ const Icons = { Community: CommunityIcon, Reference: ReferenceIcon, About: AboutIcon, + Plus: PlusIcon, }; export default Icons; diff --git a/docs/next/components/PagePagination.tsx b/docs/next/components/PagePagination.tsx index d7f543a25efbe..32413645e1d8f 100644 --- a/docs/next/components/PagePagination.tsx +++ b/docs/next/components/PagePagination.tsx @@ -135,24 +135,22 @@ export function PagePagination(props: {currentPageIndex: number; totalPageCount: href={{ query: {...query, page: currentPageIndex + 2}, }} + className="inline-flex items-center border-t-2 border-transparent pl-1 pt-4 text-sm font-medium text-gray-500 hover:border-gray-300 hover:text-gray-700" > - - Next - {/* Heroicon name: arrow-narrow-right */} - - + Next{/* Heroicon name: arrow-narrow-right */} + )} @@ -172,16 +170,13 @@ function PaginationItem(props: {targetIndex: number; isCurrentPage?: boolean}) { href={{ query: {...query, page: targetIndex + 1}, }} + className={cx({ + 'inline-flex items-center border-t-2 border-transparent px-4 pt-4 text-sm font-medium text-gray-500 hover:border-gray-300 hover:text-gray-700': + true, + 'border-indigo-500 text-indigo-600': isCurrentPage, + })} > - - {targetIndex + 1} - + {targetIndex + 1} ); } diff --git a/docs/next/components/Sidebar.tsx b/docs/next/components/Sidebar.tsx index d446de63ba042..639dd418f2c62 100644 --- a/docs/next/components/Sidebar.tsx +++ b/docs/next/components/Sidebar.tsx @@ -86,9 +86,9 @@ const MenuItem = React.forwardRef - {itemContents} - + {rightIcon()} ); - - if (item.isExternalLink) { - return linkElement; - } - - return ( - - {linkElement} - - ); }, ); diff --git a/docs/next/components/markdoc/ArticleList.tsx b/docs/next/components/markdoc/ArticleList.tsx new file mode 100644 index 0000000000000..0127a710c81f3 --- /dev/null +++ b/docs/next/components/markdoc/ArticleList.tsx @@ -0,0 +1,42 @@ +import {unpackText} from 'util/unpackText'; + +import NextLink from 'next/link'; + +import Link from '../Link'; + +export const ArticleList = ({children}) => { + return ( +
+export const ArticleList = ({children}) => {
+  return (
+
+
+
+        {unpackText(children)}
+
+
+  );
+};
+
+export const ArticleListItem = ({title, href}) => {
+  return (
+
+      {href.startsWith('http') ? (
+
+          {title}
+
+      ) : (
+        {title}
+      )}
+
+  );
+};
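+
+// Example usage (an illustrative sketch; the titles and hrefs below are
+// hypothetical):
+//
+//   <ArticleList>
+//     <ArticleListItem title="Installing Dagster" href="/getting-started/install" />
+//     <ArticleListItem title="Dagster University" href="https://courses.dagster.io" />
+//   </ArticleList>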
diff --git a/docs/next/components/markdoc/Badges.tsx b/docs/next/components/markdoc/Badges.tsx
new file mode 100644
index 0000000000000..c8b314ef2f55c
--- /dev/null
+++ b/docs/next/components/markdoc/Badges.tsx
@@ -0,0 +1,36 @@
+import {getColorForString} from '../../util/getColorForString';
+
+export const Badge = ({text}) => {
+  const colors = getColorForString(text);
+  return (
+
+      {text}
+
+  );
+};
+
+export const Experimental = () => {
+  return (
+
+      (Experimental)
+
+  );
+};
+
+export const Deprecated = () => {
+  return (
+
+      (Deprecated)
+
+  );
+};
+
+export const Legacy = () => {
+  return (
+
+      (Legacy)
+
+  );
+};
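+
+// Example usage (illustrative; the badge text is arbitrary, since
+// getColorForString derives a color pair from whatever string it is given):
+//
+//   <Badge text="beta" />
+//   <Experimental />
+//   <Deprecated />
+//   <Legacy />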
diff --git a/docs/next/components/markdoc/Button.tsx b/docs/next/components/markdoc/Button.tsx
new file mode 100644
index 0000000000000..0f6bb51776369
--- /dev/null
+++ b/docs/next/components/markdoc/Button.tsx
@@ -0,0 +1,40 @@
+import {unpackText} from 'util/unpackText';
+
+import cx from 'classnames';
+import * as React from 'react';
+
+export const ButtonContainer = ({children}: {children: any}) => {
+  const buttons = React.Children.toArray(children);
+  return (
+
+      {...buttons}
+
+  );
+};
+
+export const Button = ({
+  link,
+  style = 'primary',
+  children,
+}: {
+  children: any;
+  link: string;
+  style?: 'primary' | 'secondary' | 'blurple';
+}) => {
+  return (
+
+  );
+};
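+
+// Example usage (illustrative; the link targets and labels are hypothetical):
+//
+//   <ButtonContainer>
+//     <Button link="/getting-started">Get started</Button>
+//     <Button link="https://dagster.io" style="secondary">Learn more</Button>
+//   </ButtonContainer>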
diff --git a/docs/next/components/markdoc/Callouts.tsx b/docs/next/components/markdoc/Callouts.tsx
index aba9532fa2aba..f042de67aca6c 100644
--- a/docs/next/components/markdoc/Callouts.tsx
+++ b/docs/next/components/markdoc/Callouts.tsx
@@ -1,4 +1,4 @@
-// import * as React from 'react';
+import * as React from 'react';
 
 import Icons from '../Icons';
 
@@ -16,14 +16,24 @@ const ADMONITION_STYLES = {
     icon: Icons.About,
   },
 };
+
+const applyTextStyles = (children, colors) => {
+  return React.Children.map(children, (child) => {
+    const existingStyles = child.props.className || '';
+    const newStyles = `text-sm text-${colors.text} ${existingStyles}`;
+    return React.cloneElement(child, {className: newStyles});
+  });
+};
+
 const Admonition = ({style, children}) => {
   const {colors, icon} = ADMONITION_STYLES[style];
   return (
-
-
+
+
+        {/* Make container for the svg element that aligns it with the top right of the parent flex container */}
         {icon && icon}
-
-      {children}
+
+      {applyTextStyles(children, colors)}
diff --git a/docs/next/components/markdoc/CheckCross.tsx b/docs/next/components/markdoc/CheckCross.tsx
new file mode 100644
index 0000000000000..37b53ac1fdca1
--- /dev/null
+++ b/docs/next/components/markdoc/CheckCross.tsx
@@ -0,0 +1,33 @@
+export const Check = () => {
+  return (
+
+
+
+  );
+};
+
+export const Cross = () => {
+  return (
+
+
+
+  );
+};
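+
+// Example usage (illustrative; `isSupported` is a hypothetical flag, e.g. for
+// a cell in a feature-support table):
+//
+//   {isSupported ? <Check /> : <Cross />}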
diff --git a/docs/next/components/markdoc/CodeReferenceLink.tsx b/docs/next/components/markdoc/CodeReferenceLink.tsx
new file mode 100644
index 0000000000000..43317855278ed
--- /dev/null
+++ b/docs/next/components/markdoc/CodeReferenceLink.tsx
@@ -0,0 +1,25 @@
+import {unpackText} from 'util/unpackText';
+import {LATEST_VERSION} from 'util/version';
+
+import React from 'react';
+
+export const CodeReferenceLink = ({filePath, isInline, children}) => {
+  const url = `https://github.com/dagster-io/dagster/tree/${LATEST_VERSION}/${filePath}`;
+
+  if (isInline) {
+    return <a href={url}>{unpackText(children)}</a>;
+  } else {
+    return (
+
+
+
+
+
+        You can find the code for this example on GitHub
+
+
+    );
+  }
+};
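+
+// Example usage (illustrative; the filePath is hypothetical and is resolved
+// relative to the root of the dagster repo at LATEST_VERSION):
+//
+//   <CodeReferenceLink filePath="examples/deploy_k8s" isInline={false}>
+//     ...
+//   </CodeReferenceLink>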
diff --git a/docs/next/components/markdoc/CodeSnippet.tsx b/docs/next/components/markdoc/CodeSnippet.tsx
new file mode 100644
index 0000000000000..3f4b3df00542d
--- /dev/null
+++ b/docs/next/components/markdoc/CodeSnippet.tsx
@@ -0,0 +1,58 @@
+import React, {useState, useEffect} from 'react';
+
+import {Fence} from './FencedCodeBlock';
+
+interface CodeSnippetProps {
+  file: string;
+  lang: string;
+  lines?: string;
+  startafter?: string;
+  endbefore?: string;
+  dedent?: number;
+  trim?: boolean;
+}
+
+const API_BASE_URL = process.env.NEXT_PUBLIC_API_BASE_URL || '';
+
+const fetchSnippet = async (params: CodeSnippetProps) => {
+  const queryParams = new URLSearchParams({
+    file: params.file,
+    ...(params.lines && {lines: params.lines}),
+    ...(params.startafter && {startafter: params.startafter}),
+    ...(params.endbefore && {endbefore: params.endbefore}),
+    ...(params.dedent && {dedent: params.dedent.toString()}),
+    ...(params.trim !== undefined && {trim: params.trim.toString()}),
+  });
+
+  const response = await fetch(`${API_BASE_URL}/api/code-snippet?${queryParams}`);
+  if (!response.ok) {
+    throw new Error('Failed to fetch snippet');
+  }
+  return response.text();
+};
+
+export const CodeSnippet: React.FC<CodeSnippetProps> = (props) => {
+  const [snippet, setSnippet] = useState('');
+  const [isLoading, setIsLoading] = useState(true);
+  const [error, setError] = useState<string | null>(null);
+
+  useEffect(() => {
+    const fetchData = async () => {
+      try {
+        setIsLoading(true);
+        const data = await fetchSnippet(props);
+        setSnippet(data);
+      } catch (err) {
+        setError(err.message);
+      } finally {
+        setIsLoading(false);
+      }
+    };
+
+    fetchData();
+  }, [props]);
+
+  if (isLoading) return <div>Loading snippet...</div>;
+  if (error) return <div>Error: {error}</div>;
+
+  return <Fence data-language={props.lang}>{snippet}</Fence>;
+};
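+
+// Example usage (illustrative; the file and markers are hypothetical and must
+// exist on the snippet API's side). Note that the effect above re-runs
+// whenever the props object identity changes:
+//
+//   <CodeSnippet
+//     file="concepts/assets/asset_dependencies.py"
+//     lang="python"
+//     startafter="start_marker"
+//     endbefore="end_marker"
+//     trim={true}
+//   />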
diff --git a/docs/next/components/markdoc/FencedCodeBlock.tsx b/docs/next/components/markdoc/FencedCodeBlock.tsx
new file mode 100644
index 0000000000000..dd914407c931b
--- /dev/null
+++ b/docs/next/components/markdoc/FencedCodeBlock.tsx
@@ -0,0 +1,69 @@
+import {Transition} from '@headlessui/react';
+import Prism from 'prismjs';
+import React from 'react';
+import 'prismjs/components/prism-python';
+import {useCopyToClipboard} from 'react-use';
+
+Prism.manual = true;
+
+export const Fence = (props) => {
+  const text = props.children;
+  const language = props['data-language'];
+  const [copied, setCopied] = React.useState(false);
+  const [state, copy] = useCopyToClipboard();
+
+  React.useEffect(() => {
+    Prism.highlightAll();
+  }, [text]);
+
+  const copyToClipboard = React.useCallback(() => {
+    if (typeof text === 'string') {
+      copy(text);
+      setCopied(true);
+      setTimeout(() => {
+        setCopied(false);
+      }, 3000);
+    }
+  }, [copy, text]);
+
+  return (
+
+        {text}
+
+
+      {copied ? (
+
+          Copied
+
+      ) : (
+
+
+
+      )}
+
+
+  );
+};
diff --git a/docs/next/components/markdoc/Image.tsx b/docs/next/components/markdoc/Image.tsx
new file mode 100644
index 0000000000000..ec44c5fd52d54
--- /dev/null
+++ b/docs/next/components/markdoc/Image.tsx
@@ -0,0 +1,45 @@
+import NextImage from 'next/image';
+import {useEffect, useState} from 'react';
+import Zoom from 'react-medium-image-zoom';
+
+export const MyImage = ({children, ...props}) => {
+  // Manually-set dimensions for images arrive on props.width and props.height.
+  // Images without manual dimensions use the dimensions state instead, which is
+  // populated with the natural width and height in a client-side effect below.
+  const [dimensions, setDimensions] = useState({width: 0, height: 0});
+
+  /* Only version images when all conditions are met:
+   * - use component in mdx
+   * - on non-master version
+   * - in public/images/ dir
+   */
+  const {src} = props;
+
+  useEffect(() => {
+    const img = new Image();
+    img.src = src;
+    img.onload = () => {
+      setDimensions({width: img.width, height: img.height});
+    };
+  }, [src]);
+
+  if (!src.startsWith('/images/')) {
+    return (
+
+
+
+    );
+  }
+  return (
+
+
+
+
+
+  );
+};
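+
+// Example usage (illustrative; the image path is hypothetical). Per the
+// comments above, images under /images/ are expected to carry explicit
+// width/height props, while other images are measured client-side:
+//
+//   <MyImage src="/images/concepts/asset-graph.png" width={1152} height={666} alt="Asset graph" />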
diff --git a/docs/next/components/markdoc/ReferenceTable.tsx b/docs/next/components/markdoc/ReferenceTable.tsx
new file mode 100644
index 0000000000000..d9482aa91ab16
--- /dev/null
+++ b/docs/next/components/markdoc/ReferenceTable.tsx
@@ -0,0 +1,37 @@
+import {unpackText} from 'util/unpackText';
+
+import React from 'react';
+
+export const ReferenceTable = ({children}) => {
+  return (
+
+
+
+          Property
+          Description
+
+
+      {children}
+
+  );
+};
+
+export const ReferenceTableItem = ({propertyName, children}) => {
+  return (
+
+      {propertyName}
+      {unpackText(children)}