diff --git a/.github/workflows/cd.yml b/.github/workflows/cd.yml index dfc528f5..46f0f9b2 100644 --- a/.github/workflows/cd.yml +++ b/.github/workflows/cd.yml @@ -75,6 +75,8 @@ jobs: - name: Test run: | + echo "setuptools<72" > constraints.txt + poetry run pip install --constraint constraints.txt pyhive==0.7.0 poetry install -E all poetry run coverage run --source=metaphor -m pytest poetry run coverage xml diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 74536ebb..f3c77252 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -43,6 +43,8 @@ jobs: - name: Lint & Type Check run: | + echo "setuptools<72" > constraints.txt + poetry run pip install --constraint constraints.txt pyhive==0.7.0 poetry install -E all poetry run flake8 poetry run black --check . @@ -50,10 +52,9 @@ jobs: poetry run mypy . --explicit-package-bases poetry run bandit -r . -c pyproject.toml - # TODO(SC-14236): Include __init__.py back to coverage after fixing async testing issues - name: Test run: | - poetry run coverage run --source=metaphor --omit='**/__init__.py' -m pytest + poetry run coverage run -m pytest poetry run coverage xml - name: Codecov diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 7dfa8e02..c6e09fce 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -44,7 +44,8 @@ repos: rev: 1.7.8 hooks: - id: bandit - args: ['--skip=B101,B106,B404,B603,B607,B608'] + args: [-c, pyproject.toml] + additional_dependencies: ['bandit[toml]'] - repo: https://github.com/pycqa/flake8 rev: 7.0.0 diff --git a/metaphor/common/entity_id.py b/metaphor/common/entity_id.py index 1a62e28f..864aadc9 100644 --- a/metaphor/common/entity_id.py +++ b/metaphor/common/entity_id.py @@ -121,3 +121,20 @@ def dataset_normalized_name( return normalize_full_dataset_name( ".".join([part for part in [db, schema, table] if part is not None]) ) + + +def parts_to_dataset_entity_id( + platform: DataPlatform, + account: Optional[str], + database: Optional[str] = None, + schema: Optional[str] = None, + table: Optional[str] = None, +) -> EntityId: + """ + converts parts of a dataset, its platform and account into a dataset entity ID + """ + return to_dataset_entity_id( + dataset_normalized_name(database, schema, table), + platform, + account, + ) diff --git a/metaphor/dbt/cloud/client.py b/metaphor/dbt/cloud/client.py index 7f723dfd..1896a8de 100644 --- a/metaphor/dbt/cloud/client.py +++ b/metaphor/dbt/cloud/client.py @@ -15,6 +15,7 @@ class DbtRun(NamedTuple): project_id: int job_id: int run_id: int + environment_id: int def __str__(self) -> str: return f"ID = {self.run_id}, project ID = {self.project_id}, job ID = {self.job_id}" @@ -115,6 +116,7 @@ def get_last_successful_run( project_id=run.get("project_id"), job_id=run.get("job_definition_id"), run_id=run.get("id"), + environment_id=run.get("environment_id"), ) offset += page_size diff --git a/metaphor/dbt/cloud/discovery_api.py b/metaphor/dbt/cloud/discovery_api.py deleted file mode 100644 index 85a20807..00000000 --- a/metaphor/dbt/cloud/discovery_api.py +++ /dev/null @@ -1,100 +0,0 @@ -from datetime import datetime -from typing import Any, Dict, List, Optional - -import requests -from pydantic import BaseModel, Field - -from metaphor.common.entity_id import dataset_normalized_name - - -class DiscoveryTestNode(BaseModel): - uniqueId: str - name: Optional[str] - status: Optional[str] - columnName: Optional[str] - executeCompletedAt: Optional[datetime] - dependsOn: List[str] - - @property - def models(self) -> List[str]: - 
return [x for x in self.dependsOn if x.startswith("model.")] - - -class DiscoveryModelNode(BaseModel): - uniqueId: str - name: Optional[str] - alias: Optional[str] - database: Optional[str] - schema_: Optional[str] = Field(alias="schema") - - @property - def normalized_name(self) -> str: - return dataset_normalized_name(self.database, self.schema_, self.name) - - -class DiscoveryAPI: - """ - A wrapper around dbt cloud's discovery API. - """ - - def __init__(self, url: str, token: str) -> None: - self.url = url - self.token = token - - def _send(self, query: str, variables: Dict[str, Any]): - resp = requests.post( - url=self.url, - headers={ - "authorization": f"Bearer {self.token}", - "content-type": "application/json", - }, - json={"query": query, "variables": variables}, - timeout=15, - ) - return resp.json()["data"] - - def get_all_job_model_names(self, job_id: int): - query = """ -query Models($jobId: BigInt!) { - job(id: $jobId) { - models { - alias - database - name - schema - uniqueId - } - } -} - """ - variables = { - "jobId": job_id, - } - - model_nodes = self._send(query, variables)["job"]["models"] - res: Dict[str, str] = {} - for model_node in model_nodes: - model = DiscoveryModelNode.model_validate(model_node) - res[model.uniqueId] = model.normalized_name - return res - - def get_all_job_tests(self, job_id: int): - query = """ -query Tests($jobId: BigInt!) { - job(id: $jobId) { - tests { - uniqueId - name - status - columnName - executeCompletedAt - dependsOn - } - } -} - """ - variables = { - "jobId": job_id, - } - tests = self._send(query, variables)["job"]["tests"] - return [DiscoveryTestNode.model_validate(test) for test in tests] diff --git a/metaphor/dbt/cloud/discovery_api/__init__.py b/metaphor/dbt/cloud/discovery_api/__init__.py new file mode 100644 index 00000000..447161ef --- /dev/null +++ b/metaphor/dbt/cloud/discovery_api/__init__.py @@ -0,0 +1,5 @@ +from .generated.client import Client as DiscoveryAPIClient + +__all__ = [ + "DiscoveryAPIClient", +] diff --git a/metaphor/dbt/cloud/discovery_api/apollo-codegen-config.json b/metaphor/dbt/cloud/discovery_api/apollo-codegen-config.json new file mode 100644 index 00000000..0432446c --- /dev/null +++ b/metaphor/dbt/cloud/discovery_api/apollo-codegen-config.json @@ -0,0 +1,83 @@ +{ + "schemaNamespace": "MySchema", + "schemaDownload": { + "downloadMethod": { + "introspection": { + "endpointURL": "https://metadata.cloud.getdbt.com/graphql", + "httpMethod": { + "POST": {} + }, + "includeDeprecatedInputValues": false, + "outputFormat": "SDL" + } + }, + "downloadTimeout": 60, + "headers": [], + "outputPath": "./schema.graphql" + }, + "experimentalFeatures": { + "clientControlledNullability": true, + "legacySafelistingCompatibleOperations": true + }, + "operationManifest": { + "generateManifestOnCodeGeneration": false, + "path": "/operation/identifiers/path", + "version": "persistedQueries" + }, + "input": { + "operationSearchPaths": [ + "/search/path/**/*.graphql" + ], + "schemaSearchPaths": [ + "/path/to/schema.graphqls" + ] + }, + "output": { + "operations": { + "absolute": { + "accessModifier": "internal", + "path": "/absolute/path" + } + }, + "schemaTypes": { + "moduleType": { + "embeddedInTarget": { + "accessModifier": "public", + "name": "SomeTarget" + } + }, + "path": "/output/path" + }, + "testMocks": { + "swiftPackage": { + "targetName": "SchemaTestMocks" + } + } + }, + "options": { + "additionalInflectionRules": [ + { + "pluralization": { + "replacementRegex": "animals", + "singularRegex": "animal" + } + } + ], + 
"cocoapodsCompatibleImportStatements": true, + "conversionStrategies": { + "enumCases": "none", + "fieldAccessors": "camelCase", + "inputObjects": "camelCase" + }, + "deprecatedEnumCases": "exclude", + "operationDocumentFormat": [ + "definition" + ], + "pruneGeneratedFiles": false, + "schemaDocumentation": "exclude", + "selectionSetInitializers": { + "localCacheMutations": true + }, + "warningsOnDeprecatedUsage": "exclude" + } +} diff --git a/metaphor/dbt/cloud/discovery_api/ariadne-codegen.toml b/metaphor/dbt/cloud/discovery_api/ariadne-codegen.toml new file mode 100644 index 00000000..36af0639 --- /dev/null +++ b/metaphor/dbt/cloud/discovery_api/ariadne-codegen.toml @@ -0,0 +1,5 @@ +[tool.ariadne-codegen] +schema_path = "schema.graphql" +queries_path = "queries.graphql" +async_client = false +target_package_name = "generated" \ No newline at end of file diff --git a/metaphor/dbt/cloud/discovery_api/codegen.md b/metaphor/dbt/cloud/discovery_api/codegen.md new file mode 100644 index 00000000..87c55f13 --- /dev/null +++ b/metaphor/dbt/cloud/discovery_api/codegen.md @@ -0,0 +1,38 @@ +# Generate GraphQL client code + +## Requirements + +- Python >= 3.9 +- `ariadne-codegen` + +## Usage + +```bash +cd metaphor/dbt/cloud/discovery_api +./codegen.sh +``` + +## Existing files + +### `codegen.sh` + +Run this script to get the schema from DBT's Apollo server, and generate the corresponding GraphQL client code. + +### `queries.graphql` + +The queries we will execute from the extractor class. + +### `apollo-codegen-config.json` + +Copied from [Full Codegen Configuration Example](https://www.apollographql.com/docs/ios/code-generation/codegen-configuration/#full-codegen-configuration-example) on Apollo's site. The only modifications are: + +- `endpointURL` +- `outputPath` + +### `ariadne-codegen.toml` + +Controls the behavior of `ariadne-codegen`. + +### `schema.graphql` + +The upstream DBT GraphQL schema. This file will be downloaded from upstream whenever `codegen.sh` is run. diff --git a/metaphor/dbt/cloud/discovery_api/codegen.sh b/metaphor/dbt/cloud/discovery_api/codegen.sh new file mode 100755 index 00000000..57a529a4 --- /dev/null +++ b/metaphor/dbt/cloud/discovery_api/codegen.sh @@ -0,0 +1,17 @@ +#!/usr/bin/env bash + +# The tool is called `apollo-ios-cli`: https://www.apollographql.com/docs/ios/code-generation/codegen-cli/ +# It does not mean it's iOS only. +APOLLO_IOS_CLI_VERSION=1.14.0 + +wget -c \ + "https://github.com/apollographql/apollo-ios/releases/download/${APOLLO_IOS_CLI_VERSION}/apollo-ios-cli.tar.gz" -O - | \ + tar -xz + +./apollo-ios-cli fetch-schema --path ./apollo-codegen-config.json + +rm -f ./apollo-ios-cli + +poetry run ariadne-codegen --config ariadne-codegen.toml +poetry run black . +poetry run isort . 
\ No newline at end of file diff --git a/metaphor/dbt/cloud/discovery_api/generated/__init__.py b/metaphor/dbt/cloud/discovery_api/generated/__init__.py new file mode 100644 index 00000000..3f5290ba --- /dev/null +++ b/metaphor/dbt/cloud/discovery_api/generated/__init__.py @@ -0,0 +1,156 @@ +# Generated by ariadne-codegen + +from .base_client import BaseClient +from .base_model import BaseModel, Upload +from .client import Client +from .enums import ( + AccessLevel, + AncestorNodeType, + AppliedModelSortField, + FreshnessStatus, + PackageResourceType, + ReleaseVersion, + ResourceNodeType, + RunStatus, + SortDirection, + TestType, + TimePeriod, +) +from .exceptions import ( + GraphQLClientError, + GraphQLClientGraphQLError, + GraphQLClientGraphQLMultiError, + GraphQLClientHttpError, + GraphQLClientInvalidResponseError, +) +from .get_environment_adapter_type import ( + GetEnvironmentAdapterType, + GetEnvironmentAdapterTypeEnvironment, +) +from .get_job_run_macros import ( + GetJobRunMacros, + GetJobRunMacrosJob, + GetJobRunMacrosJobMacros, +) +from .get_job_run_metrics import ( + GetJobRunMetrics, + GetJobRunMetricsJob, + GetJobRunMetricsJobMetrics, + GetJobRunMetricsJobMetricsFilters, +) +from .get_job_run_models import ( + GetJobRunModels, + GetJobRunModelsJob, + GetJobRunModelsJobModels, + GetJobRunModelsJobModelsColumns, + GetJobRunModelsJobModelsRunResults, +) +from .get_job_run_snapshots import ( + GetJobRunSnapshots, + GetJobRunSnapshotsJob, + GetJobRunSnapshotsJobSnapshots, + GetJobRunSnapshotsJobSnapshotsColumns, +) +from .get_job_run_sources import ( + GetJobRunSources, + GetJobRunSourcesJob, + GetJobRunSourcesJobSources, + GetJobRunSourcesJobSourcesColumns, +) +from .get_job_run_tests import GetJobRunTests, GetJobRunTestsJob, GetJobRunTestsJobTests +from .get_macro_arguments import ( + GetMacroArguments, + GetMacroArgumentsEnvironment, + GetMacroArgumentsEnvironmentDefinition, + GetMacroArgumentsEnvironmentDefinitionMacros, + GetMacroArgumentsEnvironmentDefinitionMacrosEdges, + GetMacroArgumentsEnvironmentDefinitionMacrosEdgesNode, + GetMacroArgumentsEnvironmentDefinitionMacrosEdgesNodeArguments, + GetMacroArgumentsEnvironmentDefinitionMacrosPageInfo, +) +from .input_types import ( + AppliedModelSort, + AppliedResourcesFilter, + DefinitionResourcesFilter, + ExposureFilter, + GenericMaterializedFilter, + GroupFilter, + LineageFilter, + MacroDefinitionFilter, + ModelAppliedFilter, + ModelDefinitionFilter, + SourceAppliedFilter, + SourceDefinitionFilter, + TestAppliedFilter, + TestDefinitionFilter, +) + +__all__ = [ + "AccessLevel", + "AncestorNodeType", + "AppliedModelSort", + "AppliedModelSortField", + "AppliedResourcesFilter", + "BaseClient", + "BaseModel", + "Client", + "DefinitionResourcesFilter", + "ExposureFilter", + "FreshnessStatus", + "GenericMaterializedFilter", + "GetEnvironmentAdapterType", + "GetEnvironmentAdapterTypeEnvironment", + "GetJobRunMacros", + "GetJobRunMacrosJob", + "GetJobRunMacrosJobMacros", + "GetJobRunMetrics", + "GetJobRunMetricsJob", + "GetJobRunMetricsJobMetrics", + "GetJobRunMetricsJobMetricsFilters", + "GetJobRunModels", + "GetJobRunModelsJob", + "GetJobRunModelsJobModels", + "GetJobRunModelsJobModelsColumns", + "GetJobRunModelsJobModelsRunResults", + "GetJobRunSnapshots", + "GetJobRunSnapshotsJob", + "GetJobRunSnapshotsJobSnapshots", + "GetJobRunSnapshotsJobSnapshotsColumns", + "GetJobRunSources", + "GetJobRunSourcesJob", + "GetJobRunSourcesJobSources", + "GetJobRunSourcesJobSourcesColumns", + "GetJobRunTests", + "GetJobRunTestsJob", + 
"GetJobRunTestsJobTests", + "GetMacroArguments", + "GetMacroArgumentsEnvironment", + "GetMacroArgumentsEnvironmentDefinition", + "GetMacroArgumentsEnvironmentDefinitionMacros", + "GetMacroArgumentsEnvironmentDefinitionMacrosEdges", + "GetMacroArgumentsEnvironmentDefinitionMacrosEdgesNode", + "GetMacroArgumentsEnvironmentDefinitionMacrosEdgesNodeArguments", + "GetMacroArgumentsEnvironmentDefinitionMacrosPageInfo", + "GraphQLClientError", + "GraphQLClientGraphQLError", + "GraphQLClientGraphQLMultiError", + "GraphQLClientHttpError", + "GraphQLClientInvalidResponseError", + "GroupFilter", + "LineageFilter", + "MacroDefinitionFilter", + "ModelAppliedFilter", + "ModelDefinitionFilter", + "PackageResourceType", + "ReleaseVersion", + "ResourceNodeType", + "RunStatus", + "SortDirection", + "SourceAppliedFilter", + "SourceDefinitionFilter", + "TestAppliedFilter", + "TestDefinitionFilter", + "TestType", + "TimePeriod", + "Upload", +] diff --git a/metaphor/dbt/cloud/discovery_api/generated/base_client.py b/metaphor/dbt/cloud/discovery_api/generated/base_client.py new file mode 100644 index 00000000..e7ffb904 --- /dev/null +++ b/metaphor/dbt/cloud/discovery_api/generated/base_client.py @@ -0,0 +1,211 @@ +# Generated by ariadne-codegen + +import json +from typing import IO, Any, Dict, List, Optional, Tuple, TypeVar, cast + +import httpx +from pydantic import BaseModel +from pydantic_core import to_jsonable_python + +from .base_model import UNSET, Upload +from .exceptions import ( + GraphQLClientGraphQLMultiError, + GraphQLClientHttpError, + GraphQLClientInvalidResponseError, +) + +Self = TypeVar("Self", bound="BaseClient") + + +class BaseClient: + def __init__( + self, + url: str = "", + headers: Optional[Dict[str, str]] = None, + http_client: Optional[httpx.Client] = None, + ) -> None: + self.url = url + self.headers = headers + + self.http_client = http_client if http_client else httpx.Client(headers=headers) + + def __enter__(self: Self) -> Self: + return self + + def __exit__( + self, + exc_type: object, + exc_val: object, + exc_tb: object, + ) -> None: + self.http_client.close() + + def execute( + self, + query: str, + operation_name: Optional[str] = None, + variables: Optional[Dict[str, Any]] = None, + **kwargs: Any, + ) -> httpx.Response: + processed_variables, files, files_map = self._process_variables(variables) + + if files and files_map: + return self._execute_multipart( + query=query, + operation_name=operation_name, + variables=processed_variables, + files=files, + files_map=files_map, + **kwargs, + ) + + return self._execute_json( + query=query, + operation_name=operation_name, + variables=processed_variables, + **kwargs, + ) + + def get_data(self, response: httpx.Response) -> Dict[str, Any]: + if not response.is_success: + raise GraphQLClientHttpError( + status_code=response.status_code, response=response + ) + + try: + response_json = response.json() + except ValueError as exc: + raise GraphQLClientInvalidResponseError(response=response) from exc + + if (not isinstance(response_json, dict)) or ( + "data" not in response_json and "errors" not in response_json + ): + raise GraphQLClientInvalidResponseError(response=response) + + data = response_json.get("data") + errors = response_json.get("errors") + + if errors: + raise GraphQLClientGraphQLMultiError.from_errors_dicts( + errors_dicts=errors, data=data + ) + + return cast(Dict[str, Any], data) + + def _process_variables( + self, variables: Optional[Dict[str, Any]] + ) -> Tuple[ + Dict[str, Any], Dict[str, Tuple[str, IO[bytes], str]], 
Dict[str, List[str]] + ]: + if not variables: + return {}, {}, {} + + serializable_variables = self._convert_dict_to_json_serializable(variables) + return self._get_files_from_variables(serializable_variables) + + def _convert_dict_to_json_serializable( + self, dict_: Dict[str, Any] + ) -> Dict[str, Any]: + return { + key: self._convert_value(value) + for key, value in dict_.items() + if value is not UNSET + } + + def _convert_value(self, value: Any) -> Any: + if isinstance(value, BaseModel): + return value.model_dump(by_alias=True, exclude_unset=True) + if isinstance(value, list): + return [self._convert_value(item) for item in value] + return value + + def _get_files_from_variables( + self, variables: Dict[str, Any] + ) -> Tuple[ + Dict[str, Any], Dict[str, Tuple[str, IO[bytes], str]], Dict[str, List[str]] + ]: + files_map: Dict[str, List[str]] = {} + files_list: List[Upload] = [] + + def separate_files(path: str, obj: Any) -> Any: + if isinstance(obj, list): + nulled_list = [] + for index, value in enumerate(obj): + value = separate_files(f"{path}.{index}", value) + nulled_list.append(value) + return nulled_list + + if isinstance(obj, dict): + nulled_dict = {} + for key, value in obj.items(): + value = separate_files(f"{path}.{key}", value) + nulled_dict[key] = value + return nulled_dict + + if isinstance(obj, Upload): + if obj in files_list: + file_index = files_list.index(obj) + files_map[str(file_index)].append(path) + else: + file_index = len(files_list) + files_list.append(obj) + files_map[str(file_index)] = [path] + return None + + return obj + + nulled_variables = separate_files("variables", variables) + files: Dict[str, Tuple[str, IO[bytes], str]] = { + str(i): (file_.filename, cast(IO[bytes], file_.content), file_.content_type) + for i, file_ in enumerate(files_list) + } + return nulled_variables, files, files_map + + def _execute_multipart( + self, + query: str, + operation_name: Optional[str], + variables: Dict[str, Any], + files: Dict[str, Tuple[str, IO[bytes], str]], + files_map: Dict[str, List[str]], + **kwargs: Any, + ) -> httpx.Response: + data = { + "operations": json.dumps( + { + "query": query, + "operationName": operation_name, + "variables": variables, + }, + default=to_jsonable_python, + ), + "map": json.dumps(files_map, default=to_jsonable_python), + } + + return self.http_client.post(url=self.url, data=data, files=files, **kwargs) + + def _execute_json( + self, + query: str, + operation_name: Optional[str], + variables: Dict[str, Any], + **kwargs: Any, + ) -> httpx.Response: + headers: Dict[str, str] = {"Content-Type": "application/json"} + headers.update(kwargs.get("headers", {})) + + merged_kwargs: Dict[str, Any] = kwargs.copy() + merged_kwargs["headers"] = headers + + return self.http_client.post( + url=self.url, + content=json.dumps( + { + "query": query, + "operationName": operation_name, + "variables": variables, + }, + default=to_jsonable_python, + ), + **merged_kwargs, + ) diff --git a/metaphor/dbt/cloud/discovery_api/generated/base_model.py b/metaphor/dbt/cloud/discovery_api/generated/base_model.py new file mode 100644 index 00000000..a93b416e --- /dev/null +++ b/metaphor/dbt/cloud/discovery_api/generated/base_model.py @@ -0,0 +1,30 @@ +# Generated by ariadne-codegen + +from io import IOBase + +from pydantic import BaseModel as PydanticBaseModel +from pydantic import ConfigDict + + +class UnsetType: + def __bool__(self) -> bool: + return False + + +UNSET = UnsetType() + + +class BaseModel(PydanticBaseModel): + model_config = ConfigDict( + 
populate_by_name=True, + validate_assignment=True, + arbitrary_types_allowed=True, + protected_namespaces=(), + ) + + +class Upload: + def __init__(self, filename: str, content: IOBase, content_type: str): + self.filename = filename + self.content = content + self.content_type = content_type diff --git a/metaphor/dbt/cloud/discovery_api/generated/client.py b/metaphor/dbt/cloud/discovery_api/generated/client.py new file mode 100644 index 00000000..0959d2b5 --- /dev/null +++ b/metaphor/dbt/cloud/discovery_api/generated/client.py @@ -0,0 +1,318 @@ +# Generated by ariadne-codegen +# Source: queries.graphql + +from typing import Any, Dict, Optional, Union + +from .base_client import BaseClient +from .base_model import UNSET, UnsetType +from .get_environment_adapter_type import GetEnvironmentAdapterType +from .get_job_run_macros import GetJobRunMacros +from .get_job_run_metrics import GetJobRunMetrics +from .get_job_run_models import GetJobRunModels +from .get_job_run_snapshots import GetJobRunSnapshots +from .get_job_run_sources import GetJobRunSources +from .get_job_run_tests import GetJobRunTests +from .get_macro_arguments import GetMacroArguments +from .input_types import MacroDefinitionFilter + + +def gql(q: str) -> str: + return q + + +class Client(BaseClient): + def get_job_run_models( + self, job_id: Any, run_id: Any, **kwargs: Any + ) -> GetJobRunModels: + query = gql( + """ + query GetJobRunModels($jobId: BigInt!, $runId: BigInt!) { + job(id: $jobId, runId: $runId) { + models { + alias + columns { + comment + description + meta + name + tags + type + } + compiledCode + compiledSql + database + dependsOn + description + environmentId + materializedType + meta + name + packageName + rawCode + rawSql + runResults { + status + executeCompletedAt + } + schema + tags + uniqueId + } + } + } + """ + ) + variables: Dict[str, object] = {"jobId": job_id, "runId": run_id} + response = self.execute( + query=query, operation_name="GetJobRunModels", variables=variables, **kwargs + ) + data = self.get_data(response) + return GetJobRunModels.model_validate(data) + + def get_job_run_snapshots( + self, job_id: Any, run_id: Any, **kwargs: Any + ) -> GetJobRunSnapshots: + query = gql( + """ + query GetJobRunSnapshots($jobId: BigInt!, $runId: BigInt!) { + job(id: $jobId, runId: $runId) { + snapshots { + alias + columns { + comment + description + index + meta + name + tags + type + } + comment + compiledCode + compiledSql + database + description + environmentId + meta + name + owner + packageName + rawCode + rawSql + schema + tags + uniqueId + } + } + } + """ + ) + variables: Dict[str, object] = {"jobId": job_id, "runId": run_id} + response = self.execute( + query=query, + operation_name="GetJobRunSnapshots", + variables=variables, + **kwargs + ) + data = self.get_data(response) + return GetJobRunSnapshots.model_validate(data) + + def get_job_run_macros( + self, job_id: Any, run_id: Any, **kwargs: Any + ) -> GetJobRunMacros: + query = gql( + """ + query GetJobRunMacros($jobId: BigInt!, $runId: BigInt!) 
{ + job(id: $jobId, runId: $runId) { + macros { + dependsOn + description + environmentId + macroSql + meta + name + packageName + uniqueId + } + } + } + """ + ) + variables: Dict[str, object] = {"jobId": job_id, "runId": run_id} + response = self.execute( + query=query, operation_name="GetJobRunMacros", variables=variables, **kwargs + ) + data = self.get_data(response) + return GetJobRunMacros.model_validate(data) + + def get_job_run_sources( + self, job_id: Any, run_id: Any, **kwargs: Any + ) -> GetJobRunSources: + query = gql( + """ + query GetJobRunSources($jobId: BigInt!, $runId: BigInt!) { + job(id: $jobId, runId: $runId) { + sources { + columns { + description + name + } + database + description + identifier + schema + uniqueId + } + } + } + """ + ) + variables: Dict[str, object] = {"jobId": job_id, "runId": run_id} + response = self.execute( + query=query, + operation_name="GetJobRunSources", + variables=variables, + **kwargs + ) + data = self.get_data(response) + return GetJobRunSources.model_validate(data) + + def get_job_run_metrics( + self, + job_id: Any, + run_id: Union[Optional[Any], UnsetType] = UNSET, + **kwargs: Any + ) -> GetJobRunMetrics: + query = gql( + """ + query GetJobRunMetrics($jobId: BigInt!, $runId: BigInt) { + job(id: $jobId, runId: $runId) { + metrics { + packageName + label + description + dependsOn + uniqueId + timeGrains + timestamp + dimensions + filters { + field + operator + value + } + tags + type + sql + expression + calculation_method + } + } + } + """ + ) + variables: Dict[str, object] = {"jobId": job_id, "runId": run_id} + response = self.execute( + query=query, + operation_name="GetJobRunMetrics", + variables=variables, + **kwargs + ) + data = self.get_data(response) + return GetJobRunMetrics.model_validate(data) + + def get_job_run_tests( + self, job_id: Any, run_id: Any, **kwargs: Any + ) -> GetJobRunTests: + query = gql( + """ + query GetJobRunTests($jobId: BigInt!, $runId: BigInt!) { + job(id: $jobId, runId: $runId) { + tests { + columnName + compiledSql + compiledCode + dependsOn + name + uniqueId + } + } + } + """ + ) + variables: Dict[str, object] = {"jobId": job_id, "runId": run_id} + response = self.execute( + query=query, operation_name="GetJobRunTests", variables=variables, **kwargs + ) + data = self.get_data(response) + return GetJobRunTests.model_validate(data) + + def get_environment_adapter_type( + self, environment_id: Any, **kwargs: Any + ) -> GetEnvironmentAdapterType: + query = gql( + """ + query GetEnvironmentAdapterType($environmentId: BigInt!) 
{ + environment(id: $environmentId) { + adapterType + dbtProjectName + } + } + """ + ) + variables: Dict[str, object] = {"environmentId": environment_id} + response = self.execute( + query=query, + operation_name="GetEnvironmentAdapterType", + variables=variables, + **kwargs + ) + data = self.get_data(response) + return GetEnvironmentAdapterType.model_validate(data) + + def get_macro_arguments( + self, + environment_id: Any, + filter: Union[Optional[MacroDefinitionFilter], UnsetType] = UNSET, + after: Union[Optional[str], UnsetType] = UNSET, + **kwargs: Any + ) -> GetMacroArguments: + query = gql( + """ + query GetMacroArguments($environmentId: BigInt!, $filter: MacroDefinitionFilter, $after: String) { + environment(id: $environmentId) { + definition { + macros(filter: $filter, first: 500, after: $after) { + edges { + node { + arguments { + description + name + type + } + uniqueId + } + } + pageInfo { + hasNextPage + endCursor + } + } + } + } + } + """ + ) + variables: Dict[str, object] = { + "environmentId": environment_id, + "filter": filter, + "after": after, + } + response = self.execute( + query=query, + operation_name="GetMacroArguments", + variables=variables, + **kwargs + ) + data = self.get_data(response) + return GetMacroArguments.model_validate(data) diff --git a/metaphor/dbt/cloud/discovery_api/generated/enums.py b/metaphor/dbt/cloud/discovery_api/generated/enums.py new file mode 100644 index 00000000..eb4102f3 --- /dev/null +++ b/metaphor/dbt/cloud/discovery_api/generated/enums.py @@ -0,0 +1,79 @@ +# Generated by ariadne-codegen +# Source: schema.graphql + +from enum import Enum + + +class AccessLevel(str, Enum): + private = "private" + protected = "protected" + public = "public" + + +class AncestorNodeType(str, Enum): + Exposure = "Exposure" + Macro = "Macro" + Model = "Model" + Seed = "Seed" + Snapshot = "Snapshot" + Source = "Source" + + +class AppliedModelSortField(str, Enum): + executeCompletedAt = "executeCompletedAt" + queryUsageCount = "queryUsageCount" + rowCount = "rowCount" + uniqueId = "uniqueId" + + +class FreshnessStatus(str, Enum): + Error = "Error" + Pass = "Pass" + Warn = "Warn" + + +class PackageResourceType(str, Enum): + macro = "macro" + model = "model" + + +class ReleaseVersion(str, Enum): + latest = "latest" + none = "none" + old = "old" + prerelease = "prerelease" + + +class ResourceNodeType(str, Enum): + Exposure = "Exposure" + Macro = "Macro" + Metric = "Metric" + Model = "Model" + Seed = "Seed" + SemanticModel = "SemanticModel" + Snapshot = "Snapshot" + Source = "Source" + Test = "Test" + + +class RunStatus(str, Enum): + error = "error" + skipped = "skipped" + success = "success" + + +class SortDirection(str, Enum): + asc = "asc" + desc = "desc" + + +class TestType(str, Enum): + GENERIC_DATA_TEST = "GENERIC_DATA_TEST" + SINGULAR_DATA_TEST = "SINGULAR_DATA_TEST" + UNIT_TEST = "UNIT_TEST" + + +class TimePeriod(str, Enum): + day = "day" + hour = "hour" + minute = "minute" diff --git a/metaphor/dbt/cloud/discovery_api/generated/exceptions.py b/metaphor/dbt/cloud/discovery_api/generated/exceptions.py new file mode 100644 index 00000000..9fbe116d --- /dev/null +++ b/metaphor/dbt/cloud/discovery_api/generated/exceptions.py @@ -0,0 +1,85 @@ +# Generated by ariadne-codegen + +from typing import Any, Dict, List, Optional, Union + +import httpx + + +class GraphQLClientError(Exception): + """Base exception.""" + + +class GraphQLClientHttpError(GraphQLClientError): + def __init__(self, status_code: int, response: httpx.Response) -> None: + self.status_code = 
status_code + self.response = response + + def __str__(self) -> str: + return f"HTTP status code: {self.status_code}" + + +class GraphQLClientInvalidResponseError(GraphQLClientError): + def __init__(self, response: httpx.Response) -> None: + self.response = response + + def __str__(self) -> str: + return "Invalid response format." + + +class GraphQLClientGraphQLError(GraphQLClientError): + def __init__( + self, + message: str, + locations: Optional[List[Dict[str, int]]] = None, + path: Optional[List[str]] = None, + extensions: Optional[Dict[str, object]] = None, + orginal: Optional[Dict[str, object]] = None, + ): + self.message = message + self.locations = locations + self.path = path + self.extensions = extensions + self.orginal = orginal + + def __str__(self) -> str: + return self.message + + @classmethod + def from_dict(cls, error: Dict[str, Any]) -> "GraphQLClientGraphQLError": + return cls( + message=error["message"], + locations=error.get("locations"), + path=error.get("path"), + extensions=error.get("extensions"), + orginal=error, + ) + + +class GraphQLClientGraphQLMultiError(GraphQLClientError): + def __init__( + self, + errors: List[GraphQLClientGraphQLError], + data: Optional[Dict[str, Any]] = None, + ): + self.errors = errors + self.data = data + + def __str__(self) -> str: + return "; ".join(str(e) for e in self.errors) + + @classmethod + def from_errors_dicts( + cls, errors_dicts: List[Dict[str, Any]], data: Optional[Dict[str, Any]] = None + ) -> "GraphQLClientGraphQLMultiError": + return cls( + errors=[GraphQLClientGraphQLError.from_dict(e) for e in errors_dicts], + data=data, + ) + + +class GraphQLClientInvalidMessageFormat(GraphQLClientError): + def __init__(self, message: Union[str, bytes]) -> None: + self.message = message + + def __str__(self) -> str: + return "Invalid message format." 
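A minimal sketch (not part of the patch) of how the generated client replaces the deleted hand-written `DiscoveryAPI` wrapper; the endpoint URL, token, and IDs below are placeholders:

```python
# Sketch only, assuming placeholder URL, token, and IDs.
from metaphor.common.entity_id import dataset_normalized_name
from metaphor.dbt.cloud.discovery_api import DiscoveryAPIClient

# Same bearer-token auth the deleted wrapper used, now passed as default headers.
client = DiscoveryAPIClient(
    url="https://metadata.cloud.getdbt.com/graphql",
    headers={"authorization": "Bearer <service-token>"},
)

# Rough equivalent of the deleted DiscoveryAPI.get_all_job_model_names(job_id):
# fetch a job run's models and map unique IDs to normalized dataset names.
data = client.get_job_run_models(job_id=1234, run_id=5678)
models = data.job.models if data.job else []
names = {
    model.unique_id: dataset_normalized_name(
        model.database, model.schema_, model.name
    )
    for model in models
}
```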
diff --git a/metaphor/dbt/cloud/discovery_api/generated/get_environment_adapter_type.py b/metaphor/dbt/cloud/discovery_api/generated/get_environment_adapter_type.py new file mode 100644 index 00000000..127905bf --- /dev/null +++ b/metaphor/dbt/cloud/discovery_api/generated/get_environment_adapter_type.py @@ -0,0 +1,20 @@ +# Generated by ariadne-codegen +# Source: queries.graphql + +from typing import Optional + +from pydantic import Field + +from .base_model import BaseModel + + +class GetEnvironmentAdapterType(BaseModel): + environment: "GetEnvironmentAdapterTypeEnvironment" + + +class GetEnvironmentAdapterTypeEnvironment(BaseModel): + adapter_type: Optional[str] = Field(alias="adapterType") + dbt_project_name: Optional[str] = Field(alias="dbtProjectName") + + +GetEnvironmentAdapterType.model_rebuild() diff --git a/metaphor/dbt/cloud/discovery_api/generated/get_job_run_macros.py b/metaphor/dbt/cloud/discovery_api/generated/get_job_run_macros.py new file mode 100644 index 00000000..d965a9af --- /dev/null +++ b/metaphor/dbt/cloud/discovery_api/generated/get_job_run_macros.py @@ -0,0 +1,31 @@ +# Generated by ariadne-codegen +# Source: queries.graphql + +from typing import Any, List, Optional + +from pydantic import Field + +from .base_model import BaseModel + + +class GetJobRunMacros(BaseModel): + job: Optional["GetJobRunMacrosJob"] + + +class GetJobRunMacrosJob(BaseModel): + macros: List["GetJobRunMacrosJobMacros"] + + +class GetJobRunMacrosJobMacros(BaseModel): + depends_on: List[str] = Field(alias="dependsOn") + description: Optional[str] + environment_id: Any = Field(alias="environmentId") + macro_sql: Optional[str] = Field(alias="macroSql") + meta: Optional[Any] + name: Optional[str] + package_name: Optional[str] = Field(alias="packageName") + unique_id: str = Field(alias="uniqueId") + + +GetJobRunMacros.model_rebuild() +GetJobRunMacrosJob.model_rebuild() diff --git a/metaphor/dbt/cloud/discovery_api/generated/get_job_run_metrics.py b/metaphor/dbt/cloud/discovery_api/generated/get_job_run_metrics.py new file mode 100644 index 00000000..1603b8ff --- /dev/null +++ b/metaphor/dbt/cloud/discovery_api/generated/get_job_run_metrics.py @@ -0,0 +1,44 @@ +# Generated by ariadne-codegen +# Source: queries.graphql + +from typing import List, Optional + +from pydantic import Field + +from .base_model import BaseModel + + +class GetJobRunMetrics(BaseModel): + job: Optional["GetJobRunMetricsJob"] + + +class GetJobRunMetricsJob(BaseModel): + metrics: List["GetJobRunMetricsJobMetrics"] + + +class GetJobRunMetricsJobMetrics(BaseModel): + package_name: Optional[str] = Field(alias="packageName") + label: Optional[str] + description: Optional[str] + depends_on: List[str] = Field(alias="dependsOn") + unique_id: str = Field(alias="uniqueId") + time_grains: Optional[List[str]] = Field(alias="timeGrains") + timestamp: Optional[str] + dimensions: List[str] + filters: List["GetJobRunMetricsJobMetricsFilters"] + tags: Optional[List[str]] + type: Optional[str] + sql: Optional[str] + expression: Optional[str] + calculation_method: Optional[str] + + +class GetJobRunMetricsJobMetricsFilters(BaseModel): + field: Optional[str] + operator: Optional[str] + value: Optional[str] + + +GetJobRunMetrics.model_rebuild() +GetJobRunMetricsJob.model_rebuild() +GetJobRunMetricsJobMetrics.model_rebuild() diff --git a/metaphor/dbt/cloud/discovery_api/generated/get_job_run_models.py b/metaphor/dbt/cloud/discovery_api/generated/get_job_run_models.py new file mode 100644 index 00000000..80e7eff8 --- /dev/null +++ 
b/metaphor/dbt/cloud/discovery_api/generated/get_job_run_models.py @@ -0,0 +1,56 @@ +# Generated by ariadne-codegen +# Source: queries.graphql + +from typing import Any, List, Optional + +from pydantic import Field + +from .base_model import BaseModel + + +class GetJobRunModels(BaseModel): + job: Optional["GetJobRunModelsJob"] + + +class GetJobRunModelsJob(BaseModel): + models: List["GetJobRunModelsJobModels"] + + +class GetJobRunModelsJobModels(BaseModel): + alias: Optional[str] + columns: Optional[List["GetJobRunModelsJobModelsColumns"]] + compiled_code: Optional[str] = Field(alias="compiledCode") + compiled_sql: Optional[str] = Field(alias="compiledSql") + database: Optional[str] + depends_on: List[str] = Field(alias="dependsOn") + description: Optional[str] + environment_id: Any = Field(alias="environmentId") + materialized_type: Optional[str] = Field(alias="materializedType") + meta: Optional[Any] + name: Optional[str] + package_name: Optional[str] = Field(alias="packageName") + raw_code: Optional[str] = Field(alias="rawCode") + raw_sql: Optional[str] = Field(alias="rawSql") + run_results: List["GetJobRunModelsJobModelsRunResults"] = Field(alias="runResults") + schema_: Optional[str] = Field(alias="schema") + tags: Optional[List[str]] + unique_id: str = Field(alias="uniqueId") + + +class GetJobRunModelsJobModelsColumns(BaseModel): + comment: Optional[str] + description: Optional[str] + meta: Optional[Any] + name: Optional[str] + tags: List[str] + type: Optional[str] + + +class GetJobRunModelsJobModelsRunResults(BaseModel): + status: Optional[str] + execute_completed_at: Optional[Any] = Field(alias="executeCompletedAt") + + +GetJobRunModels.model_rebuild() +GetJobRunModelsJob.model_rebuild() +GetJobRunModelsJobModels.model_rebuild() diff --git a/metaphor/dbt/cloud/discovery_api/generated/get_job_run_snapshots.py b/metaphor/dbt/cloud/discovery_api/generated/get_job_run_snapshots.py new file mode 100644 index 00000000..0d5d7acc --- /dev/null +++ b/metaphor/dbt/cloud/discovery_api/generated/get_job_run_snapshots.py @@ -0,0 +1,51 @@ +# Generated by ariadne-codegen +# Source: queries.graphql + +from typing import Any, List, Optional + +from pydantic import Field + +from .base_model import BaseModel + + +class GetJobRunSnapshots(BaseModel): + job: Optional["GetJobRunSnapshotsJob"] + + +class GetJobRunSnapshotsJob(BaseModel): + snapshots: List["GetJobRunSnapshotsJobSnapshots"] + + +class GetJobRunSnapshotsJobSnapshots(BaseModel): + alias: Optional[str] + columns: Optional[List["GetJobRunSnapshotsJobSnapshotsColumns"]] + comment: Optional[str] + compiled_code: Optional[str] = Field(alias="compiledCode") + compiled_sql: Optional[str] = Field(alias="compiledSql") + database: Optional[str] + description: Optional[str] + environment_id: Any = Field(alias="environmentId") + meta: Optional[Any] + name: Optional[str] + owner: Optional[str] + package_name: Optional[str] = Field(alias="packageName") + raw_code: Optional[str] = Field(alias="rawCode") + raw_sql: Optional[str] = Field(alias="rawSql") + schema_: Optional[str] = Field(alias="schema") + tags: Optional[List[str]] + unique_id: str = Field(alias="uniqueId") + + +class GetJobRunSnapshotsJobSnapshotsColumns(BaseModel): + comment: Optional[str] + description: Optional[str] + index: Optional[int] + meta: Optional[Any] + name: Optional[str] + tags: List[str] + type: Optional[str] + + +GetJobRunSnapshots.model_rebuild() +GetJobRunSnapshotsJob.model_rebuild() +GetJobRunSnapshotsJobSnapshots.model_rebuild() diff --git 
a/metaphor/dbt/cloud/discovery_api/generated/get_job_run_sources.py b/metaphor/dbt/cloud/discovery_api/generated/get_job_run_sources.py new file mode 100644 index 00000000..df708885 --- /dev/null +++ b/metaphor/dbt/cloud/discovery_api/generated/get_job_run_sources.py @@ -0,0 +1,35 @@ +# Generated by ariadne-codegen +# Source: queries.graphql + +from typing import List, Optional + +from pydantic import Field + +from .base_model import BaseModel + + +class GetJobRunSources(BaseModel): + job: Optional["GetJobRunSourcesJob"] + + +class GetJobRunSourcesJob(BaseModel): + sources: List["GetJobRunSourcesJobSources"] + + +class GetJobRunSourcesJobSources(BaseModel): + columns: Optional[List["GetJobRunSourcesJobSourcesColumns"]] + database: Optional[str] + description: Optional[str] + identifier: Optional[str] + schema_: Optional[str] = Field(alias="schema") + unique_id: str = Field(alias="uniqueId") + + +class GetJobRunSourcesJobSourcesColumns(BaseModel): + description: Optional[str] + name: Optional[str] + + +GetJobRunSources.model_rebuild() +GetJobRunSourcesJob.model_rebuild() +GetJobRunSourcesJobSources.model_rebuild() diff --git a/metaphor/dbt/cloud/discovery_api/generated/get_job_run_tests.py b/metaphor/dbt/cloud/discovery_api/generated/get_job_run_tests.py new file mode 100644 index 00000000..146407d6 --- /dev/null +++ b/metaphor/dbt/cloud/discovery_api/generated/get_job_run_tests.py @@ -0,0 +1,29 @@ +# Generated by ariadne-codegen +# Source: queries.graphql + +from typing import List, Optional + +from pydantic import Field + +from .base_model import BaseModel + + +class GetJobRunTests(BaseModel): + job: Optional["GetJobRunTestsJob"] + + +class GetJobRunTestsJob(BaseModel): + tests: List["GetJobRunTestsJobTests"] + + +class GetJobRunTestsJobTests(BaseModel): + column_name: Optional[str] = Field(alias="columnName") + compiled_sql: Optional[str] = Field(alias="compiledSql") + compiled_code: Optional[str] = Field(alias="compiledCode") + depends_on: List[str] = Field(alias="dependsOn") + name: Optional[str] + unique_id: str = Field(alias="uniqueId") + + +GetJobRunTests.model_rebuild() +GetJobRunTestsJob.model_rebuild() diff --git a/metaphor/dbt/cloud/discovery_api/generated/get_macro_arguments.py b/metaphor/dbt/cloud/discovery_api/generated/get_macro_arguments.py new file mode 100644 index 00000000..8ac8f4cc --- /dev/null +++ b/metaphor/dbt/cloud/discovery_api/generated/get_macro_arguments.py @@ -0,0 +1,55 @@ +# Generated by ariadne-codegen +# Source: queries.graphql + +from typing import List, Optional + +from pydantic import Field + +from .base_model import BaseModel + + +class GetMacroArguments(BaseModel): + environment: "GetMacroArgumentsEnvironment" + + +class GetMacroArgumentsEnvironment(BaseModel): + definition: Optional["GetMacroArgumentsEnvironmentDefinition"] + + +class GetMacroArgumentsEnvironmentDefinition(BaseModel): + macros: "GetMacroArgumentsEnvironmentDefinitionMacros" + + +class GetMacroArgumentsEnvironmentDefinitionMacros(BaseModel): + edges: List["GetMacroArgumentsEnvironmentDefinitionMacrosEdges"] + page_info: "GetMacroArgumentsEnvironmentDefinitionMacrosPageInfo" = Field( + alias="pageInfo" + ) + + +class GetMacroArgumentsEnvironmentDefinitionMacrosEdges(BaseModel): + node: "GetMacroArgumentsEnvironmentDefinitionMacrosEdgesNode" + + +class GetMacroArgumentsEnvironmentDefinitionMacrosEdgesNode(BaseModel): + arguments: List["GetMacroArgumentsEnvironmentDefinitionMacrosEdgesNodeArguments"] + unique_id: str = Field(alias="uniqueId") + + +class 
GetMacroArgumentsEnvironmentDefinitionMacrosEdgesNodeArguments(BaseModel): + description: Optional[str] + name: Optional[str] + type: Optional[str] + + +class GetMacroArgumentsEnvironmentDefinitionMacrosPageInfo(BaseModel): + has_next_page: bool = Field(alias="hasNextPage") + end_cursor: Optional[str] = Field(alias="endCursor") + + +GetMacroArguments.model_rebuild() +GetMacroArgumentsEnvironment.model_rebuild() +GetMacroArgumentsEnvironmentDefinition.model_rebuild() +GetMacroArgumentsEnvironmentDefinitionMacros.model_rebuild() +GetMacroArgumentsEnvironmentDefinitionMacrosEdges.model_rebuild() +GetMacroArgumentsEnvironmentDefinitionMacrosEdgesNode.model_rebuild() diff --git a/metaphor/dbt/cloud/discovery_api/generated/input_types.py b/metaphor/dbt/cloud/discovery_api/generated/input_types.py new file mode 100644 index 00000000..efa56a25 --- /dev/null +++ b/metaphor/dbt/cloud/discovery_api/generated/input_types.py @@ -0,0 +1,124 @@ +# Generated by ariadne-codegen +# Source: schema.graphql + +from typing import List, Optional + +from pydantic import Field + +from .base_model import BaseModel +from .enums import ( + AccessLevel, + AppliedModelSortField, + FreshnessStatus, + ResourceNodeType, + RunStatus, + SortDirection, + TestType, +) + + +class AppliedModelSort(BaseModel): + direction: SortDirection + field: AppliedModelSortField + + +class AppliedResourcesFilter(BaseModel): + tags: Optional[List[str]] = None + types: List[ResourceNodeType] + unique_ids: Optional[List[str]] = Field(alias="uniqueIds", default=None) + + +class DefinitionResourcesFilter(BaseModel): + tags: Optional[List[str]] = None + types: List[ResourceNodeType] + unique_ids: Optional[List[str]] = Field(alias="uniqueIds", default=None) + + +class ExposureFilter(BaseModel): + exposure_type: Optional[str] = Field(alias="exposureType", default=None) + tags: Optional[List[str]] = None + unique_ids: Optional[List[str]] = Field(alias="uniqueIds", default=None) + + +class GenericMaterializedFilter(BaseModel): + database: Optional[str] = None + identifier: Optional[str] = None + schema_: Optional[str] = Field(alias="schema", default=None) + tags: Optional[List[str]] = None + unique_ids: Optional[List[str]] = Field(alias="uniqueIds", default=None) + + +class GroupFilter(BaseModel): + unique_ids: Optional[List[str]] = Field(alias="uniqueIds", default=None) + + +class LineageFilter(BaseModel): + column_names: Optional[List[str]] = Field(alias="columnNames", default=None) + exclude: Optional[str] = None + select: Optional[str] = None + tags: Optional[List[str]] = None + types: Optional[List[ResourceNodeType]] = None + unique_ids: Optional[List[str]] = Field(alias="uniqueIds", default=None) + + +class MacroDefinitionFilter(BaseModel): + package_name: Optional[str] = Field(alias="packageName", default=None) + unique_ids: Optional[List[str]] = Field(alias="uniqueIds", default=None) + + +class ModelAppliedFilter(BaseModel): + access: Optional[AccessLevel] = None + database: Optional[str] = None + group: Optional[str] = None + identifier: Optional[str] = None + last_run_status: Optional[RunStatus] = Field(alias="lastRunStatus", default=None) + modeling_layer: Optional[str] = Field(alias="modelingLayer", default=None) + package_name: Optional[str] = Field(alias="packageName", default=None) + schema_: Optional[str] = Field(alias="schema", default=None) + tags: Optional[List[str]] = None + unique_ids: Optional[List[str]] = Field(alias="uniqueIds", default=None) + + +class ModelDefinitionFilter(BaseModel): + access: Optional[AccessLevel] = None + 
database: Optional[str] = None + group: Optional[str] = None + identifier: Optional[str] = None + modeling_layer: Optional[str] = Field(alias="modelingLayer", default=None) + schema_: Optional[str] = Field(alias="schema", default=None) + tags: Optional[List[str]] = None + unique_ids: Optional[List[str]] = Field(alias="uniqueIds", default=None) + + +class SourceAppliedFilter(BaseModel): + database: Optional[str] = None + freshness_checked: Optional[bool] = Field(alias="freshnessChecked", default=None) + freshness_status: Optional[FreshnessStatus] = Field( + alias="freshnessStatus", default=None + ) + identifier: Optional[str] = None + schema_: Optional[str] = Field(alias="schema", default=None) + source_names: Optional[List[str]] = Field(alias="sourceNames", default=None) + tags: Optional[List[str]] = None + unique_ids: Optional[List[str]] = Field(alias="uniqueIds", default=None) + + +class SourceDefinitionFilter(BaseModel): + database: Optional[str] = None + identifier: Optional[str] = None + schema_: Optional[str] = Field(alias="schema", default=None) + source_names: Optional[List[str]] = Field(alias="sourceNames", default=None) + tags: Optional[List[str]] = None + unique_ids: Optional[List[str]] = Field(alias="uniqueIds", default=None) + + +class TestAppliedFilter(BaseModel): + status: Optional[str] = None + tags: Optional[List[str]] = None + test_types: Optional[List[TestType]] = Field(alias="testTypes", default=None) + unique_ids: Optional[List[str]] = Field(alias="uniqueIds", default=None) + + +class TestDefinitionFilter(BaseModel): + tags: Optional[List[str]] = None + unique_ids: Optional[List[str]] = Field(alias="uniqueIds", default=None) diff --git a/metaphor/dbt/cloud/discovery_api/queries.graphql b/metaphor/dbt/cloud/discovery_api/queries.graphql new file mode 100644 index 00000000..5db5ab2d --- /dev/null +++ b/metaphor/dbt/cloud/discovery_api/queries.graphql @@ -0,0 +1,165 @@ +query GetJobRunModels($jobId: BigInt!, $runId: BigInt!) { + job(id: $jobId, runId: $runId) { + models { + alias + columns { + comment + description + meta + name + tags + type + } + compiledCode + compiledSql + database + dependsOn + description + environmentId + materializedType + meta + name + packageName + rawCode + rawSql + runResults { + status + executeCompletedAt + } + schema + tags + uniqueId + } + } +} + +query GetJobRunSnapshots($jobId: BigInt!, $runId: BigInt!) { + job(id: $jobId, runId: $runId) { + snapshots { + alias + columns { + comment + description + index + meta + name + tags + type + } + comment + compiledCode + compiledSql + database + description + environmentId + meta + name + owner + packageName + rawCode + rawSql + schema + tags + uniqueId + } + } +} + +query GetJobRunMacros($jobId: BigInt!, $runId: BigInt!) { + job(id: $jobId, runId: $runId) { + macros { + dependsOn + description + environmentId + macroSql + meta + name + packageName + uniqueId + } + } +} + +query GetJobRunSources($jobId: BigInt!, $runId: BigInt!) { + job(id: $jobId, runId: $runId) { + sources { + columns { + description + name + } + database + description + identifier + schema + uniqueId + } + } +} + +query GetJobRunMetrics($jobId: BigInt!, $runId: BigInt) { + job(id: $jobId, runId: $runId) { + metrics { + packageName + label + description + dependsOn + uniqueId + timeGrains + timestamp + dimensions + filters { + field + operator + value + } + tags + type + sql + expression + calculation_method + } + } +} + +query GetJobRunTests($jobId: BigInt!, $runId: BigInt!) 
{ + job(id: $jobId, runId: $runId) { + tests { + columnName + compiledSql + compiledCode + dependsOn + name + uniqueId + } + } +} + +query GetEnvironmentAdapterType($environmentId: BigInt!) { + environment(id: $environmentId) { + adapterType + dbtProjectName + } +} + +query GetMacroArguments($environmentId: BigInt!, $filter: MacroDefinitionFilter, $after: String) { + environment(id: $environmentId) { + definition { + macros(filter: $filter, first: 500, after: $after) { + edges { + node { + arguments { + description + name + type + } + uniqueId + } + } + pageInfo { + hasNextPage + endCursor + } + } + } + } +} diff --git a/metaphor/dbt/cloud/discovery_api/schema.graphql b/metaphor/dbt/cloud/discovery_api/schema.graphql new file mode 100644 index 00000000..4382a2aa --- /dev/null +++ b/metaphor/dbt/cloud/discovery_api/schema.graphql @@ -0,0 +1,5466 @@ +""" +A directive used by the Apollo iOS client to annotate operations or fragments that should be used exclusively for generating local cache mutations instead of as standard operations. +""" +directive @apollo_client_ios_localCacheMutation on QUERY | MUTATION | SUBSCRIPTION | FRAGMENT_DEFINITION + +""" +A directive used by the Apollo iOS code generation engine to generate custom import statements in operation or fragment definition files. An import statement to import a module with the name provided in the `module` argument will be added to the generated definition file. +""" +directive @import( + """The name of the module to import.""" + module: String! +) repeatable on QUERY | MUTATION | SUBSCRIPTION | FRAGMENT_DEFINITION + +""" +Directs the executor to defer this fragment when the `if` argument is true or undefined. +""" +directive @defer( + """Deferred when true or undefined.""" + if: Boolean + + """Unique name""" + label: String +) on FRAGMENT_SPREAD | INLINE_FRAGMENT + +"""The access level of the model you are declaring properties for.""" +enum AccessLevel { + private + protected + public +} + +enum AncestorNodeType { + Exposure + Macro + Model + Seed + Snapshot + Source +} + +""" +This type can represent any scalar type such as int, float, string or boolean +""" +scalar AnyScalar + +"""Sort options for applied models""" +input AppliedModelSort { + """The direction to sort by""" + direction: SortDirection! + + """The field to sort by""" + field: AppliedModelSortField! +} + +enum AppliedModelSortField { + executeCompletedAt + queryUsageCount + rowCount + uniqueId +} + +input AppliedResourcesFilter { + """Filter results by tags""" + tags: [String!] + + """Types of resources to enumerate""" + types: [ResourceNodeType!]! + + """unique ids of nodes to return""" + uniqueIds: [String!] +} + +""" +The output of successful dbt DAG execution that creates or describes the state of the database. +""" +type AppliedState { + exposures( + after: String + + """Exposure Filter""" + filter: ExposureFilter + first: Int + ): ExposureAppliedStateNodeConnection! + + """ + The timestamp when the environment was last updated, which is when the run was ingested. + """ + lastUpdatedAt: DateTime + + """Project Lineage.""" + lineage( + """Lineage Filter""" + filter: LineageFilter! + ): [LineageNode!]! + + """ + Retrieve model information based on environmentId. + This will include any run from any job in the specified environment. 
+ """ + modelHistoricalRuns( + """The model identifier""" + identifier: String + + """ + Number of last run results where this model was built to return (max of 20) + """ + lastRunCount: Int = 1 + + """The unique ID of this model""" + uniqueId: String + + """If true, return only runs that have catalog information for this model""" + withCatalog: Boolean = false + ): [ModelNode!]! + models( + after: String + + """Model Filter""" + filter: ModelAppliedFilter + first: Int + + """Sort by""" + sort: AppliedModelSort + ): ModelAppliedStateNodeConnection! + + """List of packages used in the environment""" + packages( + """Filter by resource type""" + resource: PackageResourceType! + ): [String!]! + + """The project name for this environment""" + resourceCounts: JSONObject + + """The paginated results of resources.""" + resources( + after: String + + """Resources Filter""" + filter: AppliedResourcesFilter! + first: Int + ): EnvironmentAppliedNodeConnection! + seeds( + after: String + + """Seeds Filter""" + filter: GenericMaterializedFilter + first: Int + ): SeedAppliedStateNodeConnection! + snapshots( + after: String + + """Seeds Filter""" + filter: GenericMaterializedFilter + first: Int + ): SnapshotAppliedStateNodeConnection! + sources( + after: String + + """Source Filter""" + filter: SourceAppliedFilter + first: Int + ): SourceAppliedStateNodeConnection! + + """The distinct tags of applied resources in the environment""" + tags: [Tag!]! + + """The definition state of a test""" + tests( + after: String + + """Test Filter""" + filter: TestAppliedFilter + first: Int + ): TestAppliedStateNodeConnection! +} + +""" +The `BigInt` scalar type represents non-fractional signed whole numeric values. +""" +scalar BigInt + +type CatalogColumn { + """The comment of this column (generated by docs)""" + comment: String + + """The description of this column (generated by docs)""" + description: String + + """The index of this column (generated by docs)""" + index: Int + + """ + The key-value store containing metadata relevant to this column (generated by docs) + """ + meta: JSONObject + + """The name of this column (generated by docs)""" + name: String + + """The tags associated with this column (generated by docs)""" + tags: [String!]! + + """The type of this column (generated by docs)""" + type: String +} + +type CatalogNode { + """Adapter agnostic bytes stat""" + bytesStat: BigInt + + """The columns for this node""" + columns: [CatalogColumn!] + + """The comment on this node""" + comment: String + + """The environment ID of this model node""" + environmentId: BigInt! + + """The job ID of the last run that built a catalog containing this node""" + jobDefinitionId: BigInt + + """The owner of this node""" + owner: String + + """Adapter agnostic row count""" + rowCountStat: BigInt + + """ + The timestamp when the catalog build containing this node was completed, ISO formatted timestamp + """ + runGeneratedAt: DateTime + + """The run ID of the last run that built a catalog containing this node""" + runId: BigInt + + """The stats for this node""" + stats: [CatalogStat] + + """The type of this node""" + type: String + + """The unique ID of this node""" + uniqueId: String! 
+} + +type CatalogStat { + """The description of this stat""" + description: String + + """The id of this stat""" + id: String + + """If this stat is included""" + include: Boolean + + """The label of this stat""" + label: String + + """The value of this stat""" + value: AnyScalar +} + +interface CloudArtifactInterface { + """ + The unique ID of the account in dbt Cloud that this node was generated for + """ + accountId: BigInt! + + """ + The unique ID of the account in dbt Cloud that this node was generated for + """ + environmentId: BigInt! + + """ + The unique ID of the account in dbt Cloud that this node was generated for + """ + jobId: BigInt! + + """ + The unique ID of the account in dbt Cloud that this node was generated for + """ + projectId: BigInt! + + """ + The unique ID of the account in dbt Cloud that this node was generated for + """ + runId: BigInt! +} + +type Criteria { + """The freshness SLA that determines an error state for this source""" + errorAfter: CriteriaInfo + + """The freshness SLA that determines a warning state for this source""" + warnAfter: CriteriaInfo +} + +type CriteriaInfo { + """The number of periods in the freshness SLA""" + count: Int + + """ + The period component of the freshness SLA (examples: minute, hour, day) + """ + period: TimePeriod +} + +""" +A date-time string at UTC, such as 2007-12-03T10:15:30Z, compliant with the `date-time` format outlined in section 5.6 of the RFC 3339 profile of the ISO 8601 standard for representation of dates and times using the Gregorian calendar. +""" +scalar DateTime + +input DefinitionResourcesFilter { + """Filter results by tags""" + tags: [String!] + + """Types of resources to enumerate""" + types: [ResourceNodeType!]! + + """unique ids of nodes to return""" + uniqueIds: [String!] +} + +""" +The output of successful dbt DAG execution that creates or describes the state of the database. +""" +type DefinitionState { + """The paginated results of Exposures.""" + exposures( + after: String + + """Exposure Filter""" + filter: ExposureFilter + first: Int + ): ExposureDefinitionNodeConnection! + + """Groups for model governance.""" + groups( + after: String + + """Group Filter""" + filter: GroupFilter + first: Int + ): GroupNodeConnection! + + """ + The timestamp when the environment was last updated, which is when the run was ingested. + """ + lastUpdatedAt: DateTime + + """Project Lineage.""" + lineage( + """Lineage Filter""" + filter: LineageFilter! + ): [LineageNode!]! + macros( + after: String + + """Macro Filter""" + filter: MacroDefinitionFilter + first: Int + ): MacroDefinitionNodeConnection! + metrics( + after: String + + """Metrics Filter""" + filter: GenericMaterializedFilter + first: Int + ): MetricDefinitionNodeConnection! + models( + after: String + + """Model Filter""" + filter: ModelDefinitionFilter + first: Int + ): ModelDefinitionNodeConnection! + + """List of packages used in the environment""" + packages( + """Filter by resource type""" + resource: PackageResourceType! + ): [String!]! + + """The project name for this environment""" + resourceCounts: JSONObject + + """The paginated results of resources.""" + resources( + after: String + + """Resources Filter""" + filter: DefinitionResourcesFilter! + first: Int + ): EnvironmentDefinitionNodeConnection! + seeds( + after: String + + """Seeds Filter""" + filter: GenericMaterializedFilter + first: Int + ): SeedDefinitionNodeConnection! 
+ semanticModels( + after: String + + """SemanticModels Filter""" + filter: GenericMaterializedFilter + first: Int + ): SemanticModelDefinitionNodeConnection! + snapshots( + after: String + + """Snapshots Filter""" + filter: GenericMaterializedFilter + first: Int + ): SnapshotDefinitionNodeConnection! + sources( + after: String + + """Source Filter""" + filter: SourceDefinitionFilter + first: Int + ): SourceDefinitionNodeConnection! + + """The distinct tags of definition resources in the environment""" + tags: [Tag!]! + tests( + after: String + + """Test Filter""" + filter: TestDefinitionFilter + first: Int + ): TestDefinitionNodeConnection! +} + +"""Retrieve information based on environment.""" +type Environment { + """The adapter type (data platform) that this environment executed with""" + adapterType: String + applied: AppliedState + + """The project name for this environment""" + dbtProjectName: String + definition: DefinitionState +} + +interface EnvironmentAppliedNestedNode { + """The user-supplied description for this node""" + description: String + + """Original file path to the node""" + filePath: String! + + """The fully qualified name of the node""" + fqn: [String!] + + """The user-supplied name of this particular node""" + name: String + + """The resource type of this node""" + resourceType: String! + + """The account ID of this node""" + uniqueId: String! +} + +interface EnvironmentAppliedNode { + """The account ID of this node""" + accountId: BigInt! + + """The user-supplied description for this node""" + description: String + + """The environment ID of this node""" + environmentId: BigInt! + + """Original file path to the node""" + filePath: String! + + """The key-value store containing metadata relevant to this node""" + meta: JSONObject + + """The user-supplied name of this particular node""" + name: String + + """The project ID of this node""" + projectId: BigInt! + + """The resource type of this node""" + resourceType: String! + + """The tags associated with this node""" + tags: [String!]! + + """The unique ID of this node""" + uniqueId: String! +} + +type EnvironmentAppliedNodeConnection { + edges: [EnvironmentAppliedNodeEdge!]! + pageInfo: PageInfo! + totalCount: Int! +} + +type EnvironmentAppliedNodeEdge { + cursor: String! + node: EnvironmentAppliedNode! +} + +interface EnvironmentDefinitionNestedNode { + """The user-supplied description for this node""" + description: String + + """Original file path to the node""" + filePath: String! + + """The user-supplied name of this particular node""" + name: String + + """The resource type of this node""" + resourceType: String! + + """ + The timestamp when the run step (dbt run) was completed, ISO formatted timestamp + """ + runGeneratedAt: DateTime + + """The unique ID of this node""" + uniqueId: String! +} + +interface EnvironmentDefinitionNode { + """The account ID of this node""" + accountId: BigInt! + + """The user-supplied description for this node""" + description: String + + """The environment ID of this node""" + environmentId: BigInt! + + """Original file path to the node""" + filePath: String! + + """The key-value store containing metadata relevant to this node""" + meta: JSONObject + + """The user-supplied name of this particular node""" + name: String + + """The project ID of this node""" + projectId: BigInt! + + """The resource type of this node""" + resourceType: String! 
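+
+# Sketch of the applied/definition split introduced by the Environment type
+# above: the same environment exposes run-produced state (applied) and
+# project-defined state (definition). Comment only; id 1 is a placeholder.
+#
+# query EnvironmentStates {
+#   environment(id: 1) {
+#     adapterType
+#     dbtProjectName
+#     applied { resourceCounts }
+#     definition { lastUpdatedAt resourceCounts }
+#   }
+# }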
+ + """ + The timestamp when the run step (dbt run) was completed, ISO formatted timestamp + """ + runGeneratedAt: DateTime + + """The tags associated with this node""" + tags: [String!]! + + """The unique ID of this node""" + uniqueId: String! +} + +type EnvironmentDefinitionNodeConnection { + edges: [EnvironmentDefinitionNodeEdge!]! + pageInfo: PageInfo! + totalCount: Int! +} + +type EnvironmentDefinitionNodeEdge { + cursor: String! + node: EnvironmentDefinitionNode! +} + +type ExposureAppliedStateNestedNode implements EnvironmentAppliedNestedNode { + """dbt version""" + dbtVersion: String + + """The user-supplied description for this node""" + description: String + + """Type of this exposure""" + exposureType: String + + """Original file path to the node""" + filePath: String! + + """The fully qualified name of the node""" + fqn: [String!] + + """Exposure Label""" + label: String + + """When the manifest was generated""" + manifestGeneratedAt: DateTime + + """Exposure maturity""" + maturity: String + + """The user-supplied name of this particular node""" + name: String + + """Email of the owner of this exposure""" + ownerEmail: String + + """Name of the owner of this exposure""" + ownerName: String + + """The package name of this exposure""" + packageName: String + + """The patch path of this exposure""" + patchPath: String + + """The resource type of this node""" + resourceType: String! + + """The account ID of this node""" + uniqueId: String! + + """Exposure url""" + url: String +} + +type ExposureAppliedStateNode implements EnvironmentAppliedNode { + """The account ID of this node""" + accountId: BigInt! + + """The ancestors of this exposure""" + ancestors( + """Filter ancestors to specific node types""" + types: [AncestorNodeType!]! + ): [EnvironmentAppliedNestedNode!]! + + """dbt version""" + dbtVersion: String + + """The user-supplied description for this node""" + description: String + + """The environment ID of this node""" + environmentId: BigInt! + + """Type of this exposure""" + exposureType: String + + """Original file path to the node""" + filePath: String! + + """The fully qualified name of this exposure""" + fqn: [String!]! + + """Exposure Label""" + label: String + + """When the manifest was generated""" + manifestGeneratedAt: DateTime + + """Exposure maturity""" + maturity: String + + """The key-value store containing metadata relevant to this node""" + meta: JSONObject + + """The user-supplied name of this particular node""" + name: String + + """Email of the owner of this exposure""" + ownerEmail: String + + """Name of the owner of this exposure""" + ownerName: String + + """The package name of this exposure""" + packageName: String + + """The direct parents of this exposure""" + parents: [EnvironmentAppliedNestedNode!]! + + """The patch path of this exposure""" + patchPath: String + + """The project ID of this node""" + projectId: BigInt! + + """The resource type of this node""" + resourceType: String! + + """The tags associated with this node""" + tags: [String!]! + + """The unique ID of this node""" + uniqueId: String! + + """Exposure url""" + url: String +} + +type ExposureAppliedStateNodeConnection { + edges: [ExposureAppliedStateNodeEdge!]! + pageInfo: PageInfo! + totalCount: Int! +} + +type ExposureAppliedStateNodeEdge { + cursor: String! + node: ExposureAppliedStateNode! 
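+
+# Cursor-pagination sketch for the connection type above. This assumes the
+# applied state exposes an exposures connection alongside models/sources,
+# which is defined outside this excerpt. Comment only; id and page size are
+# placeholders.
+#
+# query Exposures($cursor: String) {
+#   environment(id: 1) {
+#     applied {
+#       exposures(first: 100, after: $cursor) {
+#         pageInfo { hasNextPage endCursor }
+#         edges { node { name exposureType ownerEmail url } }
+#       }
+#     }
+#   }
+# }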
+} + +type ExposureDefinitionNestedNode implements EnvironmentDefinitionNestedNode { + """dbt version""" + dbtVersion: String + + """The user-supplied description for this node""" + description: String + + """Type of this exposure""" + exposureType: String + + """Original file path to the node""" + filePath: String! + + """Exposure Label""" + label: String + + """When the manifest was generated""" + manifestGeneratedAt: DateTime + + """Exposure maturity""" + maturity: String + + """The user-supplied name of this particular node""" + name: String + + """Email of the owner of this exposure""" + ownerEmail: String + + """Name of the owner of this exposure""" + ownerName: String + + """The package name of this exposure""" + packageName: String + + """The patch path of this exposure""" + patchPath: String + + """The resource type of this node""" + resourceType: String! + + """ + The timestamp when the run step (dbt run) was completed, ISO formatted timestamp + """ + runGeneratedAt: DateTime + + """The unique ID of this node""" + uniqueId: String! + + """Exposure url""" + url: String +} + +type ExposureDefinitionNode implements EnvironmentDefinitionNode { + """The account ID of this node""" + accountId: BigInt! + + """The ancestors of this exposure""" + ancestors: [EnvironmentDefinitionNestedNode!]! + + """dbt version""" + dbtVersion: String + + """The user-supplied description for this node""" + description: String + + """The environment ID of this node""" + environmentId: BigInt! + + """Type of this exposure""" + exposureType: String + + """Original file path to the node""" + filePath: String! + + """The fully qualified name of this exposure""" + fqn: [String!]! + + """Exposure Label""" + label: String + + """When the manifest was generated""" + manifestGeneratedAt: DateTime + + """Exposure maturity""" + maturity: String + + """The key-value store containing metadata relevant to this node""" + meta: JSONObject + + """The user-supplied name of this particular node""" + name: String + + """Email of the owner of this exposure""" + ownerEmail: String + + """Name of the owner of this exposure""" + ownerName: String + + """The package name of this exposure""" + packageName: String + + """The direct parents of this exposure""" + parents: [EnvironmentDefinitionNestedNode!]! + + """The patch path of this exposure""" + patchPath: String + + """The project ID of this node""" + projectId: BigInt! + + """The resource type of this node""" + resourceType: String! + + """ + The timestamp when the run step (dbt run) was completed, ISO formatted timestamp + """ + runGeneratedAt: DateTime + + """The tags associated with this node""" + tags: [String!]! + + """The unique ID of this node""" + uniqueId: String! + + """Exposure url""" + url: String +} + +type ExposureDefinitionNodeConnection { + edges: [ExposureDefinitionNodeEdge!]! + pageInfo: PageInfo! + totalCount: Int! +} + +type ExposureDefinitionNodeEdge { + cursor: String! + node: ExposureDefinitionNode! +} + +input ExposureFilter { + """Filter results by exposure type""" + exposureType: String + + """Filter results by tags""" + tags: [String!] + + """unique ids of nodes to return""" + uniqueIds: [String!] 
+} + +"""Exposure Lineage Object""" +type ExposureLineageNode implements LineageGraphNode & LineageNode & LineageNodeWithParents { + """The access level of this node""" + access: String @deprecated(reason: "Use ...on syntax from specific types") + + """The package alias of this node""" + alias: String @deprecated(reason: "Use ...on syntax from specific types") + + """The package database of this node""" + database: String @deprecated(reason: "Use ...on syntax from specific types") + + """Original file path to the node""" + filePath: String! + + """The fully qualified name of this node""" + fqn: [String!]! + + """The group this node belongs to""" + group: String @deprecated(reason: "Use ...on syntax from specific types") + + """ + True if the node matches a selector method, rather than matching via graph traversal + """ + matchesMethod: Boolean + + """The materialization type""" + materializationType: String @deprecated(reason: "Use ...on syntax from specific types") + + """The user-supplied name of this particular node""" + name: String + + """the nodes this node depends on""" + parentIds: [String!]! @deprecated(reason: "Use ...on syntax from specific types") + + """The project id of this node""" + projectId: BigInt! + + """the public nodes this resource depends on""" + publicParentIds: [String!]! @deprecated(reason: "Use ...on syntax from specific types") + + """The resource type of this node""" + resourceType: ResourceNodeType! + + """The package schema of this node""" + schema: String @deprecated(reason: "Use ...on syntax from specific types") + + """The tags associated with this node""" + tags: [String!]! + + """The unique ID of this node""" + uniqueId: String! + + """The version of this node""" + version: String @deprecated(reason: "Use ...on syntax from specific types") +} + +type ExposureNode implements CloudArtifactInterface & NodeInterface { + """ + The unique ID of the account in dbt Cloud that this node was generated for + """ + accountId: BigInt! + + """The version of dbt used to produce this node""" + dbtVersion: String + + """The list of nodes this exposure depends on""" + dependsOn: [String!]! + + """The user-supplied description for this node""" + description: String + + """ + The unique ID of the account in dbt Cloud that this node was generated for + """ + environmentId: BigInt! + + """The type of this exposure""" + exposureType: String + + """ + The unique ID of the account in dbt Cloud that this node was generated for + """ + jobId: BigInt! + + """The ISO time when the manifest for this exposure was generated""" + manifestGeneratedAt: DateTime + + """The maturity of this exposure""" + maturity: String + + """The key-value store containing metadata relevant to this node""" + meta: JSONObject + + """The user-supplied name of this particular node""" + name: String + + """The email of the owner of this exposure""" + ownerEmail: String + + """The name of the owner of this exposure""" + ownerName: String + + """The package name of this exposure""" + packageName: String + + """The list of resources that are ancestors of this exposure""" + parents: [NodeInterface!]! + + """The list of models that are parents of this exposure""" + parentsModels: [ModelNode!]! + + """The list of sources that are parents of this exposure""" + parentsSources: [SourceNode!]! + + """ + The unique ID of the account in dbt Cloud that this node was generated for + """ + projectId: BigInt! + + """The resource type of this node""" + resourceType: String! 
+ + """ + The unique ID of the account in dbt Cloud that this node was generated for + """ + runId: BigInt! + + """The tags associated with this node""" + tags: [String!] + + """The unique ID of this node""" + uniqueId: String! + + """The url of this exposure""" + url: String +} + +"""Public model from another project""" +type ExternalModelNode implements EnvironmentAppliedNestedNode & EnvironmentDefinitionNestedNode { + """The account ID of this node""" + accountId: BigInt! + database: String + dbtProjectName: String + + """The user-supplied description for this node""" + description: String + + """The environment ID of this node""" + environmentId: BigInt! + + """Original file path to the node""" + filePath: String! + + """The fully qualified name of the node""" + fqn: [String!] + identifier: String + latestVersion: String + + """The key-value store containing metadata relevant to this node""" + meta: JSONObject + + """The user-supplied name of this particular node""" + name: String + packageName: String + patchPath: String + + """The project ID of this node""" + projectId: BigInt! + relationName: String + + """The release version of this external model""" + releaseVersion: ReleaseVersion + resourceType: String! + + """ + The timestamp when the run step (dbt run) was completed, ISO formatted timestamp + """ + runGeneratedAt: DateTime + schema: String + + """The tags associated with this node""" + tags: [String!]! + + """The unique ID of this node""" + uniqueId: String! + version: String +} + +enum FreshnessStatus { + Error + Pass + Warn +} + +""" +Basic filter for nodes that are materialized (exists) in the data warehouse +""" +input GenericMaterializedFilter { + """Filter nodes by database""" + database: String + + """Filter nodes by identifier""" + identifier: String + + """Filter nodes by schema""" + schema: String + + """Filter results by tags""" + tags: [String!] + + """unique ids of nodes to return""" + uniqueIds: [String!] +} + +input GroupFilter { + """unique ids of nodes to return""" + uniqueIds: [String!] +} + +type GroupNode implements EnvironmentDefinitionNode { + """The account ID of this node""" + accountId: BigInt! + + """The user-supplied description for this node""" + description: String + + """The environment ID of this node""" + environmentId: BigInt! + + """Original file path to the group""" + filePath: String! + + """The unique ID of the job in dbt Cloud that this node was generated for""" + jobId: BigInt + + """The key-value store containing metadata relevant to this node""" + meta: JSONObject + + """The number of models in this group""" + modelCount: Int! + + """""" + models: [ModelDefinitionNestedNode!]! + + """The name of this group""" + name: String + + """Owner (email) of this group""" + ownerEmail: String + + """Owner (name) of this group""" + ownerName: String + + """Package name of this group""" + packageName: String + + """The project ID of this node""" + projectId: BigInt! + + """The resource type of this node""" + resourceType: String! + + """ + The timestamp when the run step (dbt run) was completed, ISO formatted timestamp + """ + runGeneratedAt: DateTime + + """The unique ID of the run in dbt Cloud that generated this node""" + runId: BigInt + tags: [String!]! @deprecated(reason: "Groups cannot have tags") + + """The unique ID of this node""" + uniqueId: String! +} + +type GroupNodeConnection { + edges: [GroupNodeEdge!]! + pageInfo: PageInfo! + totalCount: Int! +} + +type GroupNodeEdge { + cursor: String! + node: GroupNode! 
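+
+# Sketch: model-governance groups read through the definition state, using
+# the GroupNode fields above. Comment only; id is a placeholder.
+#
+# query Groups {
+#   environment(id: 1) {
+#     definition {
+#       groups(first: 20) {
+#         edges { node { name ownerName ownerEmail modelCount } }
+#       }
+#     }
+#   }
+# }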
+}
+
+"""
+The `JSONObject` scalar type represents JSON objects as specified by [ECMA-404](http://www.ecma-international.org/publications/files/ECMA-ST/ECMA-404.pdf).
+"""
+scalar JSONObject
+
+""""""
+type JobNode {
+  exposure(
+    """The name of this exposure"""
+    name: String!
+  ): ExposureNode
+
+  """Retrieve exposure information."""
+  exposures: [ExposureNode!]!
+
+  """The unique ID of this job"""
+  id: BigInt!
+  macro(uniqueId: String!): MacroNode
+
+  """Retrieve macro information."""
+  macros: [MacroNode!]!
+
+  """Retrieve metric information."""
+  metric(uniqueId: String!): MetricNode
+
+  """Retrieve metric information."""
+  metrics: [MetricNode!]!
+  model(uniqueId: String!): ModelNode
+
+  """Retrieve model information."""
+  models(
+    """The database where this table/view lives"""
+    database: String
+
+    """The identifier of this table/view"""
+    identifier: String
+
+    """The schema where this table/view lives"""
+    schema: String
+  ): [ModelNode!]!
+
+  """The unique ID of the run in dbt-cloud"""
+  runId: BigInt!
+  seed(uniqueId: String!): SeedNode
+  seeds: [SeedNode!]!
+  snapshot(uniqueId: String!): SnapshotNode
+
+  """Retrieve snapshot information."""
+  snapshots: [SnapshotNode!]!
+  source(uniqueId: String!): SourceNode
+
+  """Retrieve source information."""
+  sources(
+    """The database where this table/view lives"""
+    database: String
+
+    """The identifier of this table/view"""
+    identifier: String
+
+    """The schema where this table/view lives"""
+    schema: String
+  ): [SourceNode!]!
+
+  """Retrieve test information."""
+  test(
+    """The unique ID of this particular test"""
+    uniqueId: String!
+  ): TestNode
+
+  """Retrieve test information."""
+  tests: [TestNode!]!
+}
+
+input LineageFilter {
+  """Filter results by resources that have the specified column names"""
+  columnNames: [String!]
+
+  """Filter out results by exclude selector statement. Experimental."""
+  exclude: String
+
+  """Filter results by select statement. Experimental."""
+  select: String
+
+  """Filter results by tags"""
+  tags: [String!]
+
+  """Types of resources to enumerate"""
+  types: [ResourceNodeType!]
+
+  """unique ids of nodes to return"""
+  uniqueIds: [String!]
+}
+
+"""
+Common lineage node fields for nodes that appear in the DAG (excl. macros and documentation)
+"""
+interface LineageGraphNode implements LineageNode {
+  """The access level of this node"""
+  access: String @deprecated(reason: "Use ...on syntax from specific types")
+
+  """The package alias of this node"""
+  alias: String @deprecated(reason: "Use ...on syntax from specific types")
+
+  """The package database of this node"""
+  database: String @deprecated(reason: "Use ...on syntax from specific types")
+
+  """Original file path to the node"""
+  filePath: String!
+
+  """The fully qualified name of this node"""
+  fqn: [String!]!
+
+  """The group this node belongs to"""
+  group: String @deprecated(reason: "Use ...on syntax from specific types")
+
+  """
+  True if the node matches a selector method, rather than matching via graph traversal
+  """
+  matchesMethod: Boolean
+
+  """The materialization type"""
+  materializationType: String @deprecated(reason: "Use ...on syntax from specific types")
+
+  """The user-supplied name of this particular node"""
+  name: String
+
+  """the nodes this node depends on"""
+  parentIds: [String!]! @deprecated(reason: "Use ...on syntax from specific types")
+
+  """The project id of this node"""
+  projectId: BigInt!
+
+  """the public nodes this resource depends on"""
+  publicParentIds: [String!]!
@deprecated(reason: "Use ...on syntax from specific types") + + """The resource type of this node""" + resourceType: ResourceNodeType! + + """The package schema of this node""" + schema: String @deprecated(reason: "Use ...on syntax from specific types") + + """The tags associated with this node""" + tags: [String!]! + + """The unique ID of this node""" + uniqueId: String! + + """The version of this node""" + version: String @deprecated(reason: "Use ...on syntax from specific types") +} + +"""Common lineage node fields""" +interface LineageNode { + """The access level of this node""" + access: String @deprecated(reason: "Use ...on syntax from specific types") + + """The package alias of this node""" + alias: String @deprecated(reason: "Use ...on syntax from specific types") + + """The package database of this node""" + database: String @deprecated(reason: "Use ...on syntax from specific types") + + """Original file path to the node""" + filePath: String! + + """The group this node belongs to""" + group: String @deprecated(reason: "Use ...on syntax from specific types") + + """ + True if the node matches a selector method, rather than matching via graph traversal + """ + matchesMethod: Boolean + + """The materialization type""" + materializationType: String @deprecated(reason: "Use ...on syntax from specific types") + + """The user-supplied name of this particular node""" + name: String + + """the nodes this node depends on""" + parentIds: [String!]! @deprecated(reason: "Use ...on syntax from specific types") + + """The project id of this node""" + projectId: BigInt! + + """the public nodes this resource depends on""" + publicParentIds: [String!]! @deprecated(reason: "Use ...on syntax from specific types") + + """The resource type of this node""" + resourceType: ResourceNodeType! + + """The package schema of this node""" + schema: String @deprecated(reason: "Use ...on syntax from specific types") + + """The tags associated with this node""" + tags: [String!]! + + """The unique ID of this node""" + uniqueId: String! + + """The version of this node""" + version: String @deprecated(reason: "Use ...on syntax from specific types") +} + +"""Common lineage node fields for nodes that can be executed""" +interface LineageNodeExecutable implements LineageNode { + """The access level of this node""" + access: String @deprecated(reason: "Use ...on syntax from specific types") + + """The package alias of this node""" + alias: String @deprecated(reason: "Use ...on syntax from specific types") + + """The package database of this node""" + database: String @deprecated(reason: "Use ...on syntax from specific types") + + """Original file path to the node""" + filePath: String! + + """The group this node belongs to""" + group: String @deprecated(reason: "Use ...on syntax from specific types") + + """The last run status of this node""" + lastRunStatus: String + + """ + True if the node matches a selector method, rather than matching via graph traversal + """ + matchesMethod: Boolean + + """The materialization type""" + materializationType: String @deprecated(reason: "Use ...on syntax from specific types") + + """The user-supplied name of this particular node""" + name: String + + """the nodes this node depends on""" + parentIds: [String!]! @deprecated(reason: "Use ...on syntax from specific types") + + """The project id of this node""" + projectId: BigInt! + + """the public nodes this resource depends on""" + publicParentIds: [String!]! 
@deprecated(reason: "Use ...on syntax from specific types") + + """The resource type of this node""" + resourceType: ResourceNodeType! + + """The package schema of this node""" + schema: String @deprecated(reason: "Use ...on syntax from specific types") + + """The tags associated with this node""" + tags: [String!]! + + """The unique ID of this node""" + uniqueId: String! + + """The version of this node""" + version: String @deprecated(reason: "Use ...on syntax from specific types") +} + +"""Common lineage node fields for nodes that can be executed""" +interface LineageNodeTestable implements LineageNode { + """The access level of this node""" + access: String @deprecated(reason: "Use ...on syntax from specific types") + + """The package alias of this node""" + alias: String @deprecated(reason: "Use ...on syntax from specific types") + + """The package database of this node""" + database: String @deprecated(reason: "Use ...on syntax from specific types") + + """Original file path to the node""" + filePath: String! + + """The group this node belongs to""" + group: String @deprecated(reason: "Use ...on syntax from specific types") + + """ + True if the node matches a selector method, rather than matching via graph traversal + """ + matchesMethod: Boolean + + """The materialization type""" + materializationType: String @deprecated(reason: "Use ...on syntax from specific types") + + """The user-supplied name of this particular node""" + name: String + + """the nodes this node depends on""" + parentIds: [String!]! @deprecated(reason: "Use ...on syntax from specific types") + + """The project id of this node""" + projectId: BigInt! + + """the public nodes this resource depends on""" + publicParentIds: [String!]! @deprecated(reason: "Use ...on syntax from specific types") + + """The resource type of this node""" + resourceType: ResourceNodeType! + + """The package schema of this node""" + schema: String @deprecated(reason: "Use ...on syntax from specific types") + + """The tags associated with this node""" + tags: [String!]! + + """The unique ID of this node""" + uniqueId: String! + + """The version of this node""" + version: String @deprecated(reason: "Use ...on syntax from specific types") + + """The worst test status of this node""" + worstTestStatus: String +} + +"""Common lineage node fields for nodes with parents""" +interface LineageNodeWithParents implements LineageNode { + """The access level of this node""" + access: String @deprecated(reason: "Use ...on syntax from specific types") + + """The package alias of this node""" + alias: String @deprecated(reason: "Use ...on syntax from specific types") + + """The package database of this node""" + database: String @deprecated(reason: "Use ...on syntax from specific types") + + """Original file path to the node""" + filePath: String! + + """The group this node belongs to""" + group: String @deprecated(reason: "Use ...on syntax from specific types") + + """ + True if the node matches a selector method, rather than matching via graph traversal + """ + matchesMethod: Boolean + + """The materialization type""" + materializationType: String @deprecated(reason: "Use ...on syntax from specific types") + + """The user-supplied name of this particular node""" + name: String + + """the nodes this node depends on""" + parentIds: [String!]! + + """The project id of this node""" + projectId: BigInt! + + """the public nodes this resource depends on""" + publicParentIds: [String!]! 
@deprecated(reason: "Use ...on syntax from specific types") + + """The resource type of this node""" + resourceType: ResourceNodeType! + + """The package schema of this node""" + schema: String @deprecated(reason: "Use ...on syntax from specific types") + + """The tags associated with this node""" + tags: [String!]! + + """The unique ID of this node""" + uniqueId: String! + + """The version of this node""" + version: String @deprecated(reason: "Use ...on syntax from specific types") +} + +type MacroArguments { + description: String + name: String + type: String +} + +input MacroDefinitionFilter { + """Filter results by package""" + packageName: String + + """unique ids of nodes to return""" + uniqueIds: [String!] +} + +type MacroDefinitionNestedNode implements EnvironmentAppliedNestedNode & EnvironmentDefinitionNestedNode { + """dbt version""" + dbtVersion: String + + """The user-supplied description for this node""" + description: String + + """Original file path to the node""" + filePath: String! + + """The fully qualified name of the node""" + fqn: [String!] + + """The unique ID of the job in dbt Cloud that this node was generated for""" + jobId: BigInt + + """The user-supplied name of this particular node""" + name: String + + """The package name of this macro""" + packageName: String + + """The patch path of this macro""" + patchPath: String + + """The resource type of this node""" + resourceType: String! + + """ + The timestamp when the run step (dbt run) was completed, ISO formatted timestamp + """ + runGeneratedAt: DateTime + + """The unique ID of the run in dbt Cloud that generated this node""" + runId: BigInt + + """The unique ID of this node""" + uniqueId: String! +} + +type MacroDefinitionNode implements EnvironmentAppliedNode & EnvironmentDefinitionNode { + """The account ID of this node""" + accountId: BigInt! + arguments: [MacroArguments!]! + + """dbt version""" + dbtVersion: String + + """The user-supplied description for this node""" + description: String + + """The environment ID of this node""" + environmentId: BigInt! + + """Original file path to the node""" + filePath: String! + + """The unique ID of the job in dbt Cloud that this node was generated for""" + jobId: BigInt + + """The sql that defines this macro""" + macroSql: String! + + """The key-value store containing metadata relevant to this node""" + meta: JSONObject + + """The user-supplied name of this particular node""" + name: String + + """The package name of this macro""" + packageName: String + + """the nodes this macro depends on""" + parents: [EnvironmentDefinitionNestedNode!]! + + """The patch path of this macro""" + patchPath: String + + """The project ID of this node""" + projectId: BigInt! + + """The resource type of this node""" + resourceType: String! + + """ + The timestamp when the run step (dbt run) was completed, ISO formatted timestamp + """ + runGeneratedAt: DateTime + + """The unique ID of the run in dbt Cloud that generated this node""" + runId: BigInt + + """The tags associated with this node""" + tags: [String!]! + + """The unique ID of this node""" + uniqueId: String! +} + +type MacroDefinitionNodeConnection { + edges: [MacroDefinitionNodeEdge!]! + pageInfo: PageInfo! + totalCount: Int! +} + +type MacroDefinitionNodeEdge { + cursor: String! + node: MacroDefinitionNode! 
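+
+# The deprecations above steer consumers from the generic interface fields to
+# inline fragments; a sketch of that "...on" pattern against definition-state
+# lineage. Comment only; id and tag value are placeholders.
+#
+# query Lineage {
+#   environment(id: 1) {
+#     definition {
+#       lineage(filter: { tags: ["placeholder_tag"] }) {
+#         uniqueId
+#         resourceType
+#         ... on ModelLineageNode { materializationType publicParentIds }
+#         ... on ExposureLineageNode { name }
+#       }
+#     }
+#   }
+# }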
+} + +"""Macro Lineage Object""" +type MacroLineageNode implements LineageNode & LineageNodeWithParents { + """The access level of this node""" + access: String @deprecated(reason: "Use ...on syntax from specific types") + + """The package alias of this node""" + alias: String @deprecated(reason: "Use ...on syntax from specific types") + + """The package database of this node""" + database: String @deprecated(reason: "Use ...on syntax from specific types") + + """Original file path to the node""" + filePath: String! + + """The group this node belongs to""" + group: String @deprecated(reason: "Use ...on syntax from specific types") + + """ + True if the node matches a selector method, rather than matching via graph traversal + """ + matchesMethod: Boolean + + """The materialization type""" + materializationType: String @deprecated(reason: "Use ...on syntax from specific types") + + """The user-supplied name of this particular node""" + name: String + + """the nodes this node depends on""" + parentIds: [String!]! @deprecated(reason: "Use ...on syntax from specific types") + + """The project id of this node""" + projectId: BigInt! + + """the public nodes this resource depends on""" + publicParentIds: [String!]! @deprecated(reason: "Use ...on syntax from specific types") + + """The resource type of this node""" + resourceType: ResourceNodeType! + + """The package schema of this node""" + schema: String @deprecated(reason: "Use ...on syntax from specific types") + + """The tags associated with this node""" + tags: [String!]! + + """The unique ID of this node""" + uniqueId: String! + + """The version of this node""" + version: String @deprecated(reason: "Use ...on syntax from specific types") +} + +type MacroNode implements CloudArtifactInterface & NodeInterface { + """ + The unique ID of the account in dbt Cloud that this node was generated for + """ + accountId: BigInt! + + """The version of dbt used to produce this node""" + dbtVersion: String + + """The list of nodes this macro depends on""" + dependsOn: [String!]! + + """The user-supplied description for this node""" + description: String + + """ + The unique ID of the account in dbt Cloud that this node was generated for + """ + environmentId: BigInt! + + """ + The unique ID of the account in dbt Cloud that this node was generated for + """ + jobId: BigInt! + + """The sql that defines this macro""" + macroSql: String + + """The key-value store containing metadata relevant to this node""" + meta: JSONObject + + """The name of this macro""" + name: String + + """The label associated with this macro""" + originalFilePath: String + + """The package name of this macro""" + packageName: String + + """The label associated with this macro""" + path: String + + """ + The unique ID of the account in dbt Cloud that this node was generated for + """ + projectId: BigInt! + + """The resource type of this node""" + resourceType: String! + + """The label associated with this macro""" + rootPath: String + + """ + The unique ID of the account in dbt Cloud that this node was generated for + """ + runId: BigInt! + + """The tags associated with this node""" + tags: [String!] + + """The unique ID of this node""" + uniqueId: String! +} + +type MetricDefinitionNestedNode implements EnvironmentAppliedNestedNode & EnvironmentDefinitionNestedNode { + """dbt version""" + dbtVersion: String + + """The user-supplied description for this node""" + description: String + + """Original file path to the node""" + filePath: String! 
+ + """The filter of this metric""" + filter: JSONObject + + """The formula of this metric""" + formula: String + + """The fully qualified name of the node""" + fqn: [String!] + + """The group this metric belongs to""" + group: String + + """The job ID of the last run containing definition of this metric node""" + jobDefinitionId: BigInt + + """The user-supplied name of this particular node""" + name: String + + """The package name of this metric""" + packageName: String + + """The patch path of this metric""" + patchPath: String + + """The resource type of this node""" + resourceType: String! + + """ + The timestamp when the run step (dbt run) was completed, ISO formatted timestamp + """ + runGeneratedAt: DateTime + + """The run ID of the last run containing definition of this metric node""" + runId: BigInt + + """The type of this metric node""" + type: String + + """The type params of the metric""" + typeParams: JSONObject + + """The unique ID of this node""" + uniqueId: String! +} + +type MetricDefinitionNode implements EnvironmentAppliedNode & EnvironmentDefinitionNode { + """The account ID of this node""" + accountId: BigInt! + + """The ancestors of this metric""" + ancestors: [EnvironmentDefinitionNestedNode!]! + + """The direct children of this metric""" + children: [EnvironmentDefinitionNestedNode!]! + + """dbt version""" + dbtVersion: String + + """The user-supplied description for this node""" + description: String + + """The environment ID of this node""" + environmentId: BigInt! + + """Original file path to the node""" + filePath: String! + + """The filter of this metric""" + filter: JSONObject + + """The formula of this metric""" + formula: String + + """The fully qualified name of this metric""" + fqn: [String!]! + + """The group this metric belongs to""" + group: String + + """The job ID of the last run containing definition of this metric node""" + jobDefinitionId: BigInt + + """The key-value store containing metadata relevant to this node""" + meta: JSONObject + + """The user-supplied name of this particular node""" + name: String + + """The package name of this metric""" + packageName: String + + """The direct parents of this metric""" + parents: [EnvironmentDefinitionNestedNode!]! + + """The patch path of this metric""" + patchPath: String + + """The project ID of this node""" + projectId: BigInt! + + """The resource type of this node""" + resourceType: String! + + """ + The timestamp when the run step (dbt run) was completed, ISO formatted timestamp + """ + runGeneratedAt: DateTime + + """The run ID of the last run containing definition of this metric node""" + runId: BigInt + + """The tags associated with this node""" + tags: [String!]! + + """The type of this metric node""" + type: String + + """The type params of the metric""" + typeParams: JSONObject + + """The unique ID of this node""" + uniqueId: String! +} + +type MetricDefinitionNodeConnection { + edges: [MetricDefinitionNodeEdge!]! + pageInfo: PageInfo! + totalCount: Int! +} + +type MetricDefinitionNodeEdge { + cursor: String! + node: MetricDefinitionNode! 
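+
+# Sketch: metric definitions for an environment, using the
+# MetricDefinitionNode fields above. Comment only; id is a placeholder.
+#
+# query Metrics {
+#   environment(id: 1) {
+#     definition {
+#       metrics(first: 20) {
+#         edges { node { name type formula group } }
+#       }
+#     }
+#   }
+# }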
+} + +type MetricFilter { + """The field this filter applies to""" + field: String + + """The filter operator""" + operator: String + + """value""" + value: String +} + +"""Metric Lineage Object""" +type MetricLineageNode implements LineageGraphNode & LineageNode & LineageNodeWithParents { + """The access level of this node""" + access: String @deprecated(reason: "Use ...on syntax from specific types") + + """The package alias of this node""" + alias: String @deprecated(reason: "Use ...on syntax from specific types") + + """The package database of this node""" + database: String @deprecated(reason: "Use ...on syntax from specific types") + + """Original file path to the node""" + filePath: String! + + """The fully qualified name of this node""" + fqn: [String!]! + + """The group this node belongs to""" + group: String @deprecated(reason: "Use ...on syntax from specific types") + + """ + True if the node matches a selector method, rather than matching via graph traversal + """ + matchesMethod: Boolean + + """The materialization type""" + materializationType: String @deprecated(reason: "Use ...on syntax from specific types") + + """The user-supplied name of this particular node""" + name: String + + """the nodes this node depends on""" + parentIds: [String!]! @deprecated(reason: "Use ...on syntax from specific types") + + """The project id of this node""" + projectId: BigInt! + + """the public nodes this resource depends on""" + publicParentIds: [String!]! @deprecated(reason: "Use ...on syntax from specific types") + + """The resource type of this node""" + resourceType: ResourceNodeType! + + """The package schema of this node""" + schema: String @deprecated(reason: "Use ...on syntax from specific types") + + """The tags associated with this node""" + tags: [String!]! + + """The unique ID of this node""" + uniqueId: String! + + """The version of this node""" + version: String @deprecated(reason: "Use ...on syntax from specific types") +} + +type MetricNode implements CloudArtifactInterface & NodeInterface { + """ + The unique ID of the account in dbt Cloud that this node was generated for + """ + accountId: BigInt! + + """The calculation method of this metric""" + calculation_method: String + + """The version of dbt used to produce this node""" + dbtVersion: String + + """The list of nodes this macro depends on""" + dependsOn: [String!]! + + """The user-supplied description for this node""" + description: String + + """A list of dimensions to group or filter the metric by""" + dimensions: [String!]! + + """ + The unique ID of the account in dbt Cloud that this node was generated for + """ + environmentId: BigInt! + + """ + The name of the environment in dbt Cloud that this node was generated for + """ + environmentName: String + + """The expression to aggregate or calculate over""" + expression: String + + """A list of filters to apply before calculating the metric""" + filters: [MetricFilter!]! + + """ + The unique ID of the account in dbt Cloud that this node was generated for + """ + jobId: BigInt! + + """The label associated with this metric""" + label: String + + """The key-value store containing metadata relevant to this node""" + meta: JSONObject + + """The model associated with this metric""" + model: ModelNode + + """The user-supplied name of this particular node""" + name: String + + """The package name of this macro""" + packageName: String + + """ + The unique ID of the account in dbt Cloud that this node was generated for + """ + projectId: BigInt! 
+ + """The resource type of this node""" + resourceType: String! + + """ + The unique ID of the account in dbt Cloud that this node was generated for + """ + runId: BigInt! + + """The expression to aggregate or calculate over""" + sql: String + + """The tags associated with this node""" + tags: [String!] + + """One or more "grains" at which the metric can be evaluated""" + timeGrains: [String!] + + """The time-based component of the metric""" + timestamp: String + + """The type of this metric""" + type: String + + """The unique ID of this node""" + uniqueId: String! +} + +"""Filter for applied models""" +input ModelAppliedFilter { + """Filter models by access level""" + access: AccessLevel + + """Filter nodes by database""" + database: String + + """An optional configuration for grouping models""" + group: String + + """Filter nodes by identifier""" + identifier: String + lastRunStatus: RunStatus + + """The layer of the DAG that the model belongs to""" + modelingLayer: String + + """Filter results by package""" + packageName: String + + """Filter nodes by schema""" + schema: String + + """Filter results by tags""" + tags: [String!] + + """unique ids of nodes to return""" + uniqueIds: [String!] +} + +type ModelAppliedStateNestedNode implements EnvironmentAppliedNestedNode { + """The access level of this model""" + access: String + + """The package alias of this model""" + alias: String + + """Whether or not this model has contracts on it""" + contractEnforced: Boolean + + """The package database of this model""" + database: String + + """The dbt version that ran this model""" + dbtVersion: String + + """The user-supplied description for this node""" + description: String + + """The execution info for this node""" + executionInfo: ModelExecutionInfoNode + + """Original file path to the node""" + filePath: String! + + """The fully qualified name of the node""" + fqn: [String!] + + """The group this model belongs to""" + group: String + + """The latest version of this model""" + latestVersion: String + + """The materialized type of this model""" + materializedType: String + + """The layer of the DAG that the model belongs to""" + modelingLayer: String + + """The user-supplied name of this particular node""" + name: String + + """The package name of this model""" + packageName: String + + """The patch path of this snapshot""" + patchPath: String + + """The release version of this model""" + releaseVersion: ReleaseVersion + + """The resource type of this node""" + resourceType: String! + + """The package schema of this model""" + schema: String + + """test status dictionary""" + testStatuses: JSONObject! + + """The account ID of this node""" + uniqueId: String! + + """The version of this model""" + version: String +} + +type ModelAppliedStateNode implements EnvironmentAppliedNode { + """The access level of this model""" + access: String + + """The account ID of this node""" + accountId: BigInt! + + """The package alias of this model""" + alias: String + + """All the parents of this model""" + ancestors( + """Filter ancestors to specific node types""" + types: [AncestorNodeType!]! + ): [EnvironmentAppliedNestedNode!]! + + """The catalog info for this node""" + catalog: CatalogNode + + """The direct children nodes of this model""" + children: [EnvironmentAppliedNestedNode!]! 
+ + """The compiled code for this model""" + compiledCode: String + + """The key-value store of configuration for this node""" + config: JSONObject + + """The constraints on this model""" + constraints: [ModelLevelConstraint!]! + + """Whether or not this model has contracts on it""" + contractEnforced: Boolean + + """The package database of this model""" + database: String + + """The dbt version that ran this model""" + dbtVersion: String + + """The deprecation date of this model""" + deprecationDate: DateTime + + """The package description of this model""" + description: String + + """The environment ID of this node""" + environmentId: BigInt! + + """The execution info for this node""" + executionInfo: ModelExecutionInfoNode + + """Original file path to the node""" + filePath: String! + + """The fully qualified name of this model""" + fqn: [String!]! + + """The group this model belongs to""" + group: String + + """The language of this model""" + language: String + + """The latest version of this model""" + latestVersion: String + + """The materialized type of this model""" + materializedType: String + + """The key-value store containing metadata relevant to this node""" + meta: JSONObject + + """The layer of the DAG that the model belongs to""" + modelingLayer: String + + """The user-supplied name of this particular node""" + name: String + + """The package name of this model""" + packageName: String + + """The packages of this model""" + packages: [String!]! + + """The direct parent nodes of this model""" + parents: [EnvironmentAppliedNestedNode!]! + + """The patch path of this snapshot""" + patchPath: String + + """The project ID of this node""" + projectId: BigInt! + + """The raw code for this model""" + rawCode: String + + """The release version of this model""" + releaseVersion: ReleaseVersion + + """The resource type of this node""" + resourceType: String! + + """The package schema of this model""" + schema: String + + """The tags associated with this node""" + tags: [String!]! + + """tests for this model""" + tests: [TestAppliedStateNestedNode!]! + + """The unique ID of this node""" + uniqueId: String! + + """The version of this model""" + version: String + + """The versions of this model. Only returned if requesting 1 model.""" + versions: [ModelVersion!]! +} + +type ModelAppliedStateNodeConnection { + edges: [ModelAppliedStateNodeEdge!]! + pageInfo: PageInfo! + totalCount: Int! +} + +type ModelAppliedStateNodeEdge { + cursor: String! + node: ModelAppliedStateNode! +} + +input ModelDefinitionFilter { + """Filter models by access level""" + access: AccessLevel + + """Filter nodes by database""" + database: String + + """An optional configuration for grouping models""" + group: String + + """Filter nodes by identifier""" + identifier: String + + """The layer of the DAG that the model belongs to""" + modelingLayer: String + + """Filter nodes by schema""" + schema: String + + """Filter results by tags""" + tags: [String!] + + """unique ids of nodes to return""" + uniqueIds: [String!] +} + +type ModelDefinitionNestedNode implements EnvironmentDefinitionNestedNode { + """The access level of this model""" + access: String + + """The package alias of this model""" + alias: String + + """Whether or not this model has contracts on it""" + contractEnforced: Boolean + + """The package database of this model""" + database: String + + """The user-supplied description for this node""" + description: String + + """Original file path to the node""" + filePath: String! 
+ + """The group this model belongs to""" + group: String + + """The job ID of the last run containing definition of this snapshot node""" + jobDefinitionId: BigInt + + """The latest version of this model""" + latestVersion: String + + """The materialized type of this model""" + materializedType: String + + """The user-supplied name of this particular node""" + name: String + + """The package name of this model""" + packageName: String + + """The patch path of this snapshot""" + patchPath: String + + """The release version of this model""" + releaseVersion: ReleaseVersion + + """The resource type of this node""" + resourceType: String! + + """ + The timestamp when the run step (dbt run) was completed, ISO formatted timestamp + """ + runGeneratedAt: DateTime + + """The run ID of the last run containing definition of this snapshot node""" + runId: BigInt + + """The package schema of this model""" + schema: String + + """The unique ID of this node""" + uniqueId: String! + + """The version of this model""" + version: String +} + +type ModelDefinitionNode implements EnvironmentDefinitionNode { + """The access level of this model""" + access: String + + """The account ID of this node""" + accountId: BigInt! + + """The package alias of this model""" + alias: String + + """All the parents of this model""" + ancestors( + """Filter ancestors to specific node types""" + types: [AncestorNodeType!]! + ): [EnvironmentDefinitionNestedNode!]! + + """The direct children nodes of this model""" + children: [EnvironmentDefinitionNestedNode!]! + + """The constraints on this model""" + constraints: [ModelLevelConstraint!]! + + """Whether or not this model has contracts on it""" + contractEnforced: Boolean + + """The package database of this model""" + database: String + + """The deprecation date of this model""" + deprecationDate: DateTime + + """The user-supplied description for this node""" + description: String + + """The environment ID of this node""" + environmentId: BigInt! + + """Original file path to the node""" + filePath: String! + + """The fully qualified name of this model""" + fqn: [String!]! + + """The group this model belongs to""" + group: String + + """The job ID of the last run containing definition of this snapshot node""" + jobDefinitionId: BigInt + + """The language of this model""" + language: String + + """The latest version of this model""" + latestVersion: String + + """The materialized type of this model""" + materializedType: String + + """The key-value store containing metadata relevant to this node""" + meta: JSONObject + + """The layer of the DAG that the model belongs to""" + modelingLayer: String + + """The user-supplied name of this particular node""" + name: String + + """The package name of this model""" + packageName: String + + """The packages of this model""" + packages: [String!]! + + """The direct parent nodes of this model""" + parents: [EnvironmentDefinitionNestedNode!]! + + """The patch path of this snapshot""" + patchPath: String + + """The project ID of this node""" + projectId: BigInt! + + """The raw code for this model""" + rawCode: String + + """The release version of this model""" + releaseVersion: ReleaseVersion + + """The resource type of this node""" + resourceType: String! 
+ + """ + The timestamp when the run step (dbt run) was completed, ISO formatted timestamp + """ + runGeneratedAt: DateTime + + """The run ID of the last run containing definition of this snapshot node""" + runId: BigInt + + """The package schema of this model""" + schema: String + + """The tags associated with this node""" + tags: [String!]! + + """tests for this model""" + tests: [TestDefinitionNestedNode!]! + + """The unique ID of this node""" + uniqueId: String! + + """The version of this model""" + version: String +} + +type ModelDefinitionNodeConnection { + edges: [ModelDefinitionNodeEdge!]! + pageInfo: PageInfo! + totalCount: Int! +} + +type ModelDefinitionNodeEdge { + cursor: String! + node: ModelDefinitionNode! +} + +type ModelExecutionInfoNode { + """The ISO timestamp when this model completed compilation""" + compileCompletedAt: DateTime + + """The ISO timestamp when this model began compilation""" + compileStartedAt: DateTime + + """The ISO timestamp when this model finished running""" + executeCompletedAt: DateTime + + """The ISO timestamp when this model began running""" + executeStartedAt: DateTime + + """The total time elapsed during the execution of this model""" + executionTime: Float + + """ + The job ID of the last run that either succeeded or failed in building this model node + """ + lastJobDefinitionId: BigInt + + """ + The error message if the last run failed in building this model, else null + """ + lastRunError: String + + """ + The timestamp when the last run that either succeeded or failed in building this model was completed, ISO formatted timestamp + """ + lastRunGeneratedAt: DateTime + + """ + The run ID of the last run that either succeeded or failed in building this model node + """ + lastRunId: BigInt + + """ + The status of the last run that either succeeded or failed in building this model + """ + lastRunStatus: RunStatus + + """The job ID of the last successful run that built this model node""" + lastSuccessJobDefinitionId: BigInt + + """The run ID of the last successful run that built this model node""" + lastSuccessRunId: BigInt + + """ + The elapsed time of the specific run step (dbt run) that generated this model node + """ + runElapsedTime: Float + + """ + The timestamp when the run step (dbt run) was completed, ISO formatted timestamp + """ + runGeneratedAt: DateTime +} + +type ModelLevelConstraint { + """Columns this constraint pertains to""" + columns: [String!] + + """Constraint qualifying expression if provided""" + expression: String + + """The name of this constraint""" + name: String + + """Type of constraint""" + type: String + + """Warn if unenforced""" + warnUnenforced: Boolean + + """Warn if unsupported""" + warnUnsupported: Boolean +} + +"""Model Lineage Object""" +type ModelLineageNode implements LineageGraphNode & LineageNode & LineageNodeExecutable & LineageNodeTestable & LineageNodeWithParents { + """The access level of this node""" + access: String + + """The package alias of this node""" + alias: String + + """The package database of this node""" + database: String + + """Original file path to the node""" + filePath: String! + + """The fully qualified name of this node""" + fqn: [String!]! 
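+
+# Sketch: last-run execution details per applied model via the
+# ModelExecutionInfoNode defined above. Comment only; the id and schema
+# filter are placeholders.
+#
+# query ModelRunInfo {
+#   environment(id: 1) {
+#     applied {
+#       models(first: 20, filter: { schema: "analytics" }) {
+#         edges {
+#           node {
+#             uniqueId
+#             executionInfo { lastRunStatus lastRunError executeCompletedAt executionTime }
+#           }
+#         }
+#       }
+#     }
+#   }
+# }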
+
+  """The group this node belongs to"""
+  group: String
+
+  """The last run status of this node"""
+  lastRunStatus: String
+
+  """
+  True if the node matches a selector method, rather than matching via graph traversal
+  """
+  matchesMethod: Boolean
+
+  """The materialization type"""
+  materializationType: String
+
+  """The layer of the DAG that the model belongs to"""
+  modelingLayer: String
+
+  """The user-supplied name of this particular node"""
+  name: String
+
+  """the nodes this node depends on"""
+  parentIds: [String!]! @deprecated(reason: "Use ...on syntax from specific types")
+
+  """The project id of this node"""
+  projectId: BigInt!
+
+  """the public nodes this model depends on"""
+  publicParentIds: [String!]!
+
+  """The resource type of this node"""
+  resourceType: ResourceNodeType!
+
+  """The package schema of this node"""
+  schema: String
+
+  """The tags associated with this node"""
+  tags: [String!]!
+
+  """The unique ID of this node"""
+  uniqueId: String!
+
+  """The version of this node"""
+  version: String
+
+  """The worst test status of this node"""
+  worstTestStatus: String
+}
+
+type ModelNode implements CloudArtifactInterface & NodeInterface {
+  """The access level of this model"""
+  access: String
+
+  """
+  The unique ID of the account in dbt Cloud that this node was generated for
+  """
+  accountId: BigInt!
+
+  """The alias of this model"""
+  alias: String
+
+  """The args passed to the dbt step which generated this model"""
+  args: String
+
+  """The list of nodes that depend on this model"""
+  childrenL1: [String!]!
+
+  """The columns of this model"""
+  columns: [CatalogColumn!]
+
+  """The comment on this model"""
+  comment: String
+
+  """The ISO timestamp when the model compilation completed"""
+  compileCompletedAt: DateTime
+
+  """The ISO timestamp when the model compilation started"""
+  compileStartedAt: DateTime
+
+  """The compiled code of this model"""
+  compiledCode: String
+
+  """The compiled sql of this model"""
+  compiledSql: String
+
+  """The database this model is defined in"""
+  database: String
+
+  """The dbt group id of this model"""
+  dbtGroup: String
+
+  """The version of dbt used to produce this node"""
+  dbtVersion: String
+
+  """The list of nodes this model depends on"""
+  dependsOn: [String!]!
+
+  """The user-supplied description for this node"""
+  description: String
+
+  """
+  The unique ID of the account in dbt Cloud that this node was generated for
+  """
+  environmentId: BigInt!
+
+  """The error message if there was an error else null"""
+  error: String
+
+  """The ISO timestamp when the model execution completed"""
+  executeCompletedAt: DateTime
+
+  """The ISO timestamp when the model execution started"""
+  executeStartedAt: DateTime
+
+  """The total time elapsed during the execution of this model"""
+  executionTime: Float
+
+  """The identifier of the run step that generated this model"""
+  invocationId: String
+
+  """
+  The unique ID of the account in dbt Cloud that this node was generated for
+  """
+  jobId: BigInt!
+
+  """The language of this model"""
+  language: String
+
+  """The materialized type of this model"""
+  materializedType: String
+
+  """The key-value store containing metadata relevant to this node"""
+  meta: JSONObject
+
+  """The user-supplied name of this particular node"""
+  name: String
+
+  """The owner of this model"""
+  owner: String
+
+  """The package name of this model"""
+  packageName: String
+
+  """The packages of this model"""
+  packages: [String!]
+
+  """Retrieve parents information."""
+  parentsModels: [ModelNode!]!
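+
+# Sketch: recent runs of a single model through AppliedState.modelHistoricalRuns,
+# which returns the ModelNode type being defined here. Comment only; the
+# environment id and model unique id are placeholders.
+#
+# query ModelHistory {
+#   environment(id: 1) {
+#     applied {
+#       modelHistoricalRuns(uniqueId: "model.my_project.my_model", lastRunCount: 5) {
+#         runId
+#         status
+#         error
+#         executeCompletedAt
+#         executionTime
+#       }
+#     }
+#   }
+# }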
+ + """Retrieve parents source information.""" + parentsSources: [SourceNode!]! + + """ + The unique ID of the account in dbt Cloud that this node was generated for + """ + projectId: BigInt! + + """The raw code of this model""" + rawCode: String + + """The raw sql of this model""" + rawSql: String + + """The resource type of this node""" + resourceType: String! + + """ + The elapsed time of the specific run step (dbt run) that generated this model node + """ + runElapsedTime: Float + + """ + The timestamp when the run step (dbt run) was completed, ISO formatted timestamp + """ + runGeneratedAt: DateTime + + """ + The unique ID of the account in dbt Cloud that this node was generated for + """ + runId: BigInt! + + """The run results of this model""" + runResults: [RunInfoNode!]! + + """The schema this model is defined in""" + schema: String + + """Whether this model was skipped""" + skip: Boolean + + """The stats of this model""" + stats: [CatalogStat!]! + + """The database reported status of this model""" + status: String + + """The tags associated with this node""" + tags: [String!] + + """Retrieve test information.""" + tests: [TestNode!]! + + """The thread that ran the execution of this model""" + threadId: String + + """The type of this model""" + type: String + + """The unique ID of this node""" + uniqueId: String! +} + +type ModelVersion { + """The version of this model""" + name: String! + + """The release version of this model""" + releaseVersion: String! + + """The unique ID of this model""" + uniqueId: String! +} + +interface NodeInterface { + """The version of dbt used to produce this node""" + dbtVersion: String + + """The user-supplied description for this node""" + description: String + + """The key-value store containing metadata relevant to this node""" + meta: JSONObject + + """The user-supplied name of this particular node""" + name: String + + """The resource type of this node""" + resourceType: String! + + """The tags associated with this node""" + tags: [String!] + + """The unique ID of this node""" + uniqueId: String! +} + +"""The type of package resource""" +enum PackageResourceType { + macro + model +} + +type PageInfo { + endCursor: String + hasNextPage: Boolean! + hasPreviousPage: Boolean! + startCursor: String +} + +type Query { + environment( + """The environment id for this model""" + id: BigInt! + ): Environment! + + """Retrieve exposure information.""" + exposure( + """The unique ID of this job""" + jobId: Int! + + """The name of the exposure""" + name: String! + + """The unique ID of the run in dbt-cloud""" + runId: Int + ): ExposureNode @deprecated(reason: "We recommend using the environment query to get the latest state instead.") + + """Retrieve exposure information.""" + exposures( + """The unique ID of this job""" + jobId: Int! + + """The unique ID of the run in dbt-cloud""" + runId: Int + ): [ExposureNode!]! @deprecated(reason: "We recommend using the environment query to get the latest state instead.") + job( + """The unique ID of this job""" + id: BigInt! + + """The unique ID of the run in dbt-cloud""" + runId: BigInt + ): JobNode + macro( + """The unique ID of this job""" + jobId: Int! + + """The unique ID of the run in dbt-cloud""" + runId: Int + uniqueId: String! + ): MacroNode @deprecated(reason: "We recommend using the environment query to get the latest state instead.") + macros( + """The unique ID of this job""" + jobId: Int! + + """The unique ID of the run in dbt-cloud""" + runId: Int + ): [MacroNode!]! 
@deprecated(reason: "We recommend using the environment query to get the latest state instead.") + metric( + """The unique ID of this job""" + jobId: Int! + + """The unique ID of the run in dbt-cloud""" + runId: Int + uniqueId: String! + ): MetricNode @deprecated(reason: "We recommend using the environment query to get the latest state instead.") + + """Retrieve metric information.""" + metrics( + """The unique ID of this job""" + jobId: Int! + + """The unique ID of the run in dbt-cloud""" + runId: Int + ): [MetricNode!]! @deprecated(reason: "We recommend using the environment query to get the latest state instead.") + model( + """The unique ID of this job""" + jobId: Int! + + """The unique ID of the run in dbt-cloud""" + runId: Int + uniqueId: String! + ): ModelNode @deprecated(reason: "We recommend using the environment query to get the latest state instead.") + + """ + Retrieve model information based on environmentId. + This will include any run from any job in the specified environment. + """ + modelByEnvironment( + """The environment id for this model""" + environmentId: Int! + + """The model identifier""" + identifier: String + + """ + Number of last run results where this model was built to return (max of 20) + """ + lastRunCount: Int = 1 + + """The unique ID of this model""" + uniqueId: String + + """If true, return only runs that have catalog information for this model""" + withCatalog: Boolean = false + ): [ModelNode!]! @deprecated(reason: "Use the environment.applied.modelHistoricalRuns query instead.") + + """Retrieve model information.""" + models( + database: String + identifier: String + + """The unique ID of this job""" + jobId: Int! + + """The unique ID of the run in dbt-cloud""" + runId: Int + schema: String + ): [ModelNode!]! @deprecated(reason: "We recommend using the environment query to get the latest state instead.") + seed( + """The unique ID of this job""" + jobId: Int! + + """The unique ID of the run in dbt-cloud""" + runId: Int + uniqueId: String! + ): SeedNode @deprecated(reason: "We recommend using the environment query to get the latest state instead.") + + """Retrieve seed information.""" + seeds( + """The unique ID of this job""" + jobId: Int! + + """The unique ID of the run in dbt-cloud""" + runId: Int + ): [SeedNode!]! @deprecated(reason: "We recommend using the environment query to get the latest state instead.") + + """Retrieve snapshot information.""" + snapshot( + """The unique ID of this job""" + jobId: Int! + + """The unique ID of the run in dbt-cloud""" + runId: Int + + """The unique ID of the snapshot""" + uniqueId: String! + ): SnapshotNode @deprecated(reason: "We recommend using the environment query to get the latest state instead.") + + """Retrieve snapshot information.""" + snapshots( + """The unique ID of this job""" + jobId: Int! + + """The unique ID of the run in dbt-cloud""" + runId: Int + ): [SnapshotNode!]! @deprecated(reason: "We recommend using the environment query to get the latest state instead.") + source( + """The unique ID of this job""" + jobId: Int! + + """The unique ID of the run in dbt-cloud""" + runId: Int + uniqueId: String! + ): SourceNode @deprecated(reason: "We recommend using the environment query to get the latest state instead.") + + """Retrieve source information.""" + sources( + database: String + identifier: String + + """The unique ID of this job""" + jobId: Int! + + """The unique ID of the run in dbt-cloud""" + runId: Int + schema: String + ): [SourceNode!]! 
@deprecated(reason: "We recommend using the environment query to get the latest state instead.") + + """Retrieve test information.""" + test( + """The unique ID of this job""" + jobId: Int! + + """The unique ID of the run in dbt-cloud""" + runId: Int + + """The unique ID of this particular test""" + uniqueId: String! + ): TestNode @deprecated(reason: "We recommend using the environment query to get the latest state instead.") + + """Retrieve test information.""" + tests( + """The unique ID of this job""" + jobId: Int! + + """The unique ID of the run in dbt-cloud""" + runId: Int + ): [TestNode!]! @deprecated(reason: "We recommend using the environment query to get the latest state instead.") +} + +enum ReleaseVersion { + latest + none + old + prerelease +} + +"""The type of resource""" +enum ResourceNodeType { + Exposure + Macro + Metric + Model + Seed + SemanticModel + Snapshot + Source + Test +} + +"""Run information for a model""" +type RunInfoNode { + """The args passed to the dbt step which generated this model""" + args: String + + """The ISO timestamp when the model compilation started""" + compileCompletedAt: DateTime + + """The ISO timestamp when the model compilation started""" + compileStartedAt: DateTime + + """The error message if there was an error else null""" + error: String + + """The ISO timestamp when the model execution completed""" + executeCompletedAt: DateTime + + """The ISO timestamp when the model execution started""" + executeStartedAt: DateTime + + """The total time elapsed during the execution of this model""" + executionTime: Float + + """The identifier of the run step that generated this model""" + invocationId: String + + """ + The elapsed time of the specific run step (dbt run) that generated this model node + """ + runElapsedTime: Float + + """ + The timestamp when the run step (dbt run) was completed, ISO formatted timestamp + """ + runGeneratedAt: DateTime + + """Whether this model was skipped""" + skip: Boolean + + """The database reported status of this model""" + status: String + + """The thread that ran the execution of this model""" + threadId: String +} + +"""Status of a run""" +enum RunStatus { + error + skipped + success +} + +type SeedAppliedStateNestedNode implements EnvironmentAppliedNestedNode { + """The package alias of this seed""" + alias: String + + """The package database of this seed""" + database: String + + """dbt version""" + dbtVersion: String + + """The description of this seed""" + description: String + + """The execution info for this seed""" + executionInfo: SeedExecutionInfoNode! + + """Original file path to the node""" + filePath: String! + + """The fully qualified name of the node""" + fqn: [String!] + + """The user-supplied name of this particular node""" + name: String + + """The package name of this seed""" + packageName: String + + """The patch path of this seed""" + patchPath: String + + """The resource type of this node""" + resourceType: String! + + """The package schema of this seed""" + schema: String + + """The account ID of this node""" + uniqueId: String! +} + +type SeedAppliedStateNode implements EnvironmentAppliedNode { + """The account ID of this node""" + accountId: BigInt! + + """The package alias of this seed""" + alias: String + + """The catalog info for this node""" + catalog: CatalogNode + + """The direct children of this seed""" + children: [EnvironmentAppliedNestedNode!]! 
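+  # NOTE (editorial, illustrative only -- not part of the generated schema):
+  # a minimal query against the deprecated job-scoped `models` field defined
+  # in the Query type above; every selected field exists on ModelNode in this
+  # file:
+  #
+  #   query JobModels($jobId: Int!) {
+  #     models(jobId: $jobId) {
+  #       uniqueId
+  #       name
+  #       database
+  #       schema
+  #       status
+  #     }
+  #   }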
+ + """The package database of this seed""" + database: String + + """dbt version""" + dbtVersion: String + + """The description of this seed""" + description: String + + """The environment ID of this node""" + environmentId: BigInt! + + """The execution info for this seed""" + executionInfo: SeedExecutionInfoNode! + + """Original file path to the node""" + filePath: String! + + """The fully qualified name of this seed""" + fqn: [String!]! + + """The key-value store containing metadata relevant to this node""" + meta: JSONObject + + """The user-supplied name of this particular node""" + name: String + + """The package name of this seed""" + packageName: String + + """The patch path of this seed""" + patchPath: String + + """The project ID of this node""" + projectId: BigInt! + + """The resource type of this node""" + resourceType: String! + + """The package schema of this seed""" + schema: String + + """The tags associated with this node""" + tags: [String!]! + + """tests for this seed""" + tests: [TestAppliedStateNestedNode!]! + + """The unique ID of this node""" + uniqueId: String! +} + +type SeedAppliedStateNodeConnection { + edges: [SeedAppliedStateNodeEdge!]! + pageInfo: PageInfo! + totalCount: Int! +} + +type SeedAppliedStateNodeEdge { + cursor: String! + node: SeedAppliedStateNode! +} + +type SeedDefinitionNestedNode implements EnvironmentDefinitionNestedNode { + """The package alias of this seed""" + alias: String + + """The package database of this seed""" + database: String + + """dbt version""" + dbtVersion: String + + """The user-supplied description for this node""" + description: String + + """Original file path to the node""" + filePath: String! + + """The job ID of the last run containing definition of this seed node""" + jobDefinitionId: BigInt + + """The user-supplied name of this particular node""" + name: String + + """The package name of this seed""" + packageName: String + + """The patch path of this seed""" + patchPath: String + + """The resource type of this node""" + resourceType: String! + + """ + The timestamp when the run step (dbt run) was completed, ISO formatted timestamp + """ + runGeneratedAt: DateTime + + """The run ID of the last run containing definition of this seed node""" + runId: BigInt + + """The package schema of this seed""" + schema: String + + """The unique ID of this node""" + uniqueId: String! +} + +type SeedDefinitionNode implements EnvironmentDefinitionNode { + """The account ID of this node""" + accountId: BigInt! + + """The package alias of this seed""" + alias: String + + """The direct children of this seed""" + children: [EnvironmentDefinitionNestedNode!]! + + """The package database of this seed""" + database: String + + """dbt version""" + dbtVersion: String + + """The user-supplied description for this node""" + description: String + + """The environment ID of this node""" + environmentId: BigInt! + + """Original file path to the node""" + filePath: String! + + """The fully qualified name of this seed""" + fqn: [String!]! + + """The job ID of the last run containing definition of this seed node""" + jobDefinitionId: BigInt + + """The key-value store containing metadata relevant to this node""" + meta: JSONObject + + """The user-supplied name of this particular node""" + name: String + + """The package name of this seed""" + packageName: String + + """The patch path of this seed""" + patchPath: String + + """The project ID of this node""" + projectId: BigInt! + + """The resource type of this node""" + resourceType: String! 
+ + """ + The timestamp when the run step (dbt run) was completed, ISO formatted timestamp + """ + runGeneratedAt: DateTime + + """The run ID of the last run containing definition of this seed node""" + runId: BigInt + + """The package schema of this seed""" + schema: String + + """The tags associated with this node""" + tags: [String!]! + + """The unique ID of this node""" + uniqueId: String! +} + +type SeedDefinitionNodeConnection { + edges: [SeedDefinitionNodeEdge!]! + pageInfo: PageInfo! + totalCount: Int! +} + +type SeedDefinitionNodeEdge { + cursor: String! + node: SeedDefinitionNode! +} + +type SeedExecutionInfoNode { + """The ISO timestamp when this seed completed compilation""" + compileCompletedAt: DateTime + + """The ISO timestamp when this seed began compilation""" + compileStartedAt: DateTime + + """The ISO timestamp when this seed finished running""" + executeCompletedAt: DateTime + + """The ISO timestamp when this seed began running""" + executeStartedAt: DateTime + + """The total time elapsed during the execution of this seed""" + executionTime: Float + + """ + The job ID of the last run that either succeeded or failed in building this seed node + """ + lastJobDefinitionId: BigInt + + """ + The error message if the last run failed in building this seed, else null + """ + lastRunError: String + + """ + The timestamp when the last run that either succeeded or failed in building this seed was completed, ISO formatted timestamp + """ + lastRunGeneratedAt: DateTime + + """ + The run ID of the last run that either succeeded or failed in building this seed node + """ + lastRunId: BigInt + + """True if this seed was skipped in the last run, otherwise false""" + lastRunSkip: Boolean + + """ + The status of the last run that either succeeded or failed in building this seed + """ + lastRunStatus: String + + """The job ID of the last successful run that built this seed node""" + lastSuccessJobDefinitionId: BigInt + + """The run ID of the last successful run that built this seed node""" + lastSuccessRunId: BigInt + + """ + The elapsed time of the specific run step (dbt run) that generated this seed node + """ + runElapsedTime: Float + + """ + The timestamp when the run step (dbt run) was completed, ISO formatted timestamp + """ + runGeneratedAt: DateTime +} + +"""Seed Lineage Object""" +type SeedLineageNode implements LineageGraphNode & LineageNode & LineageNodeExecutable & LineageNodeTestable { + """The access level of this node""" + access: String @deprecated(reason: "Use ...on syntax from specific types") + + """The package alias of this seed""" + alias: String + + """The package database of this seed""" + database: String + + """Original file path to the node""" + filePath: String! + + """The fully qualified name of this node""" + fqn: [String!]! + + """The group this node belongs to""" + group: String @deprecated(reason: "Use ...on syntax from specific types") + + """The last run status of this node""" + lastRunStatus: String + + """ + True if the node matches a selector method, rather than matching via graph traversal + """ + matchesMethod: Boolean + + """The materialization type""" + materializationType: String @deprecated(reason: "Use ...on syntax from specific types") + + """The user-supplied name of this particular node""" + name: String + + """the nodes this node depends on""" + parentIds: [String!]! @deprecated(reason: "Use ...on syntax from specific types") + + """The project id of this node""" + projectId: BigInt! 
+ + """the public nodes this resource depends on""" + publicParentIds: [String!]! @deprecated(reason: "Use ...on syntax from specific types") + + """The resource type of this node""" + resourceType: ResourceNodeType! + + """The package schema of this seed""" + schema: String + + """The tags associated with this node""" + tags: [String!]! + + """The unique ID of this node""" + uniqueId: String! + + """The version of this node""" + version: String @deprecated(reason: "Use ...on syntax from specific types") + + """The worst test status of this node""" + worstTestStatus: String +} + +type SeedNode implements CloudArtifactInterface & NodeInterface { + """ + The unique ID of the account in dbt Cloud that this node was generated for + """ + accountId: BigInt! + + """The identifier of this table/view""" + alias: String + + """The list of nodes that depend on this seed""" + childrenL1: [String!] + + """The columns for this seed""" + columns: [CatalogColumn!] + + """The comment on this seed""" + comment: String + + """The ISO timestamp when this seed completed compilation""" + compileCompletedAt: DateTime + + """The ISO timestamp when this seed began compilation""" + compileStartedAt: DateTime + + """The compiled code for this seed""" + compiledCode: String + + """The compiled SQL for this seed""" + compiledSql: String + + """The database where this table/view lives""" + database: String + + """The version of dbt used to produce this node""" + dbtVersion: String + + """The user-supplied description for this node""" + description: String + + """ + The unique ID of the account in dbt Cloud that this node was generated for + """ + environmentId: BigInt! + + """The error message if there was an error, else null""" + error: String + + """The ISO timestamp when this seed finished running""" + executeCompletedAt: DateTime + + """The ISO timestamp when this seed began running""" + executeStartedAt: DateTime + + """The total time elapsed during the execution of this seed""" + executionTime: Float + + """ + The unique ID of the account in dbt Cloud that this node was generated for + """ + jobId: BigInt! + + """The key-value store containing metadata relevant to this node""" + meta: JSONObject + + """The user-supplied name of this particular node""" + name: String + + """The owner of this seed""" + owner: String + + """The package name of this seed""" + packageName: String + + """ + The unique ID of the account in dbt Cloud that this node was generated for + """ + projectId: BigInt! + + """The raw code for this seed""" + rawCode: String + + """The raw SQL for this seed""" + rawSql: String + + """The resource type of this node""" + resourceType: String! + + """ + The elapsed time of the specific run step (dbt run) that generated this seed node + """ + runElapsedTime: Float + + """ + The timestamp when the run step (dbt run) was completed, ISO formatted timestamp + """ + runGeneratedAt: DateTime + + """ + The unique ID of the account in dbt Cloud that this node was generated for + """ + runId: BigInt! + + """The schema where this table/view lives""" + schema: String + + """True if this seed was skipped, otherwise false""" + skip: Boolean + + """The stats for this seed""" + stats: [CatalogStat!]! + + """The database-reported status of this seed""" + status: String + + """The tags associated with this node""" + tags: [String!] 
+ + """The thread that ran the execution of this seed""" + thread_id: String @deprecated(reason: "No thread ID associated with seeds") + + """The type of this seed""" + type: String + + """The unique ID of this node""" + uniqueId: String! +} + +type SemanticModelDefinitionNestedNode implements EnvironmentAppliedNestedNode & EnvironmentDefinitionNestedNode { + """dbt version""" + dbtVersion: String + + """The user-supplied description for this node""" + description: String + + """The dimensions of this semantic model""" + dimensions: [SemanticModelDimension!]! + + """The entities of this semantic model""" + entities: [SemanticModelEntity!]! + + """Original file path to the node""" + filePath: String! + + """The fully qualified name of the node""" + fqn: [String!] + + """The unique ID of the job in dbt Cloud that this node was generated for""" + jobDefinitionId: BigInt + + """The measures of this semantic model""" + measures: [SemanticModelMeasure!]! + + """The user-supplied name of this particular node""" + name: String + + """The package name of this semantic model""" + packageName: String + + """The patch path of this semantic model""" + patchPath: String + + """The resource type of this node""" + resourceType: String! + + """ + The timestamp when the run step (dbt run) was completed, ISO formatted timestamp + """ + runGeneratedAt: DateTime + + """The unique ID of the run in dbt Cloud that generated this node""" + runId: BigInt + + """The unique ID of this node""" + uniqueId: String! +} + +type SemanticModelDefinitionNode implements EnvironmentAppliedNode & EnvironmentDefinitionNode { + """The account ID of this node""" + accountId: BigInt! + + """All upstream nodes of this semantic model""" + ancestors: [EnvironmentDefinitionNestedNode!]! + + """The direct children nodes of this semantic model""" + children: [EnvironmentDefinitionNestedNode!]! + + """dbt version""" + dbtVersion: String + + """The user-supplied description for this node""" + description: String + + """The dimensions of this semantic model""" + dimensions: [SemanticModelDimension!]! + + """The entities of this semantic model""" + entities: [SemanticModelEntity!]! + + """The environment ID of this node""" + environmentId: BigInt! + + """Original file path to the node""" + filePath: String! + + """The fully qualified name of this semantic model""" + fqn: [String!]! + + """The unique ID of the job in dbt Cloud that this node was generated for""" + jobDefinitionId: BigInt + + """The measures of this semantic model""" + measures: [SemanticModelMeasure!]! + + """The key-value store containing metadata relevant to this node""" + meta: JSONObject + + """The user-supplied name of this particular node""" + name: String + + """The package name of this semantic model""" + packageName: String + + """The direct parents nodes of this semantic model""" + parents: [EnvironmentDefinitionNestedNode!]! + + """The patch path of this semantic model""" + patchPath: String + + """The project ID of this node""" + projectId: BigInt! + + """The resource type of this node""" + resourceType: String! + + """ + The timestamp when the run step (dbt run) was completed, ISO formatted timestamp + """ + runGeneratedAt: DateTime + + """The unique ID of the run in dbt Cloud that generated this node""" + runId: BigInt + + """The tags associated with this node""" + tags: [String!]! + + """The unique ID of this node""" + uniqueId: String! +} + +type SemanticModelDefinitionNodeConnection { + edges: [SemanticModelDefinitionNodeEdge!]! + pageInfo: PageInfo! 
+ totalCount: Int! +} + +type SemanticModelDefinitionNodeEdge { + cursor: String! + node: SemanticModelDefinitionNode! +} + +type SemanticModelDimension { + """Description of the dimension""" + description: String + + """Name of the dimension""" + name: String + + """Type of the dimension""" + type: String + + """Type parameters of the dimension""" + typeParams: JSONObject +} + +type SemanticModelEntity { + """Description of the entity""" + description: String + + """Name of the entity""" + name: String + + """Type of the entity""" + type: String +} + +"""SemanticModel Lineage Object""" +type SemanticModelLineageNode implements LineageGraphNode & LineageNode & LineageNodeWithParents { + """The access level of this node""" + access: String @deprecated(reason: "Use ...on syntax from specific types") + + """The package alias of this node""" + alias: String @deprecated(reason: "Use ...on syntax from specific types") + + """The package database of this node""" + database: String @deprecated(reason: "Use ...on syntax from specific types") + + """Original file path to the node""" + filePath: String! + + """The fully qualified name of this node""" + fqn: [String!]! + + """The group this node belongs to""" + group: String @deprecated(reason: "Use ...on syntax from specific types") + + """ + True if the node matches a selector method, rather than matching via graph traversal + """ + matchesMethod: Boolean + + """The materialization type""" + materializationType: String @deprecated(reason: "Use ...on syntax from specific types") + + """The user-supplied name of this particular node""" + name: String + + """the nodes this node depends on""" + parentIds: [String!]! @deprecated(reason: "Use ...on syntax from specific types") + + """The project id of this node""" + projectId: BigInt! + + """the public nodes this resource depends on""" + publicParentIds: [String!]! @deprecated(reason: "Use ...on syntax from specific types") + + """The resource type of this node""" + resourceType: ResourceNodeType! + + """The package schema of this node""" + schema: String @deprecated(reason: "Use ...on syntax from specific types") + + """The tags associated with this node""" + tags: [String!]! + + """The unique ID of this node""" + uniqueId: String! + + """The version of this node""" + version: String @deprecated(reason: "Use ...on syntax from specific types") +} + +type SemanticModelMeasure { + """Aggregation type of the measure""" + agg: String + + """Whether or not this measure creates a metric""" + createMetric: Boolean + + """Description of the measure""" + description: String + + """Expression of the measure""" + expr: String + + """Name of the measure""" + name: String +} + +type SnapshotAppliedStateNestedNode implements EnvironmentAppliedNestedNode { + """The package alias of this snapshot""" + alias: String + + """The package database of this snapshot""" + database: String + + """The dbt version that ran this snapshot""" + dbtVersion: String + + """The package description of this snapshot""" + description: String + + """The execution info for this node""" + executionInfo: SnapshotExecutionInfoNode + + """Original file path to the node""" + filePath: String! + + """The fully qualified name of the node""" + fqn: [String!] + + """The user-supplied name of this particular node""" + name: String + + """The package name of this snapshot""" + packageName: String + + """The patch path of this snapshot""" + patchPath: String + + """The resource type of this node""" + resourceType: String! 
+ + """The package schema of this snapshot""" + schema: String + + """The account ID of this node""" + uniqueId: String! +} + +type SnapshotAppliedStateNode implements EnvironmentAppliedNode { + """The account ID of this node""" + accountId: BigInt! + + """The package alias of this snapshot""" + alias: String + + """The catalog info for this node""" + catalog: CatalogNode + + """The direct children nodes of this snapshot""" + children: [EnvironmentAppliedNestedNode!]! + + """The compiled code for this snapshot""" + compiledCode: String + + """The key-value store of configuration for this node""" + config: JSONObject + + """The package database of this snapshot""" + database: String + + """The dbt version that ran this snapshot""" + dbtVersion: String + + """The package description of this snapshot""" + description: String + + """The environment ID of this node""" + environmentId: BigInt! + + """The execution info for this node""" + executionInfo: SnapshotExecutionInfoNode + + """Original file path to the node""" + filePath: String! + + """The fully qualified name of this snapshot""" + fqn: [String!]! + + """The key-value store containing metadata relevant to this node""" + meta: JSONObject + + """The user-supplied name of this particular node""" + name: String + + """The package name of this snapshot""" + packageName: String + + """the nodes this snapshot depends on""" + parents: [EnvironmentAppliedNestedNode!]! + + """The patch path of this snapshot""" + patchPath: String + + """The project ID of this node""" + projectId: BigInt! + + """The raw code for this snapshot""" + rawCode: String + + """The resource type of this node""" + resourceType: String! + + """The package schema of this snapshot""" + schema: String + + """The tags associated with this node""" + tags: [String!]! + + """tests for this snapshot""" + tests: [TestAppliedStateNestedNode!]! + + """The unique ID of this node""" + uniqueId: String! +} + +type SnapshotAppliedStateNodeConnection { + edges: [SnapshotAppliedStateNodeEdge!]! + pageInfo: PageInfo! + totalCount: Int! +} + +type SnapshotAppliedStateNodeEdge { + cursor: String! + node: SnapshotAppliedStateNode! +} + +type SnapshotDefinitionNestedNode implements EnvironmentDefinitionNestedNode { + """The package alias of this snapshot""" + alias: String + + """The package database of this snapshot""" + database: String + + """The user-supplied description for this node""" + description: String + + """Original file path to the node""" + filePath: String! + + """The job ID of the last run containing definition of this snapshot node""" + jobDefinitionId: BigInt + + """The user-supplied name of this particular node""" + name: String + + """The package name of this snapshot""" + packageName: String + + """The patch path of this snapshot""" + patchPath: String + + """The resource type of this node""" + resourceType: String! + + """ + The timestamp when the run step (dbt run) was completed, ISO formatted timestamp + """ + runGeneratedAt: DateTime + + """The run ID of the last run containing definition of this snapshot node""" + runId: BigInt + + """The package schema of this snapshot""" + schema: String + + """The unique ID of this node""" + uniqueId: String! +} + +type SnapshotDefinitionNode implements EnvironmentDefinitionNode { + """The account ID of this node""" + accountId: BigInt! + + """The package alias of this snapshot""" + alias: String + + """The direct children nodes of this snapshot""" + children: [EnvironmentDefinitionNestedNode!]! 
+ + """The package database of this snapshot""" + database: String + + """The user-supplied description for this node""" + description: String + + """ + The unique ID of the environment in dbt Cloud that this node was generated for + """ + environmentId: BigInt! + + """Original file path to the node""" + filePath: String! + + """The fully qualified name of this snapshot""" + fqn: [String!]! + + """The job ID of the last run containing definition of this snapshot node""" + jobDefinitionId: BigInt + + """The key-value store containing metadata relevant to this node""" + meta: JSONObject + + """The user-supplied name of this particular node""" + name: String + + """The package name of this snapshot""" + packageName: String + + """the nodes this snapshot depends on""" + parents: [EnvironmentDefinitionNestedNode!]! + + """The patch path of this snapshot""" + patchPath: String + + """The project ID of this node""" + projectId: BigInt! + + """The raw code for this snapshot""" + rawCode: String + + """The resource type of this node""" + resourceType: String! + + """ + The timestamp when the run step (dbt run) was completed, ISO formatted timestamp + """ + runGeneratedAt: DateTime + + """The run ID of the last run containing definition of this snapshot node""" + runId: BigInt + + """The package schema of this snapshot""" + schema: String + + """The tags associated with this node""" + tags: [String!]! + + """The unique ID of this node""" + uniqueId: String! +} + +type SnapshotDefinitionNodeConnection { + edges: [SnapshotDefinitionNodeEdge!]! + pageInfo: PageInfo! + totalCount: Int! +} + +type SnapshotDefinitionNodeEdge { + cursor: String! + node: SnapshotDefinitionNode! +} + +type SnapshotExecutionInfoNode { + """The ISO timestamp when this snapshot completed compilation""" + compileCompletedAt: DateTime + + """The ISO timestamp when this snapshot began compilation""" + compileStartedAt: DateTime + + """The ISO timestamp when this snapshot finished running""" + executeCompletedAt: DateTime + + """The ISO timestamp when this snapshot began running""" + executeStartedAt: DateTime + + """The total time elapsed during the execution of this snapshot""" + executionTime: Float + + """ + The job ID of the last run that either succeeded or failed in building this snapshot node + """ + lastJobDefinitionId: BigInt + + """ + The error message if the last run failed in building this snapshot, else null + """ + lastRunError: String + + """ + The timestamp when the last run that either succeeded or failed in building this snapshot was completed, ISO formatted timestamp + """ + lastRunGeneratedAt: DateTime + + """ + The run ID of the last run that either succeeded or failed in building this snapshot node + """ + lastRunId: BigInt + + """ + The status of the last run that either succeeded or failed in building this snapshot + """ + lastRunStatus: String + + """The job ID of the last successful run that built this snapshot node""" + lastSuccessJobDefinitionId: BigInt + + """The run ID of the last successful run that built this snapshot node""" + lastSuccessRunId: BigInt + + """ + The elapsed time of the specific run step (dbt run) that generated this snapshot node + """ + runElapsedTime: Float + + """ + The timestamp when the run step (dbt run) was completed, ISO formatted timestamp + """ + runGeneratedAt: DateTime +} + +"""Snapshot Lineage Object""" +type SnapshotLineageNode implements LineageGraphNode & LineageNode & LineageNodeExecutable & LineageNodeTestable & LineageNodeWithParents { + """The access level of 
this node""" + access: String @deprecated(reason: "Use ...on syntax from specific types") + + """The package alias of this seed""" + alias: String + + """The package database of this seed""" + database: String + + """Original file path to the node""" + filePath: String! + + """The fully qualified name of this node""" + fqn: [String!]! + + """The group this node belongs to""" + group: String @deprecated(reason: "Use ...on syntax from specific types") + + """The last run status of this node""" + lastRunStatus: String + + """ + True if the node matches a selector method, rather than matching via graph traversal + """ + matchesMethod: Boolean + + """The materialization type""" + materializationType: String @deprecated(reason: "Use ...on syntax from specific types") + + """The user-supplied name of this particular node""" + name: String + + """the nodes this node depends on""" + parentIds: [String!]! @deprecated(reason: "Use ...on syntax from specific types") + + """The project id of this node""" + projectId: BigInt! + + """the public nodes this resource depends on""" + publicParentIds: [String!]! @deprecated(reason: "Use ...on syntax from specific types") + + """The resource type of this node""" + resourceType: ResourceNodeType! + + """The package schema of this seed""" + schema: String + + """The tags associated with this node""" + tags: [String!]! + + """The unique ID of this node""" + uniqueId: String! + + """The version of this node""" + version: String @deprecated(reason: "Use ...on syntax from specific types") + + """The worst test status of this node""" + worstTestStatus: String +} + +type SnapshotNode implements CloudArtifactInterface & NodeInterface { + """ + The unique ID of the account in dbt Cloud that this node was generated for + """ + accountId: BigInt! + + """The alias of this snapshot""" + alias: String + + """The list of nodes that depend on this snapshot""" + childrenL1: [String!]! + + """The columns of this snapshot""" + columns: [CatalogColumn!] + + """The comment on this snapshot""" + comment: String + + """The ISO timestamp when the snapshot compilation started""" + compileCompletedAt: DateTime + + """The ISO timestamp when the snapshot compilation started""" + compileStartedAt: DateTime + + """The compiled code of this snapshot""" + compiledCode: String + + """The compiled sql of this snapshot""" + compiledSql: String + + """The database this snapshot is defined in""" + database: String + + """The version of dbt used to produce this node""" + dbtVersion: String + + """The user-supplied description for this node""" + description: String + + """ + The unique ID of the account in dbt Cloud that this node was generated for + """ + environmentId: BigInt! + + """The error message if there was an error, otherwise null""" + error: String + + """The ISO timestamp when the snapshot execution completed""" + executeCompletedAt: DateTime + + """The ISO timestamp when the snapshot execution started""" + executeStartedAt: DateTime + + """The total time elapsed during the execution of this snapshot""" + executionTime: Float + + """ + The unique ID of the account in dbt Cloud that this node was generated for + """ + jobId: BigInt! + + """The key-value store containing metadata relevant to this node""" + meta: JSONObject + + """The user-supplied name of this particular node""" + name: String + + """The owner of this snapshot""" + owner: String + + """The package name of this snapshot""" + packageName: String + + """Retrieve parents information.""" + parentsModels: [ModelNode!]! 
+ + """Retrieve parents source information.""" + parentsSources: [SourceNode!]! + + """ + The unique ID of the account in dbt Cloud that this node was generated for + """ + projectId: BigInt! + + """The raw code of this snapshot""" + rawCode: String + + """The raw sql of this snapshot""" + rawSql: String + + """The resource type of this node""" + resourceType: String! + + """ + The elapsed time of the specific run step (dbt run) that generated this snapshot node + """ + runElapsedTime: Float + + """ + The timestamp when the run step (dbt run) was completed, ISO formatted timestamp + """ + runGeneratedAt: DateTime + + """ + The unique ID of the account in dbt Cloud that this node was generated for + """ + runId: BigInt! + + """The schema this snapshot is defined in""" + schema: String + + """True if this snapshot was skipped, otherwise false""" + skip: Boolean + + """The stats of this snapshot""" + stats: [CatalogStat!]! + + """The database-reported status of this snapshot""" + status: String + + """The tags associated with this node""" + tags: [String!] + + """The thread that ran the execution of this snapshot""" + threadId: String + + """The type of this snapshot""" + type: String + + """The unique ID of this node""" + uniqueId: String! +} + +enum SortDirection { + asc + desc +} + +input SourceAppliedFilter { + """Filter nodes by database""" + database: String + + """Filter results by freshness checked""" + freshnessChecked: Boolean + + """Filter results by freshness status""" + freshnessStatus: FreshnessStatus + + """Filter nodes by identifier""" + identifier: String + + """Filter nodes by schema""" + schema: String + + """Names of the sources to return""" + sourceNames: [String!] + + """Filter results by tags""" + tags: [String!] + + """unique ids of nodes to return""" + uniqueIds: [String!] +} + +type SourceAppliedStateNestedNode implements EnvironmentAppliedNestedNode { + """The package database of this source""" + database: String + + """dbt version""" + dbtVersion: String + + """The user-supplied description for this node""" + description: String + + """Original file path to the node""" + filePath: String! + + """The fully qualified name of the node""" + fqn: [String!] + + """The freshness info for this node""" + freshness: SourceFreshnessNode! + + """The user-supplied name of this particular source""" + identifier: String + + """The loader for this source""" + loader: String + + """The user-supplied name of this particular node""" + name: String + + """The patch path of this source""" + patchPath: String + + """The resource type of this node""" + resourceType: String! + + """The package schema of this source""" + schema: String + + """The source_description of this source""" + sourceDescription: String + + """The user-supplied name of this particular source""" + sourceName: String + + """test status dictionary""" + testStatuses: JSONObject! + + """The account ID of this node""" + uniqueId: String! +} + +type SourceAppliedStateNode implements EnvironmentAppliedNode { + """The account ID of this node""" + accountId: BigInt! + + """The catalog info for this node""" + catalog: CatalogNode + + """The direct children of this source""" + children: [EnvironmentAppliedNestedNode!]! + + """The package database of this source""" + database: String + + """dbt version""" + dbtVersion: String + + """The user-supplied description for this node""" + description: String + + """The environment ID of this node""" + environmentId: BigInt! + + """Original file path to the node""" + filePath: String! 
+ + """The fully qualified name of this source""" + fqn: [String!]! + + """The freshness info for this node""" + freshness: SourceFreshnessNode! + + """The user-supplied name of this particular source""" + identifier: String + + """The loader for this source""" + loader: String + + """The key-value store containing metadata relevant to this node""" + meta: JSONObject + + """The user-supplied name of this particular node""" + name: String + + """The patch path of this source""" + patchPath: String + + """The project ID of this node""" + projectId: BigInt! + + """The resource type of this node""" + resourceType: String! + + """The package schema of this source""" + schema: String + + """The source_description of this source""" + sourceDescription: String + + """The user-supplied name of this particular source""" + sourceName: String + + """The tags associated with this node""" + tags: [String!]! + + """tests for this source""" + tests: [TestAppliedStateNestedNode!]! + + """The unique ID of this node""" + uniqueId: String! +} + +type SourceAppliedStateNodeConnection { + edges: [SourceAppliedStateNodeEdge!]! + pageInfo: PageInfo! + totalCount: Int! +} + +type SourceAppliedStateNodeEdge { + cursor: String! + node: SourceAppliedStateNode! +} + +input SourceDefinitionFilter { + """Filter nodes by database""" + database: String + + """Filter nodes by identifier""" + identifier: String + + """Filter nodes by schema""" + schema: String + + """Names of the sources to return""" + sourceNames: [String!] + + """Filter results by tags""" + tags: [String!] + + """unique ids of nodes to return""" + uniqueIds: [String!] +} + +type SourceDefinitionNestedNode implements EnvironmentDefinitionNestedNode { + """The package database of this source""" + database: String + + """dbt version""" + dbtVersion: String + + """The user-supplied description for this node""" + description: String + + """Original file path to the node""" + filePath: String! + + """The user-supplied name of this particular source""" + identifier: String + + """The loader for this source""" + loader: String + + """The user-supplied name of this particular node""" + name: String + + """The patch path of this source""" + patchPath: String + + """The resource type of this node""" + resourceType: String! + + """ + The timestamp when the run step (dbt run) was completed, ISO formatted timestamp + """ + runGeneratedAt: DateTime + + """The package schema of this source""" + schema: String + + """The source_description of this source""" + sourceDescription: String + + """The user-supplied name of this particular source""" + sourceName: String + + """The unique ID of this node""" + uniqueId: String! +} + +type SourceDefinitionNode implements EnvironmentDefinitionNode { + """The account ID of this node""" + accountId: BigInt! + + """The direct children of this source""" + children: [EnvironmentDefinitionNestedNode!]! + + """The package database of this source""" + database: String + + """dbt version""" + dbtVersion: String + + """The user-supplied description for this node""" + description: String + + """The environment ID of this node""" + environmentId: BigInt! + + """Original file path to the node""" + filePath: String! + + """The fully qualified name of this source""" + fqn: [String!]! 
+ + """The user-supplied name of this particular source""" + identifier: String + + """The loader for this source""" + loader: String + + """The key-value store containing metadata relevant to this node""" + meta: JSONObject + + """The user-supplied name of this particular node""" + name: String + + """The patch path of this source""" + patchPath: String + + """The project ID of this node""" + projectId: BigInt! + + """The resource type of this node""" + resourceType: String! + + """ + The timestamp when the run step (dbt run) was completed, ISO formatted timestamp + """ + runGeneratedAt: DateTime + + """The package schema of this source""" + schema: String + + """The source_description of this source""" + sourceDescription: String + + """The user-supplied name of this particular source""" + sourceName: String + + """The tags associated with this node""" + tags: [String!]! + + """tests for this source""" + tests: [TestDefinitionNestedNode!]! + + """The unique ID of this node""" + uniqueId: String! +} + +type SourceDefinitionNodeConnection { + edges: [SourceDefinitionNodeEdge!]! + pageInfo: PageInfo! + totalCount: Int! +} + +type SourceDefinitionNodeEdge { + cursor: String! + node: SourceDefinitionNode! +} + +type SourceFreshnessNode { + """The freshness SLA specified for this source table""" + criteria: Criteria! + + """Whether or not the freshness was checked""" + freshnessChecked: Boolean + + """The job ID of the last run that checked for source freshness""" + freshnessJobDefinitionId: BigInt + + """ + The timestamp when the source freshness was checked, ISO formatted timestamp + """ + freshnessRunGeneratedAt: DateTime + + """The run ID of the last run that checked for source freshness""" + freshnessRunId: BigInt + + """ + The state of the freshness check for this source. Can be one of null, "pass", "fail", or "error". A value of null indicates that there was no freshness check. + """ + freshnessStatus: FreshnessStatus + + """ + The max value in the designated "loaded_at" column for this source table. ISO formatted timestamp + """ + maxLoadedAt: DateTime + + """The delta (in seconds) between max_loaded_at and snapshotted_at""" + maxLoadedAtTimeAgoInS: Float + + """ + The time when this source was checked for freshnesses by dbt. ISO formatted timestamp + """ + snapshottedAt: DateTime +} + +"""Source Lineage Object""" +type SourceLineageNode implements LineageGraphNode & LineageNode & LineageNodeTestable { + """The access level of this node""" + access: String @deprecated(reason: "Use ...on syntax from specific types") + + """The package alias of this node""" + alias: String @deprecated(reason: "Use ...on syntax from specific types") + + """The package database of this node""" + database: String + + """Original file path to the node""" + filePath: String! + + """The fully qualified name of this node""" + fqn: [String!]! + + """The group this node belongs to""" + group: String @deprecated(reason: "Use ...on syntax from specific types") + + """ + True if the node matches a selector method, rather than matching via graph traversal + """ + matchesMethod: Boolean + + """The materialization type""" + materializationType: String @deprecated(reason: "Use ...on syntax from specific types") + + """The user-supplied name of this particular node""" + name: String + + """the nodes this node depends on""" + parentIds: [String!]! @deprecated(reason: "Use ...on syntax from specific types") + + """The project id of this node""" + projectId: BigInt! 
+ + """the public nodes this resource depends on""" + publicParentIds: [String!]! @deprecated(reason: "Use ...on syntax from specific types") + + """The resource type of this node""" + resourceType: ResourceNodeType! + + """The package schema of this node""" + schema: String + + """The tags associated with this node""" + tags: [String!]! + + """The unique ID of this node""" + uniqueId: String! + + """The version of this node""" + version: String @deprecated(reason: "Use ...on syntax from specific types") + + """The worst test status of this node""" + worstTestStatus: String +} + +type SourceNode implements CloudArtifactInterface & NodeInterface { + """ + The unique ID of the account in dbt Cloud that this node was generated for + """ + accountId: BigInt! + + """The list of nodes that depend on this source""" + childrenL1: [String!]! + + """The columns of this source""" + columns: [CatalogColumn!] + + """The comment on this source""" + comment: String + + """The freshness SLA specified for this source table""" + criteria: Criteria! + + """The database this source is defined in""" + database: String + + """The version of dbt used to produce this node""" + dbtVersion: String + + """The user-supplied description for this node""" + description: String + + """ + The unique ID of the account in dbt Cloud that this node was generated for + """ + environmentId: BigInt! + + """Whether or not the freshness was checked""" + freshnessChecked: Boolean + + """The identifier of this table/view""" + identifier: String + + """ + The unique ID of the account in dbt Cloud that this node was generated for + """ + jobId: BigInt! + + """The loader of this source""" + loader: String + + """ + The max value in the designated "loaded_at" column for this source table. ISO formatted timestamp + """ + maxLoadedAt: DateTime + + """The delta (in seconds) between max_loaded_at and snapshotted_at""" + maxLoadedAtTimeAgoInS: Float + + """The key-value store containing metadata relevant to this node""" + meta: JSONObject + + """The user-supplied name of this particular node""" + name: String + + """The owner of this source""" + owner: String + + """ + The unique ID of the account in dbt Cloud that this node was generated for + """ + projectId: BigInt! + + """The resource type of this node""" + resourceType: String! + + """ + The elapsed time of the specific run step (dbt source snapshot-freshness) that generated this source node + """ + runElapsedTime: Float + + """ + The timestamp when the run step (dbt source snapshot-freshness) was completed, ISO formatted timestamp + """ + runGeneratedAt: DateTime + + """ + The unique ID of the account in dbt Cloud that this node was generated for + """ + runId: BigInt! + + """The schema this source is defined in""" + schema: String + + """ + The time when this source was checked for freshnesses by dbt. ISO formatted timestamp + """ + snapshottedAt: DateTime + + """The user-supplied description for this source""" + sourceDescription: String + + """The logical name of this source""" + sourceName: String + + """ + The state of the freshness check for this source. Can be one of null, "pass", "fail", or "error". A value of null indicates that there was no freshness check. + """ + state: FreshnessStatus + + """The stats of this source""" + stats: [CatalogStat!]! + + """The tags associated with this node""" + tags: [String!] + + """Retrieve test information.""" + tests: [TestNode!]! + + """The type of this source""" + type: String + + """The unique ID of this node""" + uniqueId: String! 
+} + +"""A dbt resource's tag""" +type Tag { + """The tag name""" + name: String! +} + +input TestAppliedFilter { + status: String + + """Filter results by tags""" + tags: [String!] + testTypes: [TestType!] + + """unique ids of nodes to return""" + uniqueIds: [String!] +} + +type TestAppliedStateNestedNode implements EnvironmentAppliedNestedNode { + """The name of the column that the test pertains to""" + columnName: String + + """dbt version""" + dbtVersion: String + + """Descriptions of the test""" + description: String + + """The execution info for this node""" + executionInfo: TestExecutionInfoNode! + + """Original file path to the node""" + filePath: String! + + """The fully qualified name of the node""" + fqn: [String!] + + """The user-supplied name of this particular node""" + name: String + + """The patch path of this test""" + patchPath: String + + """The resource type of this node""" + resourceType: String! + testMetadata: TestMetadata! + + """The type of test""" + testType: TestType! + + """The account ID of this node""" + uniqueId: String! +} + +"""The definition state of a test""" +type TestAppliedStateNode implements EnvironmentAppliedNode { + """The account ID of this node""" + accountId: BigInt! + + """The name of the column that the test pertains to""" + columnName: String + + """The compiled code of this test""" + compiledCode: String + + """The key-value store of configuration for this node""" + config: JSONObject + + """dbt version""" + dbtVersion: String + + """Descriptions of the test""" + description: String + + """The environment ID of this node""" + environmentId: BigInt! + + """The status of the event""" + eventStatus: JSONObject + + """The execution info for this node""" + executionInfo: TestExecutionInfoNode! + + """The expect for this test""" + expect: JSONObject + + """Original file path to the node""" + filePath: String! + + """The fully qualified name of this test""" + fqn: [String!]! + + """The given for this test""" + given: [JSONObject!] + + """The key-value store containing metadata relevant to this node""" + meta: JSONObject + + """The model of the test""" + model: String + + """The user-supplied name of this particular node""" + name: String + + """The number of expect rows for this test""" + numExpectRows: Int + + """The number of given for this test""" + numGiven: Int + + """The number of given rows for this test""" + numGivenRows: Int + + """The overrides for this test""" + overrides: JSONObject + + """The direct parents of this test""" + parents: [EnvironmentAppliedNestedNode!]! + + """The patch path of this test""" + patchPath: String + + """The project ID of this node""" + projectId: BigInt! + + """The raw sql of this test""" + rawCode: String + + """The resource type of this node""" + resourceType: String! + + """The tags associated with this node""" + tags: [String!]! + testMetadata: TestMetadata! + + """The type of test""" + testType: TestType! + + """The unique ID of the node that this test is testing""" + testedNodeUniqueId: String + + """The unique ID of the input node that this test is testing""" + thisInputNodeUniqueId: String + + """The unique ID of this node""" + uniqueId: String! +} + +type TestAppliedStateNodeConnection { + edges: [TestAppliedStateNodeEdge!]! + pageInfo: PageInfo! + totalCount: Int! +} + +type TestAppliedStateNodeEdge { + cursor: String! + node: TestAppliedStateNode! +} + +input TestDefinitionFilter { + """Filter results by tags""" + tags: [String!] + + """unique ids of nodes to return""" + uniqueIds: [String!] 
+} + +type TestDefinitionNestedNode implements EnvironmentDefinitionNestedNode { + """The name of the column that the test pertains to""" + columnName: String + + """dbt version""" + dbtVersion: String + + """Descriptions of the test""" + description: String + + """Original file path to the node""" + filePath: String! + + """The job ID of the last run containing definition of this test node""" + jobDefinitionId: BigInt + + """The user-supplied name of this particular node""" + name: String + + """The patch path of this test""" + patchPath: String + + """The resource type of this node""" + resourceType: String! + + """ + The timestamp when the run step (dbt run) was completed, ISO formatted timestamp + """ + runGeneratedAt: DateTime + + """The run ID of the last run containing definition of this test node""" + runId: BigInt + + """The type of test""" + testType: TestType + + """The unique ID of this node""" + uniqueId: String! +} + +type TestDefinitionNode implements EnvironmentDefinitionNode { + """The account ID of this node""" + accountId: BigInt! + + """The name of the column that the test pertains to""" + columnName: String + + """dbt version""" + dbtVersion: String + + """Descriptions of the test""" + description: String + + """The environment ID of this node""" + environmentId: BigInt! + + """The status of the event""" + eventStatus: JSONObject! + + """The expect for this test""" + expect: JSONObject! + + """Original file path to the node""" + filePath: String! + + """The fully qualified name of this test""" + fqn: [String!]! + + """The given for this test""" + given: [JSONObject!]! + + """The job ID of the last run containing definition of this test node""" + jobDefinitionId: BigInt + + """The key-value store containing metadata relevant to this node""" + meta: JSONObject + + """The model of the test""" + model: String + + """The user-supplied name of this particular node""" + name: String + + """The number of expect rows for this test""" + numExpectRows: Int + + """The number of given for this test""" + numGiven: Int + + """The number of given rows for this test""" + numGivenRows: Int + + """The overrides for this test""" + overrides: JSONObject! + + """The direct parents of this test""" + parents: [EnvironmentDefinitionNestedNode!]! + + """The patch path of this test""" + patchPath: String + + """The project ID of this node""" + projectId: BigInt! + + """The raw sql of this test""" + rawCode: String + + """The resource type of this node""" + resource: String! + + """The resource type of this node""" + resourceType: String! + + """ + The timestamp when the run step (dbt run) was completed, ISO formatted timestamp + """ + runGeneratedAt: DateTime + + """The run ID of the last run containing definition of this test node""" + runId: BigInt + + """The tags associated with this node""" + tags: [String!]! + + """The type of test""" + testType: TestType + + """The unique ID of the node that this test is testing""" + testedNodeUniqueId: String + + """The unique ID of the input node that this test is testing""" + thisInputNodeUniqueId: String + + """The unique ID of this node""" + uniqueId: String! +} + +type TestDefinitionNodeConnection { + edges: [TestDefinitionNodeEdge!]! + pageInfo: PageInfo! + totalCount: Int! +} + +type TestDefinitionNodeEdge { + cursor: String! + node: TestDefinitionNode! 
+} + +type TestExecutionInfoNode { + """The ISO timestamp when this test completed compilation""" + compileCompletedAt: DateTime + + """The ISO timestamp when this test began compilation""" + compileStartedAt: DateTime + + """The ISO timestamp when this test finished running""" + executeCompletedAt: DateTime + + """The ISO timestamp when this test began running""" + executeStartedAt: DateTime + + """The total time elapsed during the execution of this test""" + executionTime: Float + + """ + The job ID of the last run that either succeeded or failed in building this test node + """ + lastJobDefinitionId: BigInt + + """ + The error message if the last run failed in building this test, else null + """ + lastRunError: String + + """The number of failures in the last run""" + lastRunFailures: BigInt + + """ + The timestamp when the last run that either succeeded or failed in building this test was completed, ISO formatted timestamp + """ + lastRunGeneratedAt: DateTime + + """ + The run ID of the last run that either succeeded or failed in building this test node + """ + lastRunId: BigInt + + """ + The status of the last run that either succeeded or failed in building this test + """ + lastRunStatus: String + + """The job ID of the last successful run that built this test node""" + lastSuccessJobDefinitionId: BigInt + + """The run ID of the last successful run that built this test node""" + lastSuccessRunId: BigInt + + """ + The elapsed time of the specific run step (dbt run) that generated this test node + """ + runElapsedTime: Float + + """ + The timestamp when the run step (dbt run) was completed, ISO formatted timestamp + """ + runGeneratedAt: DateTime +} + +"""Test Lineage Object""" +type TestLineageNode implements LineageGraphNode & LineageNode & LineageNodeExecutable & LineageNodeWithParents { + """The access level of this node""" + access: String @deprecated(reason: "Use ...on syntax from specific types") + + """The package alias of this node""" + alias: String @deprecated(reason: "Use ...on syntax from specific types") + + """The package database of this node""" + database: String @deprecated(reason: "Use ...on syntax from specific types") + + """Original file path to the node""" + filePath: String! + + """The fully qualified name of this node""" + fqn: [String!]! + + """The group this node belongs to""" + group: String @deprecated(reason: "Use ...on syntax from specific types") + + """The last run status of this node""" + lastRunStatus: String + + """ + True if the node matches a selector method, rather than matching via graph traversal + """ + matchesMethod: Boolean + + """The materialization type""" + materializationType: String @deprecated(reason: "Use ...on syntax from specific types") + + """The user-supplied name of this particular node""" + name: String + + """the nodes this node depends on""" + parentIds: [String!]! @deprecated(reason: "Use ...on syntax from specific types") + + """The project id of this node""" + projectId: BigInt! + + """the public nodes this resource depends on""" + publicParentIds: [String!]! @deprecated(reason: "Use ...on syntax from specific types") + + """The resource type of this node""" + resourceType: ResourceNodeType! + + """The package schema of this node""" + schema: String @deprecated(reason: "Use ...on syntax from specific types") + + """The tags associated with this node""" + tags: [String!]! + + """The unique ID of this node""" + uniqueId: String! 
+ + """The version of this node""" + version: String @deprecated(reason: "Use ...on syntax from specific types") +} + +type TestMetadata { + columnName: String + kwargs: JSONObject + name: String + namespace: String +} + +type TestNode implements CloudArtifactInterface & NodeInterface { + """ + The unique ID of the account in dbt Cloud that this node was generated for + """ + accountId: BigInt! + + """The name of the column that the test pertains to""" + columnName: String + + """The ISO timestamp when this test completed compilation""" + compileCompletedAt: DateTime + + """The ISO timestamp when this test began compilation""" + compileStartedAt: DateTime + + """The compiled code of this test""" + compiledCode: String + + """The compiled sql of this test""" + compiledSql: String + + """The version of dbt used to produce this node""" + dbtVersion: String + + """The list of nodes this test depends on""" + dependsOn: [String!]! + + """The user-supplied description for this node""" + description: String + + """ + The unique ID of the account in dbt Cloud that this node was generated for + """ + environmentId: BigInt! + + """ + The error message in the case of state being "error" + """ + error: String + + """The ISO timestamp when this test finished running""" + executeCompletedAt: DateTime + + """The ISO timestamp when this test began running""" + executeStartedAt: DateTime + + """The total time elapsed during the execution of this test""" + executionTime: Float + + """The failure result""" + fail: Boolean + + """The identifier of the run step that generated this test""" + invocationId: String + + """ + The unique ID of the account in dbt Cloud that this node was generated for + """ + jobId: BigInt! + + """The language of this test""" + language: String + + """The key-value store containing metadata relevant to this node""" + meta: JSONObject + + """The user-supplied name of this particular node""" + name: String + + """ + The unique ID of the account in dbt Cloud that this node was generated for + """ + projectId: BigInt! + + """The raw code of this test""" + rawCode: String + + """The raw sql of this test""" + rawSql: String + + """The resource type of this node""" + resourceType: String! + + """ + The elapsed time of the specific run step (dbt run) that generated this test node + """ + runElapsedTime: Float + + """ + The timestamp when the run step (dbt run) was completed, ISO formatted timestamp + """ + runGeneratedAt: DateTime + + """ + The unique ID of the account in dbt Cloud that this node was generated for + """ + runId: BigInt! + + """The skipped result""" + skip: Boolean + + """ + The test results. Can be one of, in order of severity, "error", "fail", "warn", "pass" + """ + state: String + + """ + Status details the results of a test. For errors, it reads "ERROR". For other test results, it represents the number of rows that contribute to the test results + """ + status: String + + """The tags associated with this node""" + tags: [String!] + + """The thread that ran the execution of this test""" + threadId: String + + """The unique ID of this node""" + uniqueId: String! 
+ + """The warning result""" + warn: Boolean +} + +"""The type of test""" +enum TestType { + GENERIC_DATA_TEST + SINGULAR_DATA_TEST + UNIT_TEST +} + +""" +The period component of the freshness SLA (examples: minute, hour, day) +""" +enum TimePeriod { + day + hour + minute +} \ No newline at end of file diff --git a/metaphor/dbt/cloud/extractor.py b/metaphor/dbt/cloud/extractor.py index 7f5a8082..5084572e 100644 --- a/metaphor/dbt/cloud/extractor.py +++ b/metaphor/dbt/cloud/extractor.py @@ -1,23 +1,16 @@ -from collections import defaultdict -from typing import Collection, Dict, List, Optional +from typing import Collection, Dict, List + +import httpx from metaphor.common.base_extractor import BaseExtractor from metaphor.common.event_util import ENTITY_TYPES from metaphor.common.logger import get_logger -from metaphor.dbt.artifact_parser import dbt_run_result_output_data_monitor_status_map from metaphor.dbt.cloud.client import DbtAdminAPIClient from metaphor.dbt.cloud.config import DbtCloudConfig -from metaphor.dbt.cloud.discovery_api import DiscoveryAPI, DiscoveryTestNode -from metaphor.dbt.config import DbtRunConfig -from metaphor.dbt.extractor import DbtExtractor -from metaphor.dbt.util import add_data_quality_monitor, get_data_platform_from_manifest +from metaphor.dbt.cloud.discovery_api import DiscoveryAPIClient +from metaphor.dbt.cloud.parser.parser import Parser +from metaphor.dbt.cloud.utils import parse_environment from metaphor.models.crawler_run_metadata import Platform -from metaphor.models.metadata_change_event import ( - DataPlatform, - Dataset, - DatasetLogicalID, - VirtualView, -) logger = get_logger() @@ -36,23 +29,30 @@ def from_config_file(config_file: str) -> "DbtCloudExtractor": def __init__(self, config: DbtCloudConfig): super().__init__(config) + self._config = config self._account_id = config.account_id self._job_ids = config.job_ids self._project_ids = config.project_ids - self._service_token = config.service_token - self._meta_ownerships = config.meta_ownerships - self._meta_tags = config.meta_tags - self._meta_key_tags = config.meta_key_tags self._base_url = config.base_url self._discovery_api_url = config.discovery_api_url - self._entities: Dict[int, Collection[ENTITY_TYPES]] = {} + self._project_accounts: Dict[int, str] = {} + self._entities: List[ENTITY_TYPES] = [] self._client = DbtAdminAPIClient( base_url=self._base_url, account_id=self._account_id, - service_token=self._service_token, + service_token=config.service_token, included_env_ids=config.environment_ids, ) + headers = { + "Authorization": f"Bearer {config.service_token}", + "Content-Type": "application/json", + } + self._discovery_api_client = DiscoveryAPIClient( + url=self._discovery_api_url, + headers=headers, + http_client=httpx.Client(timeout=None, headers=headers), + ) async def extract(self) -> Collection[ENTITY_TYPES]: logger.info("Fetching metadata from DBT cloud") @@ -61,11 +61,11 @@ async def extract(self) -> Collection[ENTITY_TYPES]: self._job_ids.update(self._client.get_project_jobs(project_id)) for job_id in self._job_ids: - await self._extract_last_run(job_id) + await self._extract_job(job_id) - return [item for ls in self._entities.values() for item in ls] + return self._entities - async def _extract_last_run(self, job_id: int): + async def _extract_job(self, job_id: int): if not self._client.is_job_included(job_id): logger.info(f"Ignoring job ID: {job_id}") return @@ -87,101 +87,23 @@ async def _extract_last_run(self, job_id: int): if account is not None: logger.info(f"Snowflake 
account: {account}") - manifest_json = self._client.get_run_artifact(run, "manifest.json") - logger.info(f"manifest.json saved to {manifest_json}") - - platform = get_data_platform_from_manifest(manifest_json) - docs_base_url = ( f"{self._base_url}/accounts/{self._account_id}/jobs/{run.job_id}/docs" ) - - try: - # Pass the path of the downloaded manifest file to the dbt Core extractor - entities = await DbtExtractor( - DbtRunConfig( - manifest=manifest_json, - run_results=None, # Instead of getting test results from `run_results.json`, we get them from discovery API after we parse the manifest - account=account, - docs_base_url=docs_base_url, - output=self._output, - meta_ownerships=self._meta_ownerships, - meta_tags=self._meta_tags, - meta_key_tags=self._meta_key_tags, - ) - ).extract() - - self._entities[run.run_id] = self._extend_test_run_results_entities( - platform, account, run.job_id, entities - ) - except Exception as e: - logger.exception(f"Failed to parse artifacts for run {run}") - self.extend_errors(e) - - def _extend_test_run_results_entities( - self, - platform: DataPlatform, - account: Optional[str], - job_id: int, - entities: Collection[ENTITY_TYPES], - ): - logger.info("Parsing test run results") - - discovery_api = DiscoveryAPI(self._discovery_api_url, self._service_token) - - new_monitor_datasets: List[Dataset] = list() - - # Get all test nodes from discovery API - test_nodes_by_model_uid: Dict[str, List[DiscoveryTestNode]] = defaultdict(list) - for test_node in discovery_api.get_all_job_tests(job_id): - for model in test_node.models: - test_nodes_by_model_uid[model].append(test_node) - - model_names = discovery_api.get_all_job_model_names(job_id) - - # Go thru the virtual views - for entity in entities: - if not isinstance(entity, VirtualView): - continue - if not entity.logical_id or not entity.logical_id.name: - continue - - model_unique_id = f"model.{entity.logical_id.name}" - - if ( - model_unique_id not in test_nodes_by_model_uid - or model_unique_id not in model_names - ): - continue - - dataset_logical_id = DatasetLogicalID( - name=model_names[model_unique_id], - platform=platform, - account=account, - ) - - dataset = Dataset( - logical_id=dataset_logical_id, - ) - - # Go thru the tests in this dbt model - for test_node in test_nodes_by_model_uid[model_unique_id]: - if not test_node.name: - continue - - status = dbt_run_result_output_data_monitor_status_map[ - test_node.status or "skipped" - ] - - add_data_quality_monitor( - dataset, - test_node.name, - test_node.columnName, - status, - test_node.executeCompletedAt, - ) - - if dataset.data_quality and dataset.data_quality.monitors: - new_monitor_datasets.append(dataset) - - return list(entities) + new_monitor_datasets + project_explore_url = f"{self._base_url}/explore/{self._account_id}/projects/{run.project_id}/environments/production/details" + + environment = self._discovery_api_client.get_environment_adapter_type( + run.environment_id + ).environment + platform, project_name = parse_environment(environment) + + job_run_parser = Parser( + self._discovery_api_client, + self._config, + platform, + account, + project_name, + docs_base_url, + project_explore_url=project_explore_url, + ) + self._entities.extend(job_run_parser.parse_run(run)) diff --git a/metaphor/dbt/cloud/parser/__init__.py b/metaphor/dbt/cloud/parser/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/metaphor/dbt/cloud/parser/common.py b/metaphor/dbt/cloud/parser/common.py new file mode 100644 index 00000000..1ce48861 --- 
/dev/null
+++ b/metaphor/dbt/cloud/parser/common.py
@@ -0,0 +1,58 @@
+from typing import Dict, List, Union
+
+from metaphor.common.entity_id import EntityId
+from metaphor.common.utils import unique_list
+from metaphor.dbt.util import get_virtual_view_id
+from metaphor.models.metadata_change_event import (
+    Dataset,
+    DbtMacro,
+    DbtMetric,
+    DbtModel,
+    VirtualView,
+)
+
+
+def parse_depends_on(
+    virtual_views: Dict[str, VirtualView],
+    depends_on: List[str],
+    source_map: Dict[str, EntityId],
+    macro_map: Dict[str, DbtMacro],
+    target: Union[DbtModel, DbtMetric],
+):
+    if not depends_on:
+        return
+
+    datasets = unique_list(
+        [str(source_map[n]) for n in depends_on if n.startswith("source.")]
+    )
+
+    models = unique_list(
+        [
+            get_virtual_view_id(virtual_views[n].logical_id)  # type: ignore
+            for n in depends_on
+            if n.startswith("model.") or n.startswith("snapshot.")
+        ]
+    )
+
+    macros = [
+        macro_map[n] for n in depends_on if n.startswith("macro.") and n in macro_map
+    ]
+
+    target.source_datasets = datasets if datasets else None
+    target.source_models = models if models else None
+    if isinstance(target, DbtModel):
+        target.macros = macros if macros else None
+
+
+def dataset_has_parsed_fields(
+    dataset: Dataset,
+) -> bool:
+    """
+    init_dataset may create datasets that carry no parsed content; filter those out
+    """
+    return (
+        dataset.ownership_assignment is not None
+        or dataset.tag_assignment is not None
+        or dataset.documentation is not None
+        or dataset.data_quality is not None
+    )
diff --git a/metaphor/dbt/cloud/parser/dbt_macro_parser.py b/metaphor/dbt/cloud/parser/dbt_macro_parser.py
new file mode 100644
index 00000000..d03c6ba9
--- /dev/null
+++ b/metaphor/dbt/cloud/parser/dbt_macro_parser.py
@@ -0,0 +1,75 @@
+from collections import defaultdict
+from typing import Dict, List
+
+from metaphor.dbt.cloud.discovery_api import DiscoveryAPIClient
+from metaphor.dbt.cloud.discovery_api.generated.get_job_run_macros import (
+    GetJobRunMacrosJobMacros,
+)
+from metaphor.dbt.cloud.discovery_api.generated.get_macro_arguments import (
+    GetMacroArgumentsEnvironmentDefinitionMacrosEdges as GetMacroArgumentsEdge,
+)
+from metaphor.dbt.cloud.discovery_api.generated.input_types import MacroDefinitionFilter
+from metaphor.models.metadata_change_event import DbtMacro, DbtMacroArgument
+
+
+class MacroParser:
+    def __init__(
+        self,
+        discovery_api: DiscoveryAPIClient,
+    ) -> None:
+        self._discovery_api = discovery_api
+
+    def _parse_macro_arguments(self, macros: List[GetJobRunMacrosJobMacros]):
+        macros_by_environment = defaultdict(list)
+        for macro in macros:
+            macros_by_environment[macro.environment_id].append(macro.unique_id)
+
+        arguments: Dict[str, List[DbtMacroArgument]] = dict()
+        for environment_id, unique_ids in macros_by_environment.items():
+            edges: List[GetMacroArgumentsEdge] = []
+            after = None
+            while True:
+                environment = self._discovery_api.get_macro_arguments(
+                    environment_id,
+                    filter=MacroDefinitionFilter(
+                        uniqueIds=unique_ids,
+                    ),
+                    after=after,
+                ).environment
+                definition = environment.definition
+                if not definition:
+                    break
+                edges += definition.macros.edges
+                after = definition.macros.page_info.end_cursor
+                if not definition.macros.page_info.has_next_page:
+                    break
+
+            for edge in edges:
+                node = edge.node
+                arguments[node.unique_id] = [
+                    DbtMacroArgument(
+                        name=arg.name,
+                        type=arg.type,
+                        description=arg.description,
+                    )
+                    for arg in node.arguments
+                ]
+        return arguments
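The prefix partitioning in parse_depends_on is easiest to see with concrete unique IDs. A minimal, self-contained sketch of the same bucketing (hypothetical IDs; the real code additionally resolves them through source_map, virtual_views, and macro_map):

    # Hypothetical depends_on list as returned by the discovery API
    depends_on = [
        "source.jaffle_shop.raw.orders",
        "model.jaffle_shop.stg_orders",
        "snapshot.jaffle_shop.orders_snapshot",
        "macro.dbt.test_unique",
    ]

    # The same prefix-based bucketing that parse_depends_on applies
    sources = [n for n in depends_on if n.startswith("source.")]
    models = [n for n in depends_on if n.startswith(("model.", "snapshot."))]
    macros = [n for n in depends_on if n.startswith("macro.")]

    assert sources == ["source.jaffle_shop.raw.orders"]
    assert models == ["model.jaffle_shop.stg_orders", "snapshot.jaffle_shop.orders_snapshot"]
    assert macros == ["macro.dbt.test_unique"]

+
+    def parse(self, macros: 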
List[GetJobRunMacrosJobMacros]) -> Dict[str, DbtMacro]: + arguments = self._parse_macro_arguments(macros) + + macro_map: Dict[str, DbtMacro] = {} + for macro in macros: + macro_depends_on = [x for x in macro.depends_on if x.startswith("macro.")] + macro_map[macro.unique_id] = DbtMacro( + name=macro.name, + unique_id=macro.unique_id, + package_name=macro.package_name, + description=macro.description, + arguments=arguments.get(macro.unique_id), + sql=macro.macro_sql, + depends_on_macros=macro_depends_on if macro_depends_on else None, + ) + + return macro_map diff --git a/metaphor/dbt/cloud/parser/dbt_metric_parser.py b/metaphor/dbt/cloud/parser/dbt_metric_parser.py new file mode 100644 index 00000000..0845f774 --- /dev/null +++ b/metaphor/dbt/cloud/parser/dbt_metric_parser.py @@ -0,0 +1,66 @@ +from typing import Dict, Optional + +from metaphor.common.entity_id import EntityId +from metaphor.dbt.cloud.discovery_api.generated.get_job_run_metrics import ( + GetJobRunMetricsJobMetrics as JobMetric, +) +from metaphor.dbt.cloud.parser.common import parse_depends_on +from metaphor.dbt.util import build_metric_docs_url, build_system_tags, init_metric +from metaphor.models.metadata_change_event import ( + DbtMacro, + DbtMetric, + EntityUpstream, + Metric, + MetricFilter, + VirtualView, +) + + +class MetricParser: + def __init__( + self, + metrics: Dict[str, Metric], + virtual_views: Dict[str, VirtualView], + docs_base_url: Optional[str], + ) -> None: + self._metrics = metrics + self._virtual_views = virtual_views + self._docs_base_url = docs_base_url + + def parse( + self, + metric: JobMetric, + source_map: Dict[str, EntityId], + macro_map: Dict[str, DbtMacro], + ) -> None: + + metric_entity = init_metric(self._metrics, metric.unique_id) + metric_entity.dbt_metric = DbtMetric( + package_name=metric.package_name, + description=metric.description or None, + label=metric.label, + timestamp=metric.timestamp, + time_grains=metric.time_grains, + dimensions=metric.dimensions, + filters=[ + MetricFilter(field=f.field, operator=f.operator, value=f.value) + for f in metric.filters + ], + url=build_metric_docs_url(self._docs_base_url, metric.unique_id), + sql=metric.expression or metric.sql, + type=metric.calculation_method or metric.type, + ) + if metric.tags: + metric_entity.system_tags = build_system_tags(metric.tags) + + parse_depends_on( + self._virtual_views, + metric.depends_on, + source_map, + macro_map, + metric_entity.dbt_metric, + ) + + metric_entity.entity_upstream = EntityUpstream( + source_entities=metric_entity.dbt_metric.source_models, + ) diff --git a/metaphor/dbt/cloud/parser/dbt_node_parser.py b/metaphor/dbt/cloud/parser/dbt_node_parser.py new file mode 100644 index 00000000..ab099cd2 --- /dev/null +++ b/metaphor/dbt/cloud/parser/dbt_node_parser.py @@ -0,0 +1,268 @@ +import json +from typing import Any, Dict, List, Optional, Union, cast + +from metaphor.common.entity_id import EntityId, parts_to_dataset_entity_id +from metaphor.common.utils import unique_list +from metaphor.dbt.cloud.config import DbtCloudConfig +from metaphor.dbt.cloud.discovery_api import DiscoveryAPIClient +from metaphor.dbt.cloud.discovery_api.generated.get_job_run_models import ( + GetJobRunModelsJobModels, +) +from metaphor.dbt.cloud.discovery_api.generated.get_job_run_snapshots import ( + GetJobRunSnapshotsJobSnapshots, +) +from metaphor.dbt.cloud.parser.common import parse_depends_on +from metaphor.dbt.util import ( + build_model_docs_url, + build_system_tags, + get_dbt_tags_from_meta, + get_metaphor_tags_from_meta, + 
get_model_name_from_unique_id, + get_ownerships_from_meta, + get_snapshot_name_from_unique_id, + init_dataset, + init_field, + init_virtual_view, +) +from metaphor.models.metadata_change_event import ( + AssetStructure, + ColumnTagAssignment, + DataPlatform, + Dataset, + DbtMacro, + DbtMaterialization, + DbtMaterializationType, + DbtMetadataItem, + DbtModel, + EntityUpstream, + Metric, + OwnershipAssignment, + TagAssignment, + VirtualView, +) + +NODE_TYPE = Union[GetJobRunModelsJobModels, GetJobRunSnapshotsJobSnapshots] + + +class NodeParser: + def __init__( + self, + discovery_api: DiscoveryAPIClient, + config: DbtCloudConfig, + platform: DataPlatform, + account: Optional[str], + docs_base_url: Optional[str], + project_explore_url: str, + datasets: Dict[str, Dataset], + virtual_views: Dict[str, VirtualView], + metrics: Dict[str, Metric], + ) -> None: + self._discovery_api = discovery_api + self._platform = platform + self._account = account + self._docs_base_url = docs_base_url + self._project_explore_base_url = project_explore_url + self._meta_ownerships = config.meta_ownerships + self._meta_tags = config.meta_tags + self._meta_key_tags = config.meta_key_tags + self._datasets = datasets + self._virtual_views = virtual_views + self._metrics = metrics + + self._env_file_path: Dict[int, str] = dict() + + @staticmethod + def get_node_name(node: NODE_TYPE): + if isinstance(node, GetJobRunModelsJobModels): + return get_model_name_from_unique_id + return get_snapshot_name_from_unique_id + + def _parse_model_meta( + self, model: GetJobRunModelsJobModels, virtual_view: VirtualView + ) -> None: + if model.materialized_type is None or model.materialized_type.upper() in [ + "EPHEMERAL", + "OTHER", + ]: + return + + if not model.meta: + return + + # v3 use 'model.config.meta' while v1, v2 use 'model.meta' + table = model.alias or model.name + if not model.database or not model.schema_ or not table: + return + + dataset = init_dataset( + self._datasets, + model.database, + model.schema_, + table, + self._platform, + self._account, + model.unique_id, + ) + + # Assign ownership & tags to materialized table/view + ownerships = get_ownerships_from_meta(model.meta, self._meta_ownerships) + if len(ownerships.materialized_table) > 0: + dataset.ownership_assignment = OwnershipAssignment( + ownerships=ownerships.materialized_table + ) + if len(ownerships.dbt_model) > 0: + virtual_view.ownership_assignment = OwnershipAssignment( + ownerships=ownerships.dbt_model + ) + + tag_names = get_metaphor_tags_from_meta(model.meta, self._meta_tags) + if len(tag_names) > 0: + dataset.tag_assignment = TagAssignment(tag_names=tag_names) + + # Capture the whole "meta" field as key-value pairs + if len(model.meta) > 0: + assert virtual_view.dbt_model + virtual_view.dbt_model.meta = [ + DbtMetadataItem(key=key, value=json.dumps(value)) + for key, value in cast(Dict[str, Any], model.meta).items() + ] + + def _parse_model_materialization( + self, node: GetJobRunModelsJobModels, dbt_model: DbtModel + ) -> None: + materialized = node.materialized_type + if materialized is None: + return + + try: + materialization_type = DbtMaterializationType[materialized.upper()] + except KeyError: + materialization_type = DbtMaterializationType.OTHER + + dbt_model.materialization = DbtMaterialization( + type=materialization_type, + target_dataset=str(self._get_node_entity_id(node)), + ) + + def _get_node_entity_id(self, node: NODE_TYPE) -> EntityId: + return parts_to_dataset_entity_id( + self._platform, + self._account, + node.database, + 
node.schema_,
+            node.alias or node.name,
+        )
+
+    def _parse_node_columns(self, node: NODE_TYPE, dbt_model: DbtModel) -> None:
+        if dbt_model.fields is None:
+            return
+        if node.columns is not None:
+            for col in node.columns:
+                if not col.name:
+                    continue
+                column_name = col.name.lower()
+                field = init_field(dbt_model.fields, column_name)
+                field.description = col.description
+                field.native_type = col.type or "Not Set"
+                field.tags = col.tags
+
+                if col.meta is not None:
+                    self._parse_column_meta(node, column_name, col.meta)
+
+    def _parse_column_meta(self, node: NODE_TYPE, column_name: str, meta: Dict) -> None:
+        table = node.alias or node.name
+        if not node.database or not node.schema_ or not table:
+            return
+
+        tag_names = get_metaphor_tags_from_meta(meta, self._meta_tags)
+        if len(tag_names) == 0:
+            return
+
+        dataset = init_dataset(
+            self._datasets,
+            node.database,
+            node.schema_,
+            table,
+            self._platform,
+            self._account,
+            node.unique_id,
+        )
+        if dataset.tag_assignment is None:
+            dataset.tag_assignment = TagAssignment()
+
+        if dataset.tag_assignment.column_tag_assignments is None:
+            dataset.tag_assignment.column_tag_assignments = []
+
+        dataset.tag_assignment.column_tag_assignments.append(
+            ColumnTagAssignment(
+                column_name=column_name,
+                tag_names=tag_names,
+            )
+        )
+
+    def _init_dbt_model(self, node: NODE_TYPE, virtual_view: VirtualView):
+        virtual_view.dbt_model = DbtModel(
+            package_name=node.package_name,
+            description=node.description or None,
+            url=f"{self._project_explore_base_url}/{node.unique_id}",
+            docs_url=build_model_docs_url(self._docs_base_url, node.unique_id),
+            fields=[],
+        )
+        return virtual_view.dbt_model
+
+    def _set_system_tags(self, node: NODE_TYPE, virtual_view: VirtualView):
+        # Treat dbt tags as system tags
+        tags: List[str] = unique_list(
+            get_dbt_tags_from_meta(node.meta, self._meta_key_tags)
+            + (node.tags if node.tags else [])
+        )
+
+        if len(tags) > 0:
+            virtual_view.system_tags = build_system_tags(tags)
+
+    def _set_entity_upstream(self, virtual_view: VirtualView, dbt_model: DbtModel):
+        source_entities = []
+        if dbt_model.source_datasets is not None:
+            source_entities.extend(dbt_model.source_datasets)
+        if dbt_model.source_models is not None:
+            source_entities.extend(dbt_model.source_models)
+        if len(source_entities) > 0:
+            virtual_view.entity_upstream = EntityUpstream(
+                source_entities=source_entities,
+            )
+
+    def parse(
+        self,
+        node: NODE_TYPE,
+        source_map: Dict[str, EntityId],
+        macro_map: Dict[str, DbtMacro],
+    ):
+        node_name_getter = self.get_node_name(node)
+        virtual_view = init_virtual_view(
+            self._virtual_views, node.unique_id, node_name_getter
+        )
+
+        # Extract the project directory from the model's unique id:
+        # split by "." and keep the leading project segment
+        directory = node_name_getter(node.unique_id).rsplit(".")[0]
+        virtual_view.structure = AssetStructure(
+            directories=[directory],
+            name=node.name,
+        )
+
+        dbt_model = self._init_dbt_model(node, virtual_view)
+        self._set_system_tags(node, virtual_view)
+
+        # raw_sql & compiled_sql were renamed to raw_code & compiled_code in v7
+        dbt_model.raw_sql = node.raw_code or node.raw_sql
+        dbt_model.compiled_sql = node.compiled_code or node.compiled_sql
+
+        if isinstance(node, GetJobRunModelsJobModels):
+            self._parse_model_meta(node, virtual_view)
+            self._parse_model_materialization(node, dbt_model)
+        parse_depends_on(
+            self._virtual_views, node.depends_on, source_map, macro_map, dbt_model
+        )
+
+        self._parse_node_columns(node, dbt_model)
+        self._set_entity_upstream(virtual_view, dbt_model)
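The structure derivation above is clearer with a concrete unique ID. A small sketch (hypothetical model; the name getter is assumed to strip the leading resource-type segment, which the expected test fixture's "jaffle_shop.customers" logical ID supports):

    # "model.jaffle_shop.customers" -> dbt model name "jaffle_shop.customers"
    unique_id = "model.jaffle_shop.customers"
    name = unique_id.split(".", 1)[1]  # stand-in for get_model_name_from_unique_id

    # rsplit(".") without a maxsplit splits on every dot, so [0] is the
    # leading project segment
    directory = name.rsplit(".")[0]

    assert directory == "jaffle_shop"  # AssetStructure(directories=["jaffle_shop"], name="customers")

diff 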
--git a/metaphor/dbt/cloud/parser/dbt_source_parser.py b/metaphor/dbt/cloud/parser/dbt_source_parser.py new file mode 100644 index 00000000..58e58479 --- /dev/null +++ b/metaphor/dbt/cloud/parser/dbt_source_parser.py @@ -0,0 +1,67 @@ +from typing import Dict, List, Optional + +from metaphor.common.entity_id import EntityId, parts_to_dataset_entity_id +from metaphor.dbt.cloud.discovery_api.generated.get_job_run_sources import ( + GetJobRunSourcesJobSources, +) +from metaphor.dbt.util import init_dataset, init_documentation, init_field_doc +from metaphor.models.metadata_change_event import DataPlatform, Dataset + + +class SourceParser: + def __init__( + self, + datasets: Dict[str, Dataset], + platform: DataPlatform, + account: Optional[str], + ): + self._datasets = datasets + self._platform = platform + self._account = account + + def _parse_source(self, source: GetJobRunSourcesJobSources) -> None: + if ( + not source.database + or not source.columns + or not source.schema_ + or not source.identifier + ): + return + + dataset = init_dataset( + self._datasets, + source.database, + source.schema_, + source.identifier, + self._platform, + self._account, + source.unique_id, + ) + + init_documentation(dataset) + assert dataset.documentation is not None + if source.description: + dataset.documentation.dataset_documentations = [source.description] + + for col in source.columns: + if col.description: + if not col.name: + continue + column_name = col.name.lower() + field_doc = init_field_doc(dataset, column_name) + field_doc.documentation = col.description + + def parse(self, sources: List[GetJobRunSourcesJobSources]) -> Dict[str, EntityId]: + source_map: Dict[str, EntityId] = {} + for source in sources: + assert source.database is not None and source.schema_ and source.identifier + source_map[source.unique_id] = parts_to_dataset_entity_id( + self._platform, + self._account, + source.database, + source.schema_, + source.identifier, + ) + self._parse_source(source) + + return source_map diff --git a/metaphor/dbt/cloud/parser/dbt_test_parser.py b/metaphor/dbt/cloud/parser/dbt_test_parser.py new file mode 100644 index 00000000..714d069d --- /dev/null +++ b/metaphor/dbt/cloud/parser/dbt_test_parser.py @@ -0,0 +1,157 @@ +from datetime import datetime +from typing import Dict, List, Optional + +from metaphor.common.logger import get_logger +from metaphor.dbt.cloud.discovery_api.generated.get_job_run_models import ( + GetJobRunModelsJobModels as Model, +) +from metaphor.dbt.cloud.discovery_api.generated.get_job_run_models import ( + GetJobRunModelsJobModelsRunResults as RunResult, +) +from metaphor.dbt.cloud.discovery_api.generated.get_job_run_tests import ( + GetJobRunTestsJobTests as Test, +) +from metaphor.dbt.util import add_data_quality_monitor, init_dataset, init_dbt_tests +from metaphor.models.metadata_change_event import ( + DataMonitorStatus, + DataPlatform, + Dataset, + DbtTest, + VirtualView, +) + +logger = get_logger() +dbt_run_result_output_data_monitor_status_map: Dict[str, DataMonitorStatus] = { + "warn": DataMonitorStatus.WARNING, + "skipped": DataMonitorStatus.UNKNOWN, + "error": DataMonitorStatus.ERROR, + "fail": DataMonitorStatus.ERROR, + "runtime error": DataMonitorStatus.ERROR, + "pass": DataMonitorStatus.PASSED, + "success": DataMonitorStatus.PASSED, +} + + +class TestParser: + def __init__( + self, + platform: DataPlatform, + account: Optional[str], + virtual_views: Dict[str, VirtualView], + datasets: Dict[str, Dataset], + ) -> None: + self._platform = platform + self._account = 
account
+        self._virtual_views = virtual_views
+        self._datasets = datasets
+
+    def parse(
+        self,
+        test: Test,
+        models: Dict[str, Model],
+    ) -> None:
+        # Check that this test refers to a model
+        if not test.depends_on:
+            return
+
+        model_unique_id = next(
+            (n for n in test.depends_on if n.startswith("model.")), None
+        )
+        if not model_unique_id:
+            return
+
+        # Skip the test if it references a non-existing (most likely disabled) model
+        if model_unique_id not in self._virtual_views:
+            logger.warning(
+                f"Test {test.unique_id} references non-active model {model_unique_id}"
+            )
+            return
+
+        if model_unique_id not in models:
+            return
+
+        model = models[model_unique_id]
+
+        dbt_test = DbtTest(
+            name=test.name,
+            unique_id=test.unique_id,
+            columns=[test.column_name] if test.column_name else [],
+            depends_on_macros=[n for n in test.depends_on if n.startswith("macro.")],
+        )
+
+        # V7 renamed "compiled_sql" to "compiled_code"
+        dbt_test.sql = test.compiled_code or test.compiled_sql
+
+        init_dbt_tests(self._virtual_views, model_unique_id).append(dbt_test)
+
+        if model.run_results:
+            self._parse_test_run_result(
+                test, models[model_unique_id], model.run_results
+            )
+
+    @staticmethod
+    def _get_run_result_executed_completed_at(
+        run_result: RunResult,
+    ) -> Optional[datetime]:
+        if isinstance(run_result.execute_completed_at, datetime):
+            return run_result.execute_completed_at
+        if isinstance(run_result.execute_completed_at, str):
+            completed_at = run_result.execute_completed_at
+            if completed_at.endswith("Z"):
+                # Convert Zulu to +00:00
+                completed_at = f"{completed_at[:-1]}+00:00"
+            try:
+                return datetime.fromisoformat(completed_at)
+            except Exception:
+                return None
+        return None
+
+    def _parse_test_run_result(
+        self,
+        test: Test,
+        model: Model,
+        run_results: List[RunResult],
+    ) -> None:
+        model_name = model.alias or model.name
+        if model.database is None or model.schema_ is None or model_name is None:
+            logger.warning(f"Skipping model without database/schema/name, {model.unique_id}")
+            return
+
+        if not test.name:
+            return
+
+        if not run_results:
+            logger.warning(f"Skipping test without run_results, {model.unique_id}")
+            return
+
+        def run_result_key(run_result: RunResult):
+            completed_at = self._get_run_result_executed_completed_at(run_result)
+            if not completed_at:
+                return 0
+            return completed_at.timestamp()
+
+        run_result = next(
+            (
+                n
+                for n in sorted(run_results, key=run_result_key, reverse=True)
+                if n.status
+            ),
+            None,
+        )
+        if run_result is None or run_result.status is None:
+            logger.warning(f"No valid run_result found: {run_results}")
+            return
+
+        dataset = init_dataset(
+            self._datasets,
+            model.database,
+            model.schema_,
+            model_name,
+            self._platform,
+            self._account,
+            model.unique_id,
+        )
+
+        status = dbt_run_result_output_data_monitor_status_map[run_result.status]
+        last_run = self._get_run_result_executed_completed_at(run_result)
+        add_data_quality_monitor(dataset, test.name, test.column_name, status, last_run)
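The Zulu-suffix rewrite in _get_run_result_executed_completed_at exists because datetime.fromisoformat only accepts a trailing "Z" from Python 3.11 onward, and this project still supports 3.8. A standalone sketch of the same normalization (the timestamp value is hypothetical):

    from datetime import datetime

    completed_at = "2024-07-24T04:01:41.238Z"  # e.g. a discovery API timestamp
    if completed_at.endswith("Z"):
        # Rewrite the Zulu suffix as an explicit UTC offset before parsing
        completed_at = f"{completed_at[:-1]}+00:00"

    parsed = datetime.fromisoformat(completed_at)
    assert parsed.tzinfo is not None  # timezone-aware UTC datetime

diff --git a/metaphor/dbt/cloud/parser/parser.py b/metaphor/dbt/cloud/parser/parser.py
new file mode 100644
index 00000000..fd650669
--- /dev/null
+++ b/metaphor/dbt/cloud/parser/parser.py
@@ -0,0 +1,165 @@
+import time
+from typing import Dict, List, Optional, Set
+
+from metaphor.common.event_util import ENTITY_TYPES
+from metaphor.common.logger import get_logger
+from metaphor.common.snowflake import normalize_snowflake_account
+from metaphor.dbt.cloud.client import DbtRun
+from metaphor.dbt.cloud.config import DbtCloudConfig
+from metaphor.dbt.cloud.discovery_api import 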
DiscoveryAPIClient +from metaphor.dbt.cloud.discovery_api.generated.get_job_run_models import ( + GetJobRunModelsJobModels as Model, +) +from metaphor.dbt.cloud.parser.common import dataset_has_parsed_fields +from metaphor.dbt.cloud.parser.dbt_macro_parser import MacroParser +from metaphor.dbt.cloud.parser.dbt_metric_parser import MetricParser +from metaphor.dbt.cloud.parser.dbt_node_parser import NodeParser +from metaphor.dbt.cloud.parser.dbt_source_parser import SourceParser +from metaphor.dbt.cloud.parser.dbt_test_parser import TestParser +from metaphor.dbt.util import init_virtual_view +from metaphor.models.metadata_change_event import ( + DataPlatform, + Dataset, + Metric, + VirtualView, +) + +logger = get_logger() + + +class Parser: + def __init__( + self, + discovery_api: DiscoveryAPIClient, + config: DbtCloudConfig, + platform: DataPlatform, + account: Optional[str], + project_name: Optional[str], + docs_base_url: str, + project_explore_url: str, + ) -> None: + self._discovery_api = discovery_api + self._platform = platform + self._account = account + self._config = config + + self._datasets: Dict[str, Dataset] = {} + self._virtual_views: Dict[str, VirtualView] = {} + self._metrics: Dict[str, Metric] = {} + self._referenced_virtual_views: Set[str] = set() + + self._project_name = project_name + if self._account and platform == DataPlatform.SNOWFLAKE: + self._account = normalize_snowflake_account(self._account) + + self._source_parser = SourceParser( + self._datasets, self._platform, self._account + ) + self._macro_parser = MacroParser(self._discovery_api) + self._node_parser = NodeParser( + self._discovery_api, + self._config, + self._platform, + self._account, + docs_base_url, + project_explore_url, + self._datasets, + self._virtual_views, + self._metrics, + ) + self._test_parser = TestParser( + self._platform, + self._account, + self._virtual_views, + self._datasets, + ) + self._metric_parser = MetricParser( + self._metrics, + self._virtual_views, + docs_base_url, + ) + + def _get_source_map(self, run: DbtRun): + job_run_sources = self._discovery_api.get_job_run_sources( + run.job_id, run.run_id + ) + assert job_run_sources.job + return self._source_parser.parse(job_run_sources.job.sources) + + def _get_macro_map(self, run: DbtRun): + + job_run_macros = self._discovery_api.get_job_run_macros(run.job_id, run.run_id) + assert job_run_macros.job + return self._macro_parser.parse(job_run_macros.job.macros) + + def _get_nodes(self, run: DbtRun): + job_run_models = self._discovery_api.get_job_run_models(run.job_id, run.run_id) + assert job_run_models.job + job_run_snapshots = self._discovery_api.get_job_run_snapshots( + run.job_id, run.run_id + ) + assert job_run_snapshots.job + return job_run_models.job.models + job_run_snapshots.job.snapshots + + def _get_tests(self, run: DbtRun): + job_run_tests = self._discovery_api.get_job_run_tests(run.job_id, run.run_id) + assert job_run_tests.job + return job_run_tests.job.tests + + def _get_metrics(self, run: DbtRun): + job_run_metrics = self._discovery_api.get_job_run_metrics( + run.job_id, run.run_id + ) + assert job_run_metrics.job + return job_run_metrics.job.metrics + + def parse_run(self, run: DbtRun): + """ + Parses a single job run. 
+        """
+        start = time.time()
+        nodes = self._get_nodes(run)
+        models: Dict[str, Model] = dict()
+        for node in nodes:
+            init_virtual_view(
+                self._virtual_views, node.unique_id, NodeParser.get_node_name(node)
+            )
+            if self._project_name and node.package_name != self._project_name:
+                self._referenced_virtual_views.add(node.unique_id)
+            if isinstance(node, Model):
+                models[node.unique_id] = node
+
+        if not self._virtual_views:
+            logger.info(
+                f"Fetched job ID: {run.job_id}, no entities to parse. Elapsed time: {time.time() - start} secs."
+            )
+            return []
+
+        source_map = self._get_source_map(run)
+        macro_map = self._get_macro_map(run)
+
+        for node in nodes:
+            self._node_parser.parse(node, source_map, macro_map)
+
+        for test in self._get_tests(run):
+            self._test_parser.parse(test, models)
+
+        for metric in self._get_metrics(run):
+            self._metric_parser.parse(metric, source_map, macro_map)
+
+        entities: List[ENTITY_TYPES] = []
+        entities.extend(
+            dataset
+            for dataset in self._datasets.values()
+            if dataset_has_parsed_fields(dataset)
+        )
+        entities.extend(
+            v
+            for k, v in self._virtual_views.items()
+            if k not in self._referenced_virtual_views
+        )
+        entities.extend(self._metrics.values())
+        logger.info(
+            f"Fetched job ID: {run.job_id} and parsed {len(entities)} entities. Elapsed time: {time.time() - start} secs."
+        )
+        return entities
diff --git a/metaphor/dbt/cloud/utils.py b/metaphor/dbt/cloud/utils.py
new file mode 100644
index 00000000..ca9d4d83
--- /dev/null
+++ b/metaphor/dbt/cloud/utils.py
@@ -0,0 +1,19 @@
+from metaphor.dbt.cloud.discovery_api.generated.get_environment_adapter_type import (
+    GetEnvironmentAdapterTypeEnvironment,
+)
+from metaphor.models.metadata_change_event import DataPlatform
+
+
+def parse_environment(environment: GetEnvironmentAdapterTypeEnvironment):
+    adapter_type = (
+        environment.adapter_type or "unknown"
+    )  # It's possible for the environment to not have an adapter type!
+    adapter_type = adapter_type.upper()
+    if adapter_type == "DATABRICKS":
+        platform = DataPlatform.UNITY_CATALOG
+    else:
+        assert (
+            adapter_type in DataPlatform.__members__
+        ), f"Invalid data platform {adapter_type}"
+        platform = DataPlatform[adapter_type]
+    return platform, environment.dbt_project_name
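parse_environment's adapter-to-platform mapping can be exercised directly; everything except the DATABRICKS special case relies on the upper-cased adapter type matching a DataPlatform member name. A hedged sketch (SimpleNamespace stands in for the generated environment model, which parse_environment only duck-types):

    from types import SimpleNamespace

    from metaphor.dbt.cloud.utils import parse_environment
    from metaphor.models.metadata_change_event import DataPlatform

    snowflake_env = SimpleNamespace(adapter_type="snowflake", dbt_project_name="jaffle_shop")
    platform, project = parse_environment(snowflake_env)
    assert platform is DataPlatform.SNOWFLAKE and project == "jaffle_shop"

    # Databricks is special-cased to the Unity Catalog platform
    databricks_env = SimpleNamespace(adapter_type="databricks", dbt_project_name="jaffle_shop")
    platform, _ = parse_environment(databricks_env)
    assert platform is DataPlatform.UNITY_CATALOG

diff --git a/poetry.lock b/poetry.lock
index 883b7e0c..9f855e39 100644
--- a/poetry.lock
+++ b/poetry.lock
@@ -217,6 +217,32 @@ cffi = ">=1.0.1"
 dev = ["cogapp", "pre-commit", "pytest", "wheel"]
 tests = ["pytest"]
+[[package]]
+name = "ariadne-codegen"
+version = "0.14.0"
+description = "Generate fully typed GraphQL client from schema, queries and mutations!" 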
+optional = false +python-versions = "*" +files = [ + {file = "ariadne_codegen-0.14.0-py2.py3-none-any.whl", hash = "sha256:e0553960bd298ddc49aeeb748a0be3a37acbdfdee7d7c26fce7154715fa826e0"}, + {file = "ariadne_codegen-0.14.0.tar.gz", hash = "sha256:d5b15470195c3858b88a2bae855b5c04602ffae40228077b6cfb26a57d3f14f6"}, +] + +[package.dependencies] +autoflake = "*" +black = "*" +click = ">=8.1,<9.0" +graphql-core = ">=3.2.0,<3.3" +httpx = ">=0.23,<1.0" +isort = "*" +pydantic = ">=2.0.0,<3.0.0" +toml = ">=0.10,<1.0" + +[package.extras] +dev = ["ariadne", "freezegun", "mypy", "pylint", "pytest", "pytest-asyncio", "pytest-httpx", "pytest-mock", "requests-toolbelt", "types-toml"] +opentelemetry = ["opentelemetry-api"] +subscriptions = ["websockets (>=11.0,<12.0)"] + [[package]] name = "asn1crypto" version = "1.5.1" @@ -338,6 +364,21 @@ tests = ["attrs[tests-no-zope]", "zope-interface"] tests-mypy = ["mypy (>=1.6)", "pytest-mypy-plugins"] tests-no-zope = ["attrs[tests-mypy]", "cloudpickle", "hypothesis", "pympler", "pytest (>=4.3.0)", "pytest-xdist[psutil]"] +[[package]] +name = "autoflake" +version = "2.3.1" +description = "Removes unused imports and unused variables" +optional = false +python-versions = ">=3.8" +files = [ + {file = "autoflake-2.3.1-py3-none-any.whl", hash = "sha256:3ae7495db9084b7b32818b4140e6dc4fc280b712fb414f5b8fe57b0a8e85a840"}, + {file = "autoflake-2.3.1.tar.gz", hash = "sha256:c98b75dc5b0a86459c4f01a1d32ac7eb4338ec4317a4469515ff1e687ecd909e"}, +] + +[package.dependencies] +pyflakes = ">=3.0.0" +tomli = {version = ">=2.0.1", markers = "python_version < \"3.11\""} + [[package]] name = "avro" version = "1.11.3" @@ -1910,7 +1951,7 @@ websockets = ["websockets (>=10,<12)"] name = "graphql-core" version = "3.2.3" description = "GraphQL implementation for Python, a port of GraphQL.js, the JavaScript reference implementation for GraphQL." 
-optional = true +optional = false python-versions = ">=3.6,<4" files = [ {file = "graphql-core-3.2.3.tar.gz", hash = "sha256:06d2aad0ac723e35b1cb47885d3e5c45e956a53bc1b209a9fc5369007fe46676"}, @@ -4850,7 +4891,6 @@ files = [ {file = "PyYAML-6.0.1-cp311-cp311-win_amd64.whl", hash = "sha256:bf07ee2fef7014951eeb99f56f39c9bb4af143d8aa3c21b1677805985307da34"}, {file = "PyYAML-6.0.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:855fb52b0dc35af121542a76b9a84f8d1cd886ea97c84703eaa6d88e37a2ad28"}, {file = "PyYAML-6.0.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:40df9b996c2b73138957fe23a16a4f0ba614f4c0efce1e9406a184b6d07fa3a9"}, - {file = "PyYAML-6.0.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a08c6f0fe150303c1c6b71ebcd7213c2858041a7e01975da3a99aed1e7a378ef"}, {file = "PyYAML-6.0.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6c22bec3fbe2524cde73d7ada88f6566758a8f7227bfbf93a408a9d86bcc12a0"}, {file = "PyYAML-6.0.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8d4e9c88387b0f5c7d5f281e55304de64cf7f9c0021a3525bd3b1c542da3b0e4"}, {file = "PyYAML-6.0.1-cp312-cp312-win32.whl", hash = "sha256:d483d2cdf104e7c9fa60c544d92981f12ad66a457afae824d146093b8c294c54"}, @@ -5248,19 +5288,19 @@ six = "*" [[package]] name = "setuptools" -version = "71.0.4" +version = "72.0.0" description = "Easily download, build, install, upgrade, and uninstall Python packages" optional = true python-versions = ">=3.8" files = [ - {file = "setuptools-71.0.4-py3-none-any.whl", hash = "sha256:ed2feca703be3bdbd94e6bb17365d91c6935c6b2a8d0bb09b66a2c435ba0b1a5"}, - {file = "setuptools-71.0.4.tar.gz", hash = "sha256:48297e5d393a62b7cb2a10b8f76c63a73af933bd809c9e0d0d6352a1a0135dd8"}, + {file = "setuptools-72.0.0-py3-none-any.whl", hash = "sha256:98b4d786a12fadd34eabf69e8d014b84e5fc655981e4ff419994700434ace132"}, + {file = "setuptools-72.0.0.tar.gz", hash = "sha256:5a0d9c6a2f332881a0153f629d8000118efd33255cfa802757924c53312c76da"}, ] [package.extras] core = ["importlib-metadata (>=6)", "importlib-resources (>=5.10.2)", "jaraco.text (>=3.7)", "more-itertools (>=8.8)", "ordered-set (>=3.1.1)", "packaging (>=24)", "platformdirs (>=2.6.2)", "tomli (>=2.0.1)", "wheel (>=0.43.0)"] doc = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "pygments-github-lexers (==0.0.5)", "pyproject-hooks (!=1.1)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-favicon", "sphinx-inline-tabs", "sphinx-lint", "sphinx-notfound-page (>=1,<2)", "sphinx-reredirects", "sphinxcontrib-towncrier"] -test = ["build[virtualenv] (>=1.0.3)", "filelock (>=3.4.0)", "importlib-metadata", "ini2toml[lite] (>=0.14)", "jaraco.develop (>=7.21)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "jaraco.test", "mypy (==1.10.0)", "packaging (>=23.2)", "pip (>=19.1)", "pyproject-hooks (!=1.1)", "pytest (>=6,!=8.1.*)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-home (>=0.5)", "pytest-mypy", "pytest-perf", "pytest-ruff (<0.4)", "pytest-ruff (>=0.2.1)", "pytest-ruff (>=0.3.2)", "pytest-subprocess", "pytest-timeout", "pytest-xdist (>=3)", "tomli", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel"] +test = ["build[virtualenv] (>=1.0.3)", "filelock (>=3.4.0)", "importlib-metadata", "ini2toml[lite] (>=0.14)", "jaraco.develop (>=7.21)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "jaraco.test", "mypy (==1.11.*)", "packaging (>=23.2)", "pip (>=19.1)", "pyproject-hooks (!=1.1)", "pytest (>=6,!=8.1.*)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler 
(>=2.2)", "pytest-home (>=0.5)", "pytest-mypy", "pytest-perf", "pytest-ruff (<0.4)", "pytest-ruff (>=0.2.1)", "pytest-ruff (>=0.3.2)", "pytest-subprocess", "pytest-timeout", "pytest-xdist (>=3)", "tomli", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel"] [[package]] name = "sgqlc" @@ -6408,4 +6448,4 @@ unity-catalog = ["databricks-sdk", "databricks-sql-connector", "sqlglot"] [metadata] lock-version = "2.0" python-versions = ">=3.8.1,<3.12" -content-hash = "25c1270cfef037e830b9d7bc04036ce6b1ba638c709e6ae79d900c6e99d163ed" +content-hash = "bebcc55b8c2a05d4940b52cfe86522b1a5b3b2a582fb5cde8c51f6e3d3075236" diff --git a/pyproject.toml b/pyproject.toml index 447efb8d..b1454535 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,6 +1,6 @@ [tool.poetry] name = "metaphor-connectors" -version = "0.14.59" +version = "0.14.60" license = "Apache-2.0" description = "A collection of Python-based 'connectors' that extract metadata from various sources to ingest into the Metaphor app." authors = ["Metaphor "] @@ -143,6 +143,7 @@ trino = ["trino"] unity_catalog = ["databricks-sdk", "databricks-sql-connector", "sqlglot"] [tool.poetry.dev-dependencies] +ariadne-codegen = "^0.14.0" bandit = "^1.7.2" black = "^24.3.0" coverage = "^7.1.0" @@ -196,7 +197,7 @@ ignore_missing_imports = true plugins = ["pydantic.mypy"] [tool.bandit] -exclude_dirs = ["venv"] +exclude_dirs = ["venv", "metaphor/dbt/cloud/discovery_api/generated"] skips = [ 'B101', 'B106', @@ -205,3 +206,14 @@ skips = [ 'B607', 'B608' ] + +[tool.coverage.run] +source = [ + "metaphor" +] +omit = [ + # TODO(SC-14236): Include __init__.py back to coverage after fixing async testing issues + "**/__init__.py", + # Ignore auto-generated files + "**/generated/*" +] diff --git a/tests/dbt/cloud/expected.json b/tests/dbt/cloud/expected.json new file mode 100644 index 00000000..5e7255e9 --- /dev/null +++ b/tests/dbt/cloud/expected.json @@ -0,0 +1,2785 @@ +[ + { + "dataQuality": { + "monitors": [ + { + "lastRun": "2024-07-24T04:01:41.238000+00:00", + "status": "PASSED", + "targets": [ + { + "column": "customer_type", + "dataset": "DATASET~9916513B634F4B8103E337055A33BF4B" + } + ], + "title": "accepted_values_customers_customer_type__new__returning" + }, + { + "lastRun": "2024-07-24T04:01:41.238000+00:00", + "status": "PASSED", + "targets": [ + { + "column": "customer_id", + "dataset": "DATASET~9916513B634F4B8103E337055A33BF4B" + } + ], + "title": "not_null_customers_customer_id" + }, + { + "lastRun": "2024-07-24T04:01:41.238000+00:00", + "status": "PASSED", + "targets": [ + { + "column": "customer_id", + "dataset": "DATASET~9916513B634F4B8103E337055A33BF4B" + } + ], + "title": "unique_customers_customer_id" + } + ], + "provider": "DBT" + }, + "logicalId": { + "account": "john.doe@metaphor.io", + "name": "acme.jaffle_shop.customers", + "platform": "SNOWFLAKE" + } + }, + { + "dataQuality": { + "monitors": [ + { + "lastRun": "2024-07-24T04:01:39.404000+00:00", + "status": "PASSED", + "targets": [ + { + "column": "order_id", + "dataset": "DATASET~2628FDFF9D0CDB0AA4E3FC0FFBED02B8" + } + ], + "title": "not_null_orders_order_id" + }, + { + "lastRun": "2024-07-24T04:01:39.404000+00:00", + "status": "PASSED", + "targets": [ + { + "column": "customer_id", + "dataset": "DATASET~2628FDFF9D0CDB0AA4E3FC0FFBED02B8" + } + ], + "title": "relationships_orders_customer_id__customer_id__ref_stg_customers_" + }, + { + "lastRun": "2024-07-24T04:01:39.404000+00:00", + "status": "PASSED", + "targets": [ + { + "column": "order_id", + "dataset": 
"DATASET~2628FDFF9D0CDB0AA4E3FC0FFBED02B8" + } + ], + "title": "unique_orders_order_id" + } + ], + "provider": "DBT" + }, + "logicalId": { + "account": "john.doe@metaphor.io", + "name": "acme.jaffle_shop.orders", + "platform": "SNOWFLAKE" + } + }, + { + "dataQuality": { + "monitors": [ + { + "lastRun": "2024-07-24T04:01:35.949000+00:00", + "status": "PASSED", + "targets": [ + { + "column": "customer_id", + "dataset": "DATASET~512557BF25DF9EAEE5584140A235E03D" + } + ], + "title": "not_null_stg_customers_customer_id" + }, + { + "lastRun": "2024-07-24T04:01:35.949000+00:00", + "status": "PASSED", + "targets": [ + { + "column": "customer_id", + "dataset": "DATASET~512557BF25DF9EAEE5584140A235E03D" + } + ], + "title": "unique_stg_customers_customer_id" + } + ], + "provider": "DBT" + }, + "logicalId": { + "account": "john.doe@metaphor.io", + "name": "acme.jaffle_shop.stg_customers", + "platform": "SNOWFLAKE" + } + }, + { + "dataQuality": { + "monitors": [ + { + "lastRun": "2024-07-24T04:01:35.994000+00:00", + "status": "PASSED", + "targets": [ + { + "column": "location_id", + "dataset": "DATASET~30CDC644AD658B552B89094BA9B3E99C" + } + ], + "title": "not_null_stg_locations_location_id" + }, + { + "lastRun": "2024-07-24T04:01:35.994000+00:00", + "status": "PASSED", + "targets": [ + { + "column": "location_id", + "dataset": "DATASET~30CDC644AD658B552B89094BA9B3E99C" + } + ], + "title": "unique_stg_locations_location_id" + } + ], + "provider": "DBT" + }, + "logicalId": { + "account": "john.doe@metaphor.io", + "name": "acme.jaffle_shop.stg_locations", + "platform": "SNOWFLAKE" + } + }, + { + "dataQuality": { + "monitors": [ + { + "lastRun": "2024-07-24T04:01:35.953000+00:00", + "status": "PASSED", + "targets": [ + { + "column": "order_item_id", + "dataset": "DATASET~D1E6B89E5891621E2DB93165B5828719" + } + ], + "title": "not_null_stg_order_items_order_item_id" + }, + { + "lastRun": "2024-07-24T04:01:35.953000+00:00", + "status": "PASSED", + "targets": [ + { + "column": "order_item_id", + "dataset": "DATASET~D1E6B89E5891621E2DB93165B5828719" + } + ], + "title": "unique_stg_order_items_order_item_id" + } + ], + "provider": "DBT" + }, + "logicalId": { + "account": "john.doe@metaphor.io", + "name": "acme.jaffle_shop.stg_order_items", + "platform": "SNOWFLAKE" + } + }, + { + "dataQuality": { + "monitors": [ + { + "lastRun": "2024-07-24T04:01:37.610000+00:00", + "status": "PASSED", + "targets": [ + { + "column": "order_id", + "dataset": "DATASET~757BF2FC19B9BBD0DBAFD90768F2DC8B" + } + ], + "title": "not_null_stg_orders_order_id" + }, + { + "lastRun": "2024-07-24T04:01:37.610000+00:00", + "status": "PASSED", + "targets": [ + { + "column": "order_id", + "dataset": "DATASET~757BF2FC19B9BBD0DBAFD90768F2DC8B" + } + ], + "title": "unique_stg_orders_order_id" + } + ], + "provider": "DBT" + }, + "logicalId": { + "account": "john.doe@metaphor.io", + "name": "acme.jaffle_shop.stg_orders", + "platform": "SNOWFLAKE" + } + }, + { + "dataQuality": { + "monitors": [ + { + "lastRun": "2024-07-24T04:01:36.921000+00:00", + "status": "PASSED", + "targets": [ + { + "column": "product_id", + "dataset": "DATASET~2932BAF66BF93D64D6AC2F457698EB52" + } + ], + "title": "not_null_stg_products_product_id" + }, + { + "lastRun": "2024-07-24T04:01:36.921000+00:00", + "status": "PASSED", + "targets": [ + { + "column": "product_id", + "dataset": "DATASET~2932BAF66BF93D64D6AC2F457698EB52" + } + ], + "title": "unique_stg_products_product_id" + } + ], + "provider": "DBT" + }, + "logicalId": { + "account": "john.doe@metaphor.io", + "name": 
"acme.jaffle_shop.stg_products", + "platform": "SNOWFLAKE" + } + }, + { + "dataQuality": { + "monitors": [ + { + "lastRun": "2024-07-24T04:01:37.035000+00:00", + "status": "PASSED", + "targets": [ + { + "column": "supply_uuid", + "dataset": "DATASET~A808B6BE5F80DA3A550D49C92E8F2D89" + } + ], + "title": "not_null_stg_supplies_supply_uuid" + }, + { + "lastRun": "2024-07-24T04:01:37.035000+00:00", + "status": "PASSED", + "targets": [ + { + "column": "supply_uuid", + "dataset": "DATASET~A808B6BE5F80DA3A550D49C92E8F2D89" + } + ], + "title": "unique_stg_supplies_supply_uuid" + } + ], + "provider": "DBT" + }, + "logicalId": { + "account": "john.doe@metaphor.io", + "name": "acme.jaffle_shop.stg_supplies", + "platform": "SNOWFLAKE" + } + }, + { + "dbtModel": { + "compiledSql": "\\n\\nwith\\n\\ncustomers as (\\n\\n select * from acme.jaffle_shop.stg_customers\\n\\n),\\n\\norders_mart as (\\n\\n select * from acme.jaffle_shop.orders\\n\\n),\\n\\norder_items_mart as (\\n\\n select * from acme.jaffle_shop.order_items\\n),\\n\\norder_summary as (\\n\\n select\\n customer_id,\\n\\n count(distinct om.order_id) as count_lifetime_orders,\\n count(distinct om.order_id) > 1 as is_repeat_buyer,\\n min(om.ordered_at) as first_ordered_at,\\n max(om.ordered_at) as last_ordered_at,\\n sum(oi.subtotal) as lifetime_spend_pretax,\\n sum(om.order_total) as lifetime_spend\\n\\n from orders_mart om\\n \\n left join order_items_mart oi on om.order_id = oi.order_id\\n \\n group by 1\\n\\n),\\n\\njoined as (\\n\\n select\\n customers.*,\\n order_summary.count_lifetime_orders,\\n order_summary.first_ordered_at,\\n order_summary.last_ordered_at,\\n order_summary.lifetime_spend_pretax,\\n order_summary.lifetime_spend,\\n\\n case\\n when order_summary.is_repeat_buyer then 'returning'\\n else 'new'\\n end as customer_type\\n\\n from customers\\n\\n left join order_summary\\n on customers.customer_id = order_summary.customer_id\\n\\n)\\n\\nselect * from joined", + "description": "Customer overview data mart, offering key details for each unique customer. 
One row per customer.", + "docsUrl": "https://cloud.getdbt.com/accounts/1/jobs/21/docs/#!/model/model.jaffle_shop.customers", + "fields": [ + { + "description": "The unique key of the orders mart.", + "fieldName": "customer_id", + "fieldPath": "customer_id", + "nativeType": "TEXT", + "tags": [] + }, + { + "description": "Customers' full name.", + "fieldName": "customer_name", + "fieldPath": "customer_name", + "nativeType": "TEXT", + "tags": [] + }, + { + "description": "Total number of orders a customer has ever placed.", + "fieldName": "count_lifetime_orders", + "fieldPath": "count_lifetime_orders", + "nativeType": "NUMBER", + "tags": [] + }, + { + "description": "The timestamp when a customer placed their first order.", + "fieldName": "first_ordered_at", + "fieldPath": "first_ordered_at", + "nativeType": "TIMESTAMP_NTZ", + "tags": [] + }, + { + "description": "The timestamp of a customer's most recent order.", + "fieldName": "last_ordered_at", + "fieldPath": "last_ordered_at", + "nativeType": "TIMESTAMP_NTZ", + "tags": [] + }, + { + "description": "The sum of all the pre-tax subtotals of every order a customer has placed.", + "fieldName": "lifetime_spend_pretax", + "fieldPath": "lifetime_spend_pretax", + "nativeType": "NUMBER", + "tags": [] + }, + { + "description": "The sum of all the order totals (including tax) that a customer has ever placed.", + "fieldName": "lifetime_spend", + "fieldPath": "lifetime_spend", + "nativeType": "NUMBER", + "tags": [] + }, + { + "description": "Options are 'new' or 'returning', indicating if a customer has ordered more than once or has only placed their first order to date.", + "fieldName": "customer_type", + "fieldPath": "customer_type", + "nativeType": "TEXT", + "tags": [] + } + ], + "materialization": { + "targetDataset": "DATASET~9916513B634F4B8103E337055A33BF4B", + "type": "TABLE" + }, + "packageName": "jaffle_shop", + "rawSql": "{{\\n config(\\n materialized='table'\\n )\\n}}\\n\\nwith\\n\\ncustomers as (\\n\\n select * from {{ ref('stg_customers') }}\\n\\n),\\n\\norders_mart as (\\n\\n select * from {{ ref('orders') }}\\n\\n),\\n\\norder_items_mart as (\\n\\n select * from {{ ref('order_items') }}\\n),\\n\\norder_summary as (\\n\\n select\\n customer_id,\\n\\n count(distinct om.order_id) as count_lifetime_orders,\\n count(distinct om.order_id) > 1 as is_repeat_buyer,\\n min(om.ordered_at) as first_ordered_at,\\n max(om.ordered_at) as last_ordered_at,\\n sum(oi.subtotal) as lifetime_spend_pretax,\\n sum(om.order_total) as lifetime_spend\\n\\n from orders_mart om\\n \\n left join order_items_mart oi on om.order_id = oi.order_id\\n \\n group by 1\\n\\n),\\n\\njoined as (\\n\\n select\\n customers.*,\\n order_summary.count_lifetime_orders,\\n order_summary.first_ordered_at,\\n order_summary.last_ordered_at,\\n order_summary.lifetime_spend_pretax,\\n order_summary.lifetime_spend,\\n\\n case\\n when order_summary.is_repeat_buyer then 'returning'\\n else 'new'\\n end as customer_type\\n\\n from customers\\n\\n left join order_summary\\n on customers.customer_id = order_summary.customer_id\\n\\n)\\n\\nselect * from joined", + "sourceModels": [ + "VIRTUAL_VIEW~FE32A419A352C0C5890E84AB1F6F25B3", + "VIRTUAL_VIEW~A01D8D20460F0A5E1002EA6E565963AE", + "VIRTUAL_VIEW~F3CAA2D12722D7A5CB634D3277D91846" + ], + "tests": [ + { + "columns": [ + "customer_type" + ], + "dependsOnMacros": [ + "macro.dbt.get_where_subquery", + "macro.dbt.test_accepted_values" + ], + "name": "accepted_values_customers_customer_type__new__returning", + "sql": "\\n \\n \\n\\nwith all_values 
as (\\n\\n select\\n customer_type as value_field,\\n count(*) as n_records\\n\\n from acme.jaffle_shop.customers\\n group by customer_type\\n\\n)\\n\\nselect *\\nfrom all_values\\nwhere value_field not in (\\n 'new','returning'\\n)\\n\\n\\n", + "uniqueId": "test.jaffle_shop.accepted_values_customers_customer_type__new__returning.d12f0947c8" + }, + { + "columns": [ + "customer_id" + ], + "dependsOnMacros": [ + "macro.dbt.get_where_subquery", + "macro.dbt.test_not_null" + ], + "name": "not_null_customers_customer_id", + "sql": "\\n \\n \\n\\n\\n\\nselect customer_id\\nfrom acme.jaffle_shop.customers\\nwhere customer_id is null\\n\\n\\n", + "uniqueId": "test.jaffle_shop.not_null_customers_customer_id.5c9bf9911d" + }, + { + "columns": [ + "customer_id" + ], + "dependsOnMacros": [ + "macro.dbt.get_where_subquery", + "macro.dbt.test_unique" + ], + "name": "unique_customers_customer_id", + "sql": "\\n \\n \\n\\nselect\\n customer_id as unique_field,\\n count(*) as n_records\\n\\nfrom acme.jaffle_shop.customers\\nwhere customer_id is not null\\ngroup by customer_id\\nhaving count(*) > 1\\n\\n\\n", + "uniqueId": "test.jaffle_shop.unique_customers_customer_id.c5af1ff4b1" + } + ], + "url": "https://cloud.getdbt.com/explore/1/projects/123/environments/production/details/model.jaffle_shop.customers" + }, + "entityUpstream": { + "sourceEntities": [ + "VIRTUAL_VIEW~FE32A419A352C0C5890E84AB1F6F25B3", + "VIRTUAL_VIEW~A01D8D20460F0A5E1002EA6E565963AE", + "VIRTUAL_VIEW~F3CAA2D12722D7A5CB634D3277D91846" + ] + }, + "logicalId": { + "name": "jaffle_shop.customers", + "type": "DBT_MODEL" + }, + "structure": { + "directories": [ + "jaffle_shop" + ], + "name": "customers" + } + }, + { + "dbtModel": { + "compiledSql": "-- metricflow_time_spine.sql\\nwith days as (\\n --for BQ adapters use \"DATE('01/01/2000','mm/dd/yyyy')\"\\n\\n with date_spine as\\n(\\n\\n \\n\\n\\n\\n\\n\\nwith rawdata as (\\n\\n \\n\\n \\n\\n with p as (\\n select 0 as generated_number union all select 1\\n ), unioned as (\\n\\n select\\n\\n \\n p0.generated_number * power(2, 0)\\n + \\n \\n p1.generated_number * power(2, 1)\\n + \\n \\n p2.generated_number * power(2, 2)\\n + \\n \\n p3.generated_number * power(2, 3)\\n + \\n \\n p4.generated_number * power(2, 4)\\n + \\n \\n p5.generated_number * power(2, 5)\\n + \\n \\n p6.generated_number * power(2, 6)\\n + \\n \\n p7.generated_number * power(2, 7)\\n + \\n \\n p8.generated_number * power(2, 8)\\n + \\n \\n p9.generated_number * power(2, 9)\\n + \\n \\n p10.generated_number * power(2, 10)\\n + \\n \\n p11.generated_number * power(2, 11)\\n \\n \\n + 1\\n as generated_number\\n\\n from\\n\\n \\n p as p0\\n cross join \\n \\n p as p1\\n cross join \\n \\n p as p2\\n cross join \\n \\n p as p3\\n cross join \\n \\n p as p4\\n cross join \\n \\n p as p5\\n cross join \\n \\n p as p6\\n cross join \\n \\n p as p7\\n cross join \\n \\n p as p8\\n cross join \\n \\n p as p9\\n cross join \\n \\n p as p10\\n cross join \\n \\n p as p11\\n \\n \\n\\n )\\n\\n select *\\n from unioned\\n where generated_number <= 3651\\n order by generated_number\\n\\n\\n\\n),\\n\\nall_periods as (\\n\\n select (\\n \\n\\n dateadd(\\n day,\\n row_number() over (order by 1) - 1,\\n \\n\\n dateadd(\\n day,\\n -3650,\\n cast(convert_timezone('UTC', 'America/Los_Angeles',\\n cast(convert_timezone('UTC', current_timestamp()) as timestamp)\\n) as date)\\n )\\n\\n\\n )\\n\\n\\n ) as date_day\\n from rawdata\\n\\n),\\n\\nfiltered as (\\n\\n select *\\n from all_periods\\n where date_day <= cast(\\n\\n dateadd(\\n day,\\n 
1,\\n cast(convert_timezone('UTC', 'America/Los_Angeles',\\n cast(convert_timezone('UTC', current_timestamp()) as timestamp)\\n) as date)\\n )\\n\\n as date)\\n\\n)\\n\\nselect * from filtered\\n\\n\\n\\n)\\nselect\\n cast(d.date_day as timestamp) as date_day\\nfrom\\n date_spine d\\n\\n\\n),\\n\\nfinal as (\\n select cast(date_day as date) as date_day\\n from days\\n)\\n\\nselect *\\nfrom final", + "docsUrl": "https://cloud.getdbt.com/accounts/1/jobs/21/docs/#!/model/model.jaffle_shop.metricflow_time_spine", + "fields": [ + { + "fieldName": "date_day", + "fieldPath": "date_day", + "nativeType": "DATE", + "tags": [] + } + ], + "macros": [ + { + "arguments": [], + "dependsOnMacros": [ + "macro.dbt_date.default__get_base_dates" + ], + "description": "", + "name": "get_base_dates", + "packageName": "dbt_date", + "sql": "{% macro get_base_dates(start_date=None, end_date=None, n_dateparts=None, datepart=\"day\") %}\\n {{ adapter.dispatch('get_base_dates', 'dbt_date') (start_date, end_date, n_dateparts, datepart) }}\\n{% endmacro %}", + "uniqueId": "macro.dbt_date.get_base_dates" + } + ], + "materialization": { + "targetDataset": "DATASET~420E546583F6D2B1719826B06A843433", + "type": "TABLE" + }, + "packageName": "jaffle_shop", + "rawSql": "-- metricflow_time_spine.sql\\nwith days as (\\n --for BQ adapters use \"DATE('01/01/2000','mm/dd/yyyy')\"\\n{{ dbt_date.get_base_dates(n_dateparts=365*10, datepart=\"day\") }}\\n),\\n\\nfinal as (\\n select cast(date_day as date) as date_day\\n from days\\n)\\n\\nselect *\\nfrom final", + "url": "https://cloud.getdbt.com/explore/1/projects/123/environments/production/details/model.jaffle_shop.metricflow_time_spine" + }, + "logicalId": { + "name": "jaffle_shop.metricflow_time_spine", + "type": "DBT_MODEL" + }, + "structure": { + "directories": [ + "jaffle_shop" + ], + "name": "metricflow_time_spine" + } + }, + { + "dbtModel": { + "compiledSql": "\\n\\nwith order_items as (\\n\\n select * from acme.jaffle_shop.stg_order_items\\n\\n),\\n\\n\\norders as (\\n \\n select * from acme.jaffle_shop.stg_orders\\n),\\n\\nproducts as (\\n\\n select * from acme.jaffle_shop.stg_products\\n\\n),\\n\\n\\nfinal as (\\n select\\n order_items.*,\\n orders.ordered_at,\\n products.product_price as subtotal,\\n products.is_food_item,\\n products.is_drink_item\\n from order_items\\n\\n left join products on order_items.product_id = products.product_id\\n -- left join order_supplies_summary on order_items.order_id = order_supplies_summary.product_id\\n left join orders on order_items.order_id = orders.order_id\\n)\\n\\nselect * from final", + "docsUrl": "https://cloud.getdbt.com/accounts/1/jobs/21/docs/#!/model/model.jaffle_shop.order_items", + "fields": [ + { + "fieldName": "order_item_id", + "fieldPath": "order_item_id", + "nativeType": "TEXT", + "tags": [] + }, + { + "fieldName": "order_id", + "fieldPath": "order_id", + "nativeType": "TEXT", + "tags": [] + }, + { + "fieldName": "product_id", + "fieldPath": "product_id", + "nativeType": "TEXT", + "tags": [] + }, + { + "fieldName": "ordered_at", + "fieldPath": "ordered_at", + "nativeType": "TIMESTAMP_NTZ", + "tags": [] + }, + { + "fieldName": "subtotal", + "fieldPath": "subtotal", + "nativeType": "NUMBER", + "tags": [] + }, + { + "fieldName": "is_food_item", + "fieldPath": "is_food_item", + "nativeType": "NUMBER", + "tags": [] + }, + { + "fieldName": "is_drink_item", + "fieldPath": "is_drink_item", + "nativeType": "NUMBER", + "tags": [] + } + ], + "materialization": { + "targetDataset": "DATASET~9C04E59044AD0F422EA6A996BCD17E39", + 
"type": "TABLE" + }, + "packageName": "jaffle_shop", + "rawSql": "{{\\n config(\\n materialized = 'table',\\n unique_key = 'order_item_id'\\n )\\n}}\\n\\nwith order_items as (\\n\\n select * from {{ ref('stg_order_items') }}\\n\\n),\\n\\n\\norders as (\\n \\n select * from {{ ref('stg_orders')}}\\n),\\n\\nproducts as (\\n\\n select * from {{ ref('stg_products') }}\\n\\n),\\n\\n\\nfinal as (\\n select\\n order_items.*,\\n orders.ordered_at,\\n products.product_price as subtotal,\\n products.is_food_item,\\n products.is_drink_item\\n from order_items\\n\\n left join products on order_items.product_id = products.product_id\\n -- left join order_supplies_summary on order_items.order_id = order_supplies_summary.product_id\\n left join orders on order_items.order_id = orders.order_id\\n)\\n\\nselect * from final", + "sourceModels": [ + "VIRTUAL_VIEW~CCD5BC591587FEE1871D151FCC12EEB1", + "VIRTUAL_VIEW~B4C1C7825A401215D815D4520CD8ECF4", + "VIRTUAL_VIEW~769DCFC28317E6CDC8B9D7E745BA61CB" + ], + "url": "https://cloud.getdbt.com/explore/1/projects/123/environments/production/details/model.jaffle_shop.order_items" + }, + "entityUpstream": { + "sourceEntities": [ + "VIRTUAL_VIEW~CCD5BC591587FEE1871D151FCC12EEB1", + "VIRTUAL_VIEW~B4C1C7825A401215D815D4520CD8ECF4", + "VIRTUAL_VIEW~769DCFC28317E6CDC8B9D7E745BA61CB" + ] + }, + "logicalId": { + "name": "jaffle_shop.order_items", + "type": "DBT_MODEL" + }, + "structure": { + "directories": [ + "jaffle_shop" + ], + "name": "order_items" + } + }, + { + "dbtModel": { + "compiledSql": "\\n\\n\\nwith orders as (\\n \\n select * from acme.jaffle_shop.stg_orders\\n\\n),\\n\\norder_items as (\\n \\n select * from acme.jaffle_shop.stg_order_items\\n\\n),\\n\\nproducts as (\\n\\n select * from acme.jaffle_shop.stg_products\\n),\\n\\nsupplies as (\\n\\n select * from acme.jaffle_shop.stg_supplies\\n\\n),\\n\\n\\norder_items_summary as (\\n\\n select\\n\\n order_items.order_id,\\n\\n sum(supplies.supply_cost) as order_cost,\\n sum(is_food_item) as count_food_items,\\n sum(is_drink_item) as count_drink_items\\n\\n\\n from order_items\\n\\n left join supplies on order_items.product_id = supplies.product_id\\n left join products on order_items.product_id = products.product_id\\n\\n group by 1\\n\\n),\\n\\n\\nfinal as (\\n select\\n\\n orders.*,\\n count_food_items > 0 as is_food_order,\\n count_drink_items > 0 as is_drink_order,\\n order_cost\\n\\n from orders\\n \\n left join order_items_summary on orders.order_id = order_items_summary.order_id\\n)\\n\\nselect * from final", + "description": "Order overview data mart, offering key details for each order inlcluding if it's a customer's first order and a food vs. drink item breakdown. 
One row per order.", + "docsUrl": "https://cloud.getdbt.com/accounts/1/jobs/21/docs/#!/model/model.jaffle_shop.orders", + "fields": [ + { + "description": "The unique key of the orders mart.", + "fieldName": "order_id", + "fieldPath": "order_id", + "nativeType": "TEXT", + "tags": [] + }, + { + "description": "The foreign key relating to the location the order was placed at.", + "fieldName": "location_id", + "fieldPath": "location_id", + "nativeType": "TEXT", + "tags": [] + }, + { + "description": "The foreign key relating to the customer who placed the order.", + "fieldName": "customer_id", + "fieldPath": "customer_id", + "nativeType": "TEXT", + "tags": [] + }, + { + "description": "The total amount of the order in USD including tax.", + "fieldName": "order_total", + "fieldPath": "order_total", + "nativeType": "NUMBER", + "tags": [] + }, + { + "fieldName": "tax_paid", + "fieldPath": "tax_paid", + "nativeType": "NUMBER", + "tags": [] + }, + { + "description": "The timestamp the order was placed at.", + "fieldName": "ordered_at", + "fieldPath": "ordered_at", + "nativeType": "TIMESTAMP_NTZ", + "tags": [] + }, + { + "description": "A boolean indicating if this order included any food items.", + "fieldName": "is_food_order", + "fieldPath": "is_food_order", + "nativeType": "BOOLEAN", + "tags": [] + }, + { + "description": "A boolean indicating if this order included any drink items.", + "fieldName": "is_drink_order", + "fieldPath": "is_drink_order", + "nativeType": "BOOLEAN", + "tags": [] + }, + { + "description": "The sum of supply expenses to fulfill the order.", + "fieldName": "order_cost", + "fieldPath": "order_cost", + "nativeType": "NUMBER", + "tags": [] + } + ], + "materialization": { + "targetDataset": "DATASET~2628FDFF9D0CDB0AA4E3FC0FFBED02B8", + "type": "TABLE" + }, + "packageName": "jaffle_shop", + "rawSql": "{{\\n config(\\n materialized = 'table',\\n unique_key = 'order_id'\\n )\\n}}\\n\\n\\nwith orders as (\\n \\n select * from {{ ref('stg_orders')}}\\n\\n),\\n\\norder_items as (\\n \\n select * from {{ ref('stg_order_items')}}\\n\\n),\\n\\nproducts as (\\n\\n select * from {{ ref('stg_products') }}\\n),\\n\\nsupplies as (\\n\\n select * from {{ ref('stg_supplies') }}\\n\\n),\\n\\n\\norder_items_summary as (\\n\\n select\\n\\n order_items.order_id,\\n\\n sum(supplies.supply_cost) as order_cost,\\n sum(is_food_item) as count_food_items,\\n sum(is_drink_item) as count_drink_items\\n\\n\\n from order_items\\n\\n left join supplies on order_items.product_id = supplies.product_id\\n left join products on order_items.product_id = products.product_id\\n\\n group by 1\\n\\n),\\n\\n\\nfinal as (\\n select\\n\\n orders.*,\\n count_food_items > 0 as is_food_order,\\n count_drink_items > 0 as is_drink_order,\\n order_cost\\n\\n from orders\\n \\n left join order_items_summary on orders.order_id = order_items_summary.order_id\\n)\\n\\nselect * from final", + "sourceModels": [ + "VIRTUAL_VIEW~CCD5BC591587FEE1871D151FCC12EEB1", + "VIRTUAL_VIEW~B4C1C7825A401215D815D4520CD8ECF4", + "VIRTUAL_VIEW~769DCFC28317E6CDC8B9D7E745BA61CB", + "VIRTUAL_VIEW~226AEA90AFE7D7925BCE6CE156C2D901" + ], + "tests": [ + { + "columns": [ + "order_id" + ], + "dependsOnMacros": [ + "macro.dbt.get_where_subquery", + "macro.dbt.test_not_null" + ], + "name": "not_null_orders_order_id", + "sql": "\\n \\n \\n\\n\\n\\nselect order_id\\nfrom acme.jaffle_shop.orders\\nwhere order_id is null\\n\\n\\n", + "uniqueId": "test.jaffle_shop.not_null_orders_order_id.cf6c17daed" + }, + { + "columns": [ + "customer_id" + ], + 
"dependsOnMacros": [ + "macro.dbt.get_where_subquery", + "macro.dbt.test_relationships" + ], + "name": "relationships_orders_customer_id__customer_id__ref_stg_customers_", + "sql": "\\n \\n \\n\\nwith child as (\\n select customer_id as from_field\\n from acme.jaffle_shop.orders\\n where customer_id is not null\\n),\\n\\nparent as (\\n select customer_id as to_field\\n from acme.jaffle_shop.stg_customers\\n)\\n\\nselect\\n from_field\\n\\nfrom child\\nleft join parent\\n on child.from_field = parent.to_field\\n\\nwhere parent.to_field is null\\n\\n\\n", + "uniqueId": "test.jaffle_shop.relationships_orders_customer_id__customer_id__ref_stg_customers_.918495ce16" + }, + { + "columns": [ + "order_id" + ], + "dependsOnMacros": [ + "macro.dbt.get_where_subquery", + "macro.dbt.test_unique" + ], + "name": "unique_orders_order_id", + "sql": "\\n \\n \\n\\nselect\\n order_id as unique_field,\\n count(*) as n_records\\n\\nfrom acme.jaffle_shop.orders\\nwhere order_id is not null\\ngroup by order_id\\nhaving count(*) > 1\\n\\n\\n", + "uniqueId": "test.jaffle_shop.unique_orders_order_id.fed79b3a6e" + } + ], + "url": "https://cloud.getdbt.com/explore/1/projects/123/environments/production/details/model.jaffle_shop.orders" + }, + "entityUpstream": { + "sourceEntities": [ + "VIRTUAL_VIEW~CCD5BC591587FEE1871D151FCC12EEB1", + "VIRTUAL_VIEW~B4C1C7825A401215D815D4520CD8ECF4", + "VIRTUAL_VIEW~769DCFC28317E6CDC8B9D7E745BA61CB", + "VIRTUAL_VIEW~226AEA90AFE7D7925BCE6CE156C2D901" + ] + }, + "logicalId": { + "name": "jaffle_shop.orders", + "type": "DBT_MODEL" + }, + "structure": { + "directories": [ + "jaffle_shop" + ], + "name": "orders" + } + }, + { + "dbtModel": { + "compiledSql": "with\\n\\nsource as (\\n\\n select * from raw_customers\\n\\n),\\n\\nrenamed as (\\n\\n select\\n\\n ---------- ids\\n id as customer_id,\\n\\n ---------- properties\\n name as customer_name\\n\\n from source\\n\\n)\\n\\nselect * from renamed", + "description": "Customer data with basic cleaning and transformation applied, one row per customer.", + "docsUrl": "https://cloud.getdbt.com/accounts/1/jobs/21/docs/#!/model/model.jaffle_shop.stg_customers", + "fields": [ + { + "description": "The unique key for each customer.", + "fieldName": "customer_id", + "fieldPath": "customer_id", + "nativeType": "TEXT", + "tags": [] + }, + { + "fieldName": "customer_name", + "fieldPath": "customer_name", + "nativeType": "TEXT", + "tags": [] + } + ], + "materialization": { + "targetDataset": "DATASET~512557BF25DF9EAEE5584140A235E03D", + "type": "VIEW" + }, + "packageName": "jaffle_shop", + "rawSql": "with\\n\\nsource as (\\n\\n select * from raw_customers\\n\\n),\\n\\nrenamed as (\\n\\n select\\n\\n ---------- ids\\n id as customer_id,\\n\\n ---------- properties\\n name as customer_name\\n\\n from source\\n\\n)\\n\\nselect * from renamed", + "tests": [ + { + "columns": [ + "customer_id" + ], + "dependsOnMacros": [ + "macro.dbt.get_where_subquery", + "macro.dbt.test_not_null" + ], + "name": "not_null_stg_customers_customer_id", + "sql": "\\n \\n \\n\\n\\n\\nselect customer_id\\nfrom acme.jaffle_shop.stg_customers\\nwhere customer_id is null\\n\\n\\n", + "uniqueId": "test.jaffle_shop.not_null_stg_customers_customer_id.e2cfb1f9aa" + }, + { + "columns": [ + "customer_id" + ], + "dependsOnMacros": [ + "macro.dbt.get_where_subquery", + "macro.dbt.test_unique" + ], + "name": "unique_stg_customers_customer_id", + "sql": "\\n \\n \\n\\nselect\\n customer_id as unique_field,\\n count(*) as n_records\\n\\nfrom acme.jaffle_shop.stg_customers\\nwhere customer_id 
is not null\\ngroup by customer_id\\nhaving count(*) > 1\\n\\n\\n", + "uniqueId": "test.jaffle_shop.unique_stg_customers_customer_id.c7614daada" + } + ], + "url": "https://cloud.getdbt.com/explore/1/projects/123/environments/production/details/model.jaffle_shop.stg_customers" + }, + "logicalId": { + "name": "jaffle_shop.stg_customers", + "type": "DBT_MODEL" + }, + "structure": { + "directories": [ + "jaffle_shop" + ], + "name": "stg_customers" + } + }, + { + "dbtModel": { + "compiledSql": "with\\n\\nsource as (\\n\\n select * from raw_stores\\n\\n -- \\n -- where opened_at <= convert_timezone('UTC', current_timestamp())\\n\\n),\\n\\nrenamed as (\\n\\n select\\n\\n ---------- ids\\n id as location_id,\\n\\n ---------- properties\\n name as location_name,\\n tax_rate,\\n\\n ---------- timestamp\\n opened_at\\n\\n from source\\n\\n)\\n\\nselect * from renamed", + "description": "List of open locations with basic cleaning and transformation applied, one row per location.", + "docsUrl": "https://cloud.getdbt.com/accounts/1/jobs/21/docs/#!/model/model.jaffle_shop.stg_locations", + "fields": [ + { + "description": "The unique key for each location.", + "fieldName": "location_id", + "fieldPath": "location_id", + "nativeType": "TEXT", + "tags": [] + }, + { + "fieldName": "location_name", + "fieldPath": "location_name", + "nativeType": "TEXT", + "tags": [] + }, + { + "fieldName": "tax_rate", + "fieldPath": "tax_rate", + "nativeType": "FLOAT", + "tags": [] + }, + { + "fieldName": "opened_at", + "fieldPath": "opened_at", + "nativeType": "TIMESTAMP_NTZ", + "tags": [] + } + ], + "materialization": { + "targetDataset": "DATASET~30CDC644AD658B552B89094BA9B3E99C", + "type": "VIEW" + }, + "packageName": "jaffle_shop", + "rawSql": "with\\n\\nsource as (\\n\\n select * from raw_stores\\n\\n -- {# data runs to 2026, truncate timespan to desired range, \\n -- current time as default #}\\n -- where opened_at <= {{ var('truncate_timespan_to') }}\\n\\n),\\n\\nrenamed as (\\n\\n select\\n\\n ---------- ids\\n id as location_id,\\n\\n ---------- properties\\n name as location_name,\\n tax_rate,\\n\\n ---------- timestamp\\n opened_at\\n\\n from source\\n\\n)\\n\\nselect * from renamed", + "tests": [ + { + "columns": [ + "location_id" + ], + "dependsOnMacros": [ + "macro.dbt.get_where_subquery", + "macro.dbt.test_not_null" + ], + "name": "not_null_stg_locations_location_id", + "sql": "\\n \\n \\n\\n\\n\\nselect location_id\\nfrom acme.jaffle_shop.stg_locations\\nwhere location_id is null\\n\\n\\n", + "uniqueId": "test.jaffle_shop.not_null_stg_locations_location_id.3d237927d2" + }, + { + "columns": [ + "location_id" + ], + "dependsOnMacros": [ + "macro.dbt.get_where_subquery", + "macro.dbt.test_unique" + ], + "name": "unique_stg_locations_location_id", + "sql": "\\n \\n \\n\\nselect\\n location_id as unique_field,\\n count(*) as n_records\\n\\nfrom acme.jaffle_shop.stg_locations\\nwhere location_id is not null\\ngroup by location_id\\nhaving count(*) > 1\\n\\n\\n", + "uniqueId": "test.jaffle_shop.unique_stg_locations_location_id.2e2fc58ecc" + } + ], + "url": "https://cloud.getdbt.com/explore/1/projects/123/environments/production/details/model.jaffle_shop.stg_locations" + }, + "logicalId": { + "name": "jaffle_shop.stg_locations", + "type": "DBT_MODEL" + }, + "structure": { + "directories": [ + "jaffle_shop" + ], + "name": "stg_locations" + } + }, + { + "dbtModel": { + "compiledSql": "with\\n\\nsource as (\\n\\n select * from raw_items\\n\\n),\\n\\nrenamed as (\\n\\n select\\n\\n ---------- ids\\n id as 
order_item_id,\\n order_id,\\n\\n ---------- properties\\n sku as product_id\\n\\n from source\\n\\n)\\n\\nselect * from renamed", + "description": "Individual food and drink items that make up our orders, one row per item.", + "docsUrl": "https://cloud.getdbt.com/accounts/1/jobs/21/docs/#!/model/model.jaffle_shop.stg_order_items", + "fields": [ + { + "description": "The unique key for each order item.", + "fieldName": "order_item_id", + "fieldPath": "order_item_id", + "nativeType": "TEXT", + "tags": [] + }, + { + "fieldName": "order_id", + "fieldPath": "order_id", + "nativeType": "TEXT", + "tags": [] + }, + { + "fieldName": "product_id", + "fieldPath": "product_id", + "nativeType": "TEXT", + "tags": [] + } + ], + "materialization": { + "targetDataset": "DATASET~D1E6B89E5891621E2DB93165B5828719", + "type": "VIEW" + }, + "packageName": "jaffle_shop", + "rawSql": "with\\n\\nsource as (\\n\\n select * from raw_items\\n\\n),\\n\\nrenamed as (\\n\\n select\\n\\n ---------- ids\\n id as order_item_id,\\n order_id,\\n\\n ---------- properties\\n sku as product_id\\n\\n from source\\n\\n)\\n\\nselect * from renamed", + "tests": [ + { + "columns": [ + "order_item_id" + ], + "dependsOnMacros": [ + "macro.dbt.get_where_subquery", + "macro.dbt.test_not_null" + ], + "name": "not_null_stg_order_items_order_item_id", + "sql": "\\n \\n \\n\\n\\n\\nselect order_item_id\\nfrom acme.jaffle_shop.stg_order_items\\nwhere order_item_id is null\\n\\n\\n", + "uniqueId": "test.jaffle_shop.not_null_stg_order_items_order_item_id.26a7e2bc35" + }, + { + "columns": [ + "order_item_id" + ], + "dependsOnMacros": [ + "macro.dbt.get_where_subquery", + "macro.dbt.test_unique" + ], + "name": "unique_stg_order_items_order_item_id", + "sql": "\\n \\n \\n\\nselect\\n order_item_id as unique_field,\\n count(*) as n_records\\n\\nfrom acme.jaffle_shop.stg_order_items\\nwhere order_item_id is not null\\ngroup by order_item_id\\nhaving count(*) > 1\\n\\n\\n", + "uniqueId": "test.jaffle_shop.unique_stg_order_items_order_item_id.90e333a108" + } + ], + "url": "https://cloud.getdbt.com/explore/1/projects/123/environments/production/details/model.jaffle_shop.stg_order_items" + }, + "logicalId": { + "name": "jaffle_shop.stg_order_items", + "type": "DBT_MODEL" + }, + "structure": { + "directories": [ + "jaffle_shop" + ], + "name": "stg_order_items" + } + }, + { + "dbtModel": { + "compiledSql": "\\n\\nwith\\n\\nsource as (\\n\\n select * from raw_orders\\n\\n -- data runs to 2026, truncate timespan to desired range,\\n -- current time as default\\n -- where ordered_at <= convert_timezone('UTC', current_timestamp())\\n\\n),\\n\\nrenamed as (\\n\\n select\\n\\n ---------- ids\\n id as order_id,\\n store_id as location_id,\\n customer as customer_id,\\n\\n ---------- properties\\n (order_total / 100.0) as order_total,\\n (tax_paid / 100.0) as tax_paid,\\n\\n ---------- timestamps\\n ordered_at\\n\\n from source\\n\\n)\\n\\nselect * from renamed", + "description": "Order data with basic cleaning and transformation applied, one row per order.", + "docsUrl": "https://cloud.getdbt.com/accounts/1/jobs/21/docs/#!/model/model.jaffle_shop.stg_orders", + "fields": [ + { + "description": "The unique key for each order.", + "fieldName": "order_id", + "fieldPath": "order_id", + "nativeType": "TEXT", + "tags": [] + }, + { + "fieldName": "location_id", + "fieldPath": "location_id", + "nativeType": "TEXT", + "tags": [] + }, + { + "fieldName": "customer_id", + "fieldPath": "customer_id", + "nativeType": "TEXT", + "tags": [] + }, + { + "fieldName": 
"order_total", + "fieldPath": "order_total", + "nativeType": "NUMBER", + "tags": [] + }, + { + "fieldName": "tax_paid", + "fieldPath": "tax_paid", + "nativeType": "NUMBER", + "tags": [] + }, + { + "fieldName": "ordered_at", + "fieldPath": "ordered_at", + "nativeType": "TIMESTAMP_NTZ", + "tags": [] + } + ], + "materialization": { + "targetDataset": "DATASET~757BF2FC19B9BBD0DBAFD90768F2DC8B", + "type": "TABLE" + }, + "packageName": "jaffle_shop", + "rawSql": "{{\\n config(\\n materialized = 'table',\\n unique_key = 'order_id'\\n )\\n}}\\n\\nwith\\n\\nsource as (\\n\\n select * from raw_orders\\n\\n -- data runs to 2026, truncate timespan to desired range,\\n -- current time as default\\n -- where ordered_at <= {{ var('truncate_timespan_to') }}\\n\\n),\\n\\nrenamed as (\\n\\n select\\n\\n ---------- ids\\n id as order_id,\\n store_id as location_id,\\n customer as customer_id,\\n\\n ---------- properties\\n (order_total / 100.0) as order_total,\\n (tax_paid / 100.0) as tax_paid,\\n\\n ---------- timestamps\\n ordered_at\\n\\n from source\\n\\n)\\n\\nselect * from renamed", + "tests": [ + { + "columns": [ + "order_id" + ], + "dependsOnMacros": [ + "macro.dbt.get_where_subquery", + "macro.dbt.test_not_null" + ], + "name": "not_null_stg_orders_order_id", + "sql": "\\n \\n \\n\\n\\n\\nselect order_id\\nfrom acme.jaffle_shop.stg_orders\\nwhere order_id is null\\n\\n\\n", + "uniqueId": "test.jaffle_shop.not_null_stg_orders_order_id.81cfe2fe64" + }, + { + "columns": [ + "order_id" + ], + "dependsOnMacros": [ + "macro.dbt.get_where_subquery", + "macro.dbt.test_unique" + ], + "name": "unique_stg_orders_order_id", + "sql": "\\n \\n \\n\\nselect\\n order_id as unique_field,\\n count(*) as n_records\\n\\nfrom acme.jaffle_shop.stg_orders\\nwhere order_id is not null\\ngroup by order_id\\nhaving count(*) > 1\\n\\n\\n", + "uniqueId": "test.jaffle_shop.unique_stg_orders_order_id.e3b841c71a" + } + ], + "url": "https://cloud.getdbt.com/explore/1/projects/123/environments/production/details/model.jaffle_shop.stg_orders" + }, + "logicalId": { + "name": "jaffle_shop.stg_orders", + "type": "DBT_MODEL" + }, + "structure": { + "directories": [ + "jaffle_shop" + ], + "name": "stg_orders" + } + }, + { + "dbtModel": { + "compiledSql": "with\\n\\nsource as (\\n\\n select * from raw_products\\n\\n),\\n\\nrenamed as (\\n\\n select\\n\\n ---------- ids\\n sku as product_id,\\n\\n ---------- properties\\n name as product_name,\\n type as product_type,\\n description as product_description,\\n (price / 100.0) as product_price,\\n\\n\\n ---------- derived\\n case\\n when type = 'jaffle' then 1\\n else 0\\n end as is_food_item,\\n\\n case\\n when type = 'beverage' then 1\\n else 0\\n end as is_drink_item\\n\\n from source\\n\\n)\\n\\nselect * from renamed", + "description": "Product (food and drink items that can be ordered) data with basic cleaning and transformation applied, one row per product.", + "docsUrl": "https://cloud.getdbt.com/accounts/1/jobs/21/docs/#!/model/model.jaffle_shop.stg_products", + "fields": [ + { + "description": "The unique key for each product.", + "fieldName": "product_id", + "fieldPath": "product_id", + "nativeType": "TEXT", + "tags": [] + }, + { + "fieldName": "product_name", + "fieldPath": "product_name", + "nativeType": "TEXT", + "tags": [] + }, + { + "fieldName": "product_type", + "fieldPath": "product_type", + "nativeType": "TEXT", + "tags": [] + }, + { + "fieldName": "product_description", + "fieldPath": "product_description", + "nativeType": "TEXT", + "tags": [] + }, + { + "fieldName": 
"product_price", + "fieldPath": "product_price", + "nativeType": "NUMBER", + "tags": [] + }, + { + "fieldName": "is_food_item", + "fieldPath": "is_food_item", + "nativeType": "NUMBER", + "tags": [] + }, + { + "fieldName": "is_drink_item", + "fieldPath": "is_drink_item", + "nativeType": "NUMBER", + "tags": [] + } + ], + "materialization": { + "targetDataset": "DATASET~2932BAF66BF93D64D6AC2F457698EB52", + "type": "VIEW" + }, + "packageName": "jaffle_shop", + "rawSql": "with\\n\\nsource as (\\n\\n select * from raw_products\\n\\n),\\n\\nrenamed as (\\n\\n select\\n\\n ---------- ids\\n sku as product_id,\\n\\n ---------- properties\\n name as product_name,\\n type as product_type,\\n description as product_description,\\n (price / 100.0) as product_price,\\n\\n\\n ---------- derived\\n case\\n when type = 'jaffle' then 1\\n else 0\\n end as is_food_item,\\n\\n case\\n when type = 'beverage' then 1\\n else 0\\n end as is_drink_item\\n\\n from source\\n\\n)\\n\\nselect * from renamed", + "tests": [ + { + "columns": [ + "product_id" + ], + "dependsOnMacros": [ + "macro.dbt.get_where_subquery", + "macro.dbt.test_not_null" + ], + "name": "not_null_stg_products_product_id", + "sql": "\\n \\n \\n\\n\\n\\nselect product_id\\nfrom acme.jaffle_shop.stg_products\\nwhere product_id is null\\n\\n\\n", + "uniqueId": "test.jaffle_shop.not_null_stg_products_product_id.6373b0acf3" + }, + { + "columns": [ + "product_id" + ], + "dependsOnMacros": [ + "macro.dbt.get_where_subquery", + "macro.dbt.test_unique" + ], + "name": "unique_stg_products_product_id", + "sql": "\\n \\n \\n\\nselect\\n product_id as unique_field,\\n count(*) as n_records\\n\\nfrom acme.jaffle_shop.stg_products\\nwhere product_id is not null\\ngroup by product_id\\nhaving count(*) > 1\\n\\n\\n", + "uniqueId": "test.jaffle_shop.unique_stg_products_product_id.7d950a1467" + } + ], + "url": "https://cloud.getdbt.com/explore/1/projects/123/environments/production/details/model.jaffle_shop.stg_products" + }, + "logicalId": { + "name": "jaffle_shop.stg_products", + "type": "DBT_MODEL" + }, + "structure": { + "directories": [ + "jaffle_shop" + ], + "name": "stg_products" + } + }, + { + "dbtModel": { + "compiledSql": "with\\n\\nsource as (\\n\\n select * from raw_supplies\\n\\n),\\n\\nrenamed as (\\n\\n select\\n\\n ---------- ids\\n \\n \\nmd5(cast(coalesce(cast(id as TEXT), '_dbt_utils_surrogate_key_null_') || '-' || coalesce(cast(sku as TEXT), '_dbt_utils_surrogate_key_null_') as TEXT)) as supply_uuid,\\n id as supply_id,\\n sku as product_id,\\n\\n ---------- properties\\n name as supply_name,\\n (cost / 100.0) as supply_cost,\\n perishable as is_perishable_supply\\n\\n from source\\n\\n)\\n\\nselect * from renamed", + "description": "List of our supply expenses data with basic cleaning and transformation applied.\\nOne row per supply cost, not per supply. As supply costs fluctuate they receive a new row with a new UUID. 
Thus there can be multiple rows per supply_id.\\n", + "docsUrl": "https://cloud.getdbt.com/accounts/1/jobs/21/docs/#!/model/model.jaffle_shop.stg_supplies", + "fields": [ + { + "description": "The unique key of our supplies per cost.", + "fieldName": "supply_uuid", + "fieldPath": "supply_uuid", + "nativeType": "TEXT", + "tags": [] + }, + { + "fieldName": "supply_id", + "fieldPath": "supply_id", + "nativeType": "TEXT", + "tags": [] + }, + { + "fieldName": "product_id", + "fieldPath": "product_id", + "nativeType": "TEXT", + "tags": [] + }, + { + "fieldName": "supply_name", + "fieldPath": "supply_name", + "nativeType": "TEXT", + "tags": [] + }, + { + "fieldName": "supply_cost", + "fieldPath": "supply_cost", + "nativeType": "NUMBER", + "tags": [] + }, + { + "fieldName": "is_perishable_supply", + "fieldPath": "is_perishable_supply", + "nativeType": "BOOLEAN", + "tags": [] + } + ], + "materialization": { + "targetDataset": "DATASET~A808B6BE5F80DA3A550D49C92E8F2D89", + "type": "VIEW" + }, + "packageName": "jaffle_shop", + "rawSql": "with\\n\\nsource as (\\n\\n select * from raw_supplies\\n\\n),\\n\\nrenamed as (\\n\\n select\\n\\n ---------- ids\\n {{ dbt_utils.generate_surrogate_key(['id', 'sku']) }} as supply_uuid,\\n id as supply_id,\\n sku as product_id,\\n\\n ---------- properties\\n name as supply_name,\\n (cost / 100.0) as supply_cost,\\n perishable as is_perishable_supply\\n\\n from source\\n\\n)\\n\\nselect * from renamed", + "tests": [ + { + "columns": [ + "supply_uuid" + ], + "dependsOnMacros": [ + "macro.dbt.get_where_subquery", + "macro.dbt.test_not_null" + ], + "name": "not_null_stg_supplies_supply_uuid", + "sql": "\\n \\n \\n\\n\\n\\nselect supply_uuid\\nfrom acme.jaffle_shop.stg_supplies\\nwhere supply_uuid is null\\n\\n\\n", + "uniqueId": "test.jaffle_shop.not_null_stg_supplies_supply_uuid.515c6eda6d" + }, + { + "columns": [ + "supply_uuid" + ], + "dependsOnMacros": [ + "macro.dbt.get_where_subquery", + "macro.dbt.test_unique" + ], + "name": "unique_stg_supplies_supply_uuid", + "sql": "\\n \\n \\n\\nselect\\n supply_uuid as unique_field,\\n count(*) as n_records\\n\\nfrom acme.jaffle_shop.stg_supplies\\nwhere supply_uuid is not null\\ngroup by supply_uuid\\nhaving count(*) > 1\\n\\n\\n", + "uniqueId": "test.jaffle_shop.unique_stg_supplies_supply_uuid.c9e3edcfed" + } + ], + "url": "https://cloud.getdbt.com/explore/1/projects/123/environments/production/details/model.jaffle_shop.stg_supplies" + }, + "logicalId": { + "name": "jaffle_shop.stg_supplies", + "type": "DBT_MODEL" + }, + "structure": { + "directories": [ + "jaffle_shop" + ], + "name": "stg_supplies" + } + }, + { + "dbtMetric": { + "description": "The cumulative revenue for all orders.", + "dimensions": [], + "filters": [], + "label": "Cumulative Revenue (All Time)", + "packageName": "jaffle_shop", + "timeGrains": [], + "type": "cumulative", + "url": "https://cloud.getdbt.com/accounts/1/jobs/21/docs/#!/metric/metric.jaffle_shop.cumulative_revenue" + }, + "entityUpstream": {}, + "logicalId": { + "name": "jaffle_shop.cumulative_revenue", + "type": "DBT_METRIC" + } + }, + { + "dbtMetric": { + "description": "Distinct count of customers placing orders", + "dimensions": [], + "filters": [], + "label": "Customers w/ Orders", + "packageName": "jaffle_shop", + "timeGrains": [], + "type": "simple", + "url": "https://cloud.getdbt.com/accounts/1/jobs/21/docs/#!/metric/metric.jaffle_shop.customers_with_orders" + }, + "entityUpstream": {}, + "logicalId": { + "name": "jaffle_shop.customers_with_orders", + "type": "DBT_METRIC" + } + }, + { 
+ "dbtMetric": { + "description": "Count of orders that contain food order items", + "dimensions": [], + "filters": [], + "label": "Food Orders", + "packageName": "jaffle_shop", + "timeGrains": [], + "type": "simple", + "url": "https://cloud.getdbt.com/accounts/1/jobs/21/docs/#!/metric/metric.jaffle_shop.food_orders" + }, + "entityUpstream": {}, + "logicalId": { + "name": "jaffle_shop.food_orders", + "type": "DBT_METRIC" + } + }, + { + "dbtMetric": { + "description": "The revenue from food in each order", + "dimensions": [], + "filters": [], + "label": "Food Revenue", + "packageName": "jaffle_shop", + "timeGrains": [], + "type": "simple", + "url": "https://cloud.getdbt.com/accounts/1/jobs/21/docs/#!/metric/metric.jaffle_shop.food_revenue" + }, + "entityUpstream": {}, + "logicalId": { + "name": "jaffle_shop.food_revenue", + "type": "DBT_METRIC" + } + }, + { + "dbtMetric": { + "description": "The % of order revenue from food.", + "dimensions": [], + "filters": [], + "label": "Food Revenue %", + "packageName": "jaffle_shop", + "timeGrains": [], + "type": "ratio", + "url": "https://cloud.getdbt.com/accounts/1/jobs/21/docs/#!/metric/metric.jaffle_shop.food_revenue_pct" + }, + "entityUpstream": {}, + "logicalId": { + "name": "jaffle_shop.food_revenue_pct", + "type": "DBT_METRIC" + } + }, + { + "dbtMetric": { + "description": "Count of orders with order total over 20.", + "dimensions": [], + "filters": [], + "label": "Large Orders", + "packageName": "jaffle_shop", + "timeGrains": [], + "type": "simple", + "url": "https://cloud.getdbt.com/accounts/1/jobs/21/docs/#!/metric/metric.jaffle_shop.large_order" + }, + "entityUpstream": {}, + "logicalId": { + "name": "jaffle_shop.large_order", + "type": "DBT_METRIC" + } + }, + { + "dbtMetric": { + "description": "Count of locations that placed in order.", + "dimensions": [], + "filters": [], + "label": "Locations", + "packageName": "jaffle_shop", + "timeGrains": [], + "type": "simple", + "url": "https://cloud.getdbt.com/accounts/1/jobs/21/docs/#!/metric/metric.jaffle_shop.locations" + }, + "entityUpstream": {}, + "logicalId": { + "name": "jaffle_shop.locations", + "type": "DBT_METRIC" + } + }, + { + "dbtMetric": { + "description": "The median revenue for each order item. 
Excludes tax.", + "dimensions": [], + "filters": [], + "label": "Median Revenue", + "packageName": "jaffle_shop", + "timeGrains": [], + "type": "simple", + "url": "https://cloud.getdbt.com/accounts/1/jobs/21/docs/#!/metric/metric.jaffle_shop.median_revenue" + }, + "entityUpstream": {}, + "logicalId": { + "name": "jaffle_shop.median_revenue", + "type": "DBT_METRIC" + } + }, + { + "dbtMetric": { + "description": "Unique count of new customers.", + "dimensions": [], + "filters": [], + "label": "New Customers", + "packageName": "jaffle_shop", + "timeGrains": [], + "type": "simple", + "url": "https://cloud.getdbt.com/accounts/1/jobs/21/docs/#!/metric/metric.jaffle_shop.new_customer" + }, + "entityUpstream": {}, + "logicalId": { + "name": "jaffle_shop.new_customer", + "type": "DBT_METRIC" + } + }, + { + "dbtMetric": { + "description": "Sum of cost for each order item.", + "dimensions": [], + "filters": [], + "label": "Order Cost", + "packageName": "jaffle_shop", + "timeGrains": [], + "type": "simple", + "url": "https://cloud.getdbt.com/accounts/1/jobs/21/docs/#!/metric/metric.jaffle_shop.order_cost" + }, + "entityUpstream": {}, + "logicalId": { + "name": "jaffle_shop.order_cost", + "type": "DBT_METRIC" + } + }, + { + "dbtMetric": { + "description": "Gross profit from each order.", + "dimensions": [], + "filters": [], + "label": "Order Gross Profit", + "packageName": "jaffle_shop", + "timeGrains": [], + "type": "derived", + "url": "https://cloud.getdbt.com/accounts/1/jobs/21/docs/#!/metric/metric.jaffle_shop.order_gross_profit" + }, + "entityUpstream": {}, + "logicalId": { + "name": "jaffle_shop.order_gross_profit", + "type": "DBT_METRIC" + } + }, + { + "dbtMetric": { + "description": "Count of orders.", + "dimensions": [], + "filters": [], + "label": "Orders", + "packageName": "jaffle_shop", + "timeGrains": [], + "type": "simple", + "url": "https://cloud.getdbt.com/accounts/1/jobs/21/docs/#!/metric/metric.jaffle_shop.orders" + }, + "entityUpstream": {}, + "logicalId": { + "name": "jaffle_shop.orders", + "type": "DBT_METRIC" + } + }, + { + "dbtMetric": { + "description": "Sum of total order amount. Includes tax + revenue.", + "dimensions": [], + "filters": [], + "label": "Order Total", + "packageName": "jaffle_shop", + "timeGrains": [], + "type": "simple", + "url": "https://cloud.getdbt.com/accounts/1/jobs/21/docs/#!/metric/metric.jaffle_shop.order_total" + }, + "entityUpstream": {}, + "logicalId": { + "name": "jaffle_shop.order_total", + "type": "DBT_METRIC" + } + }, + { + "dbtMetric": { + "description": "Sum of the product revenue for each order item. Excludes tax.", + "dimensions": [], + "filters": [], + "label": "Revenue", + "packageName": "jaffle_shop", + "timeGrains": [], + "type": "simple", + "url": "https://cloud.getdbt.com/accounts/1/jobs/21/docs/#!/metric/metric.jaffle_shop.revenue" + }, + "entityUpstream": {}, + "logicalId": { + "name": "jaffle_shop.revenue", + "type": "DBT_METRIC" + } + }, + { + "dbtMetric": { + "description": "Percentage growth of revenue compared to 1 month ago. Excludes tax.", + "dimensions": [], + "filters": [], + "label": "Revenue Growth % M/M", + "packageName": "jaffle_shop", + "timeGrains": [], + "type": "derived", + "url": "https://cloud.getdbt.com/accounts/1/jobs/21/docs/#!/metric/metric.jaffle_shop.revenue_growth_mom" + }, + "entityUpstream": {}, + "logicalId": { + "name": "jaffle_shop.revenue_growth_mom", + "type": "DBT_METRIC" + } + }, + { + "documentation": { + "datasetDocumentations": [ + "This dataset contains profile info of each customer. 
E.g. first name, last name, email, company name, etc." + ], + "fieldDocumentations": [ + { + "documentation": "Auto-generated ID", + "fieldPath": "id" + }, + { + "documentation": "Customer's first name", + "fieldPath": "first_name" + }, + { + "documentation": "Customer's last name", + "fieldPath": "last_name" + }, + { + "documentation": "Customer's email address", + "fieldPath": "email" + }, + { + "documentation": "Customer's company", + "fieldPath": "company" + }, + { + "documentation": "Creation timestamp", + "fieldPath": "date" + } + ] + }, + "logicalId": { + "account": "john.doe@metaphor.io", + "name": "demo_db.metaphor.customer_profile", + "platform": "SNOWFLAKE" + } + }, + { + "documentation": { + "datasetDocumentations": [ + "This dataset contains all subscriptions info." + ], + "fieldDocumentations": [ + { + "documentation": "Creation timestamp", + "fieldPath": "created_at" + }, + { + "documentation": "Subscription full display name", + "fieldPath": "long_name" + }, + { + "documentation": "Subscription price in cents", + "fieldPath": "price" + }, + { + "documentation": "Type of subscription renewal", + "fieldPath": "renew_type" + }, + { + "documentation": "Subscription short name", + "fieldPath": "short_name" + }, + { + "documentation": "Primary Key", + "fieldPath": "sub_id" + } + ] + }, + "logicalId": { + "account": "john.doe@metaphor.io", + "name": "demo_db.metaphor.subscriptions_base", + "platform": "SNOWFLAKE" + } + }, + { + "documentation": { + "datasetDocumentations": [ + "This dataset represents all the raw subscription changes info of our product. Each subscription is represented by a unique sub_id and each change has a unique chg_id." + ], + "fieldDocumentations": [ + { + "documentation": "Change type", + "fieldPath": "change_type" + }, + { + "documentation": "Primary Key", + "fieldPath": "chg_id" + }, + { + "documentation": "Creation timestamp", + "fieldPath": "created_at" + }, + { + "documentation": "Subscription ID", + "fieldPath": "sub_id" + } + ] + }, + "logicalId": { + "account": "john.doe@metaphor.io", + "name": "demo_db.metaphor.subscriptions_change_raw", + "platform": "SNOWFLAKE" + } + }, + { + "dbtModel": { + "compiledSql": "select \n 1 as id,\n subscriptions_growth.customer_id as cus_id,\n 'no' as reason,\n subscriptions_growth.created_at as sub_date,\n '2014-01-01 16:00:00' as cancel_date\nfrom DEMO_DB.METAPHOR.subscriptions_growth as subscriptions_growth", + "description": "This dataset contains info about churn, all the accounts that have cancelled their subscriptions in the past.", + "docsUrl": "https://cloud.getdbt.com/accounts/1/jobs/22/docs/#!/model/model.metaphor_subscriptions.churn_region_agg", + "fields": [ + { + "description": "Auto-generated ID", + "fieldName": "id", + "fieldPath": "id", + "nativeType": "NUMBER", + "tags": [] + }, + { + "description": "Customer ID", + "fieldName": "cus_id", + "fieldPath": "cus_id", + "nativeType": "NUMBER", + "tags": [] + }, + { + "description": "The reason for churn", + "fieldName": "reason", + "fieldPath": "reason", + "nativeType": "TEXT", + "tags": [] + }, + { + "description": "Timestamp at which the subscription is created.", + "fieldName": "sub_date", + "fieldPath": "sub_date", + "nativeType": "TIMESTAMP_NTZ", + "tags": [] + }, + { + "description": "Timestamp at which the subscription is cancelled.", + "fieldName": "cancel_date", + "fieldPath": "cancel_date", + "nativeType": "TEXT", + "tags": [] + } + ], + "materialization": { + "targetDataset": "DATASET~21B252FB382AB10032F473675A8E563E", + "type": "TABLE" + }, + 
"meta": [ + { + "key": "owner", + "value": "[\"mars@metaphor.io\", \"pardhu@metaphor.io\"]" + } + ], + "packageName": "metaphor_subscriptions", + "rawSql": "select \n 1 as id,\n subscriptions_growth.customer_id as cus_id,\n 'no' as reason,\n subscriptions_growth.created_at as sub_date,\n '2014-01-01 16:00:00' as cancel_date\nfrom {{ ref('subscriptions_growth') }} as subscriptions_growth", + "sourceModels": [ + "VIRTUAL_VIEW~C0B4C324A0D6AB95F681101D76B04791" + ], + "url": "https://cloud.getdbt.com/explore/1/projects/123/environments/production/details/model.metaphor_subscriptions.churn_region_agg" + }, + "entityUpstream": { + "sourceEntities": [ + "VIRTUAL_VIEW~C0B4C324A0D6AB95F681101D76B04791" + ] + }, + "logicalId": { + "name": "metaphor_subscriptions.churn_region_agg", + "type": "DBT_MODEL" + }, + "structure": { + "directories": [ + "metaphor_subscriptions" + ], + "name": "churn_region_agg" + } + }, + { + "dbtModel": { + "compiledSql": "select \n 1 as id,\n 'name' as name,\n 'poc' as poc,\n 100 as budget,\n churn_region_agg.sub_date as date\nfrom DEMO_DB.METAPHOR.churn_region_agg as churn_region_agg", + "description": "This dataset contains info about all modular campaigns. The id represents campaign id.", + "docsUrl": "https://cloud.getdbt.com/accounts/1/jobs/22/docs/#!/model/model.metaphor_subscriptions.modular_campaigns", + "fields": [ + { + "description": "Auto-generated ID", + "fieldName": "id", + "fieldPath": "id", + "nativeType": "NUMBER", + "tags": [] + }, + { + "description": "Campaign name", + "fieldName": "name", + "fieldPath": "name", + "nativeType": "TEXT", + "tags": [ + "pii", + "name" + ] + }, + { + "description": "Point of contact", + "fieldName": "poc", + "fieldPath": "poc", + "nativeType": "TEXT", + "tags": [] + }, + { + "description": "Budget for the campaign in dollars", + "fieldName": "budget", + "fieldPath": "budget", + "nativeType": "NUMBER", + "tags": [] + }, + { + "description": "Creation timestamp", + "fieldName": "date", + "fieldPath": "date", + "nativeType": "TIMESTAMP_NTZ", + "tags": [] + } + ], + "materialization": { + "targetDataset": "DATASET~21F8510010F67ABD176C2F09C61C0177", + "type": "TABLE" + }, + "meta": [ + { + "key": "owner", + "value": "[\"mars@metaphor.io\", \"pardhu@metaphor.io\"]" + } + ], + "packageName": "metaphor_subscriptions", + "rawSql": "select \n 1 as id,\n 'name' as name,\n 'poc' as poc,\n 100 as budget,\n churn_region_agg.sub_date as date\nfrom {{ ref('churn_region_agg') }} as churn_region_agg", + "sourceModels": [ + "VIRTUAL_VIEW~1BD666702EACA1E95384807DA0DC92C7" + ], + "url": "https://cloud.getdbt.com/explore/1/projects/123/environments/production/details/model.metaphor_subscriptions.modular_campaigns" + }, + "entityUpstream": { + "sourceEntities": [ + "VIRTUAL_VIEW~1BD666702EACA1E95384807DA0DC92C7" + ] + }, + "logicalId": { + "name": "metaphor_subscriptions.modular_campaigns", + "type": "DBT_MODEL" + }, + "structure": { + "directories": [ + "metaphor_subscriptions" + ], + "name": "modular_campaigns" + } + }, + { + "dbtModel": { + "compiledSql": "select \n 1 as sub_id,\n 'url' as url,\n customer_profile.date as created_at\nfrom DEMO_DB.METAPHOR.CUSTOMER_PROFILE as customer_profile", + "description": "This dataset contains all subscriptions core info.", + "docsUrl": "https://cloud.getdbt.com/accounts/1/jobs/22/docs/#!/model/model.metaphor_subscriptions.subscriptions_core", + "fields": [ + { + "description": "Subscription ID", + "fieldName": "sub_id", + "fieldPath": "sub_id", + "nativeType": "NUMBER", + "tags": [] + }, + { + 
"description": "URL for the subscription", + "fieldName": "url", + "fieldPath": "url", + "nativeType": "TEXT", + "tags": [] + }, + { + "description": "Creation timestamp", + "fieldName": "created_at", + "fieldPath": "created_at", + "nativeType": "TIMESTAMP_NTZ", + "tags": [] + } + ], + "materialization": { + "targetDataset": "DATASET~F134F3E0858F50ABE766D17D9C558AE3", + "type": "TABLE" + }, + "packageName": "metaphor_subscriptions", + "rawSql": "select \n 1 as sub_id,\n 'url' as url,\n customer_profile.date as created_at\nfrom {{ source('METAPHOR', 'CUSTOMER_PROFILE') }} as customer_profile", + "sourceDatasets": [ + "DATASET~5641EFE05C0E7A164E64267CB76ED41B" + ], + "url": "https://cloud.getdbt.com/explore/1/projects/123/environments/production/details/model.metaphor_subscriptions.subscriptions_core" + }, + "entityUpstream": { + "sourceEntities": [ + "DATASET~5641EFE05C0E7A164E64267CB76ED41B" + ] + }, + "logicalId": { + "name": "metaphor_subscriptions.subscriptions_core", + "type": "DBT_MODEL" + }, + "structure": { + "directories": [ + "metaphor_subscriptions" + ], + "name": "subscriptions_core" + }, + "systemTags": { + "tags": [ + { + "systemTagSource": "DBT", + "value": "subscription" + } + ] + } + }, + { + "dbtModel": { + "compiledSql": "select \n change_raw.sub_id as sub_id,\n 1 as customer_id, \n base.created_at as created_at\nfrom DEMO_DB.METAPHOR.SUBSCRIPTIONS_CHANGE_RAW as change_raw\ninner join DEMO_DB.METAPHOR.SUBSCRIPTIONS_BASE as base on base.sub_id = change_raw.sub_id\ninner join DEMO_DB.METAPHOR.subscriptions_v2 as v2 on v2.sub_id = change_raw.sub_id", + "description": "This dataset represents all the subscription info of our product. Each subscription is represented by unique subs_id.", + "docsUrl": "https://cloud.getdbt.com/accounts/1/jobs/22/docs/#!/model/model.metaphor_subscriptions.subscriptions_growth", + "fields": [ + { + "description": "Unique ID of the Subscription. This is generated at the time of creation of a new subscription.", + "fieldName": "sub_id", + "fieldPath": "sub_id", + "nativeType": "NUMBER", + "tags": [] + }, + { + "description": "Unique ID of the Customer. This is generated end of registration flow for a user.", + "fieldName": "customer_id", + "fieldPath": "customer_id", + "nativeType": "NUMBER", + "tags": [] + }, + { + "description": "Timestamp at which the subscription is created. 
Always represented in Pacific Time Zone irrespective of the location of the user.", + "fieldName": "created_at", + "fieldPath": "created_at", + "nativeType": "TIMESTAMP_NTZ", + "tags": [] + } + ], + "materialization": { + "targetDataset": "DATASET~23E009C6A5D14EE37FCF7DDC9C847939", + "type": "TABLE" + }, + "meta": [ + { + "key": "owner", + "value": "[\"mars@metaphor.io\", \"pardhu@metaphor.io\"]" + } + ], + "packageName": "metaphor_subscriptions", + "rawSql": "select \n change_raw.sub_id as sub_id,\n 1 as customer_id, \n base.created_at as created_at\nfrom {{ source('METAPHOR', 'SUBSCRIPTIONS_CHANGE_RAW') }} as change_raw\ninner join {{ source('METAPHOR', 'SUBSCRIPTIONS_BASE') }} as base on base.sub_id = change_raw.sub_id\ninner join {{ ref('subscriptions_v2') }} as v2 on v2.sub_id = change_raw.sub_id", + "sourceDatasets": [ + "DATASET~2F3C1202ADA5F7AEC4BD6F33A40A2F82", + "DATASET~A448444CA0CDE26D11CDCE4A6D640AF9" + ], + "sourceModels": [ + "VIRTUAL_VIEW~40E84C63748265E978F781C89A23BE51" + ], + "url": "https://cloud.getdbt.com/explore/1/projects/123/environments/production/details/model.metaphor_subscriptions.subscriptions_growth" + }, + "entityUpstream": { + "sourceEntities": [ + "DATASET~2F3C1202ADA5F7AEC4BD6F33A40A2F82", + "DATASET~A448444CA0CDE26D11CDCE4A6D640AF9", + "VIRTUAL_VIEW~40E84C63748265E978F781C89A23BE51" + ] + }, + "logicalId": { + "name": "metaphor_subscriptions.subscriptions_growth", + "type": "DBT_MODEL" + }, + "structure": { + "directories": [ + "metaphor_subscriptions" + ], + "name": "subscriptions_growth" + }, + "systemTags": { + "tags": [ + { + "systemTagSource": "DBT", + "value": "subscription" + }, + { + "systemTagSource": "DBT", + "value": "growth" + } + ] + } + }, + { + "dbtModel": { + "compiledSql": "select \n 1 as sales_id,\n subscriptions_growth.sub_id as sub_id,\n 1 as sale_rep_id,\n 1 as deal_size,\n '2014-01-01 16:00:00' as created_at\nfrom DEMO_DB.METAPHOR.subscriptions_growth as subscriptions_growth", + "description": "This dataset represents the sales info for each subscription.", + "docsUrl": "https://cloud.getdbt.com/accounts/1/jobs/22/docs/#!/model/model.metaphor_subscriptions.subscriptions_sales", + "fields": [ + { + "description": "Auto-generated ID", + "fieldName": "sales_id", + "fieldPath": "sales_id", + "nativeType": "NUMBER", + "tags": [] + }, + { + "description": "Subscription ID", + "fieldName": "sub_id", + "fieldPath": "sub_id", + "nativeType": "NUMBER", + "tags": [] + }, + { + "description": "Sales representative ID", + "fieldName": "sale_rep_id", + "fieldPath": "sale_rep_id", + "nativeType": "NUMBER", + "tags": [] + }, + { + "description": "Size of the deals in dollars", + "fieldName": "deal_size", + "fieldPath": "deal_size", + "nativeType": "NUMBER", + "tags": [] + }, + { + "description": "Creation timestamp", + "fieldName": "created_at", + "fieldPath": "created_at", + "nativeType": "TEXT", + "tags": [] + } + ], + "materialization": { + "targetDataset": "DATASET~EA7EEF0E21B06B0282E4BC3B7FD508A8", + "type": "TABLE" + }, + "packageName": "metaphor_subscriptions", + "rawSql": "select \n 1 as sales_id,\n subscriptions_growth.sub_id as sub_id,\n 1 as sale_rep_id,\n 1 as deal_size,\n '2014-01-01 16:00:00' as created_at\nfrom {{ ref('subscriptions_growth') }} as subscriptions_growth", + "sourceModels": [ + "VIRTUAL_VIEW~C0B4C324A0D6AB95F681101D76B04791" + ], + "url": "https://cloud.getdbt.com/explore/1/projects/123/environments/production/details/model.metaphor_subscriptions.subscriptions_sales" + }, + "entityUpstream": { + "sourceEntities": [ + 
"VIRTUAL_VIEW~C0B4C324A0D6AB95F681101D76B04791" + ] + }, + "logicalId": { + "name": "metaphor_subscriptions.subscriptions_sales", + "type": "DBT_MODEL" + }, + "structure": { + "directories": [ + "metaphor_subscriptions" + ], + "name": "subscriptions_sales" + }, + "systemTags": { + "tags": [ + { + "systemTagSource": "DBT", + "value": "subscription" + } + ] + } + }, + { + "dbtModel": { + "compiledSql": "select \n 1 as sub_id,\n 'short name' as short_name,\n 'long name' as long_name,\n 'region' as region,\n 1 as price,\n 'type' as renew_type,\n customer_profile.date as created_at\nfrom DEMO_DB.METAPHOR.CUSTOMER_PROFILE as customer_profile", + "description": "This dataset contains all newer subscriptions info.", + "docsUrl": "https://cloud.getdbt.com/accounts/1/jobs/22/docs/#!/model/model.metaphor_subscriptions.subscriptions_v2", + "fields": [ + { + "description": "Primary Key", + "fieldName": "sub_id", + "fieldPath": "sub_id", + "nativeType": "NUMBER", + "tags": [] + }, + { + "description": "Subscription short name", + "fieldName": "short_name", + "fieldPath": "short_name", + "nativeType": "TEXT", + "tags": [ + "pii", + "name" + ] + }, + { + "description": "Subscription full display name", + "fieldName": "long_name", + "fieldPath": "long_name", + "nativeType": "TEXT", + "tags": [ + "pii", + "name" + ] + }, + { + "description": "Available regions, comma separated", + "fieldName": "region", + "fieldPath": "region", + "nativeType": "TEXT", + "tags": [] + }, + { + "description": "Subscription price in cents", + "fieldName": "price", + "fieldPath": "price", + "nativeType": "NUMBER", + "tags": [] + }, + { + "description": "Type of subscription renewal", + "fieldName": "renew_type", + "fieldPath": "renew_type", + "nativeType": "TEXT", + "tags": [] + }, + { + "description": "Creation timestamp", + "fieldName": "created_at", + "fieldPath": "created_at", + "nativeType": "TIMESTAMP_NTZ", + "tags": [] + } + ], + "materialization": { + "targetDataset": "DATASET~4112E9F599D5D58340D7DD2D77458017", + "type": "TABLE" + }, + "meta": [ + { + "key": "owner", + "value": "[\"mars@metaphor.io\", \"pardhu@metaphor.io\"]" + } + ], + "packageName": "metaphor_subscriptions", + "rawSql": "select \n 1 as sub_id,\n 'short name' as short_name,\n 'long name' as long_name,\n 'region' as region,\n 1 as price,\n 'type' as renew_type,\n customer_profile.date as created_at\nfrom {{ source('METAPHOR', 'CUSTOMER_PROFILE') }} as customer_profile", + "sourceDatasets": [ + "DATASET~5641EFE05C0E7A164E64267CB76ED41B" + ], + "url": "https://cloud.getdbt.com/explore/1/projects/123/environments/production/details/model.metaphor_subscriptions.subscriptions_v2" + }, + "entityUpstream": { + "sourceEntities": [ + "DATASET~5641EFE05C0E7A164E64267CB76ED41B" + ] + }, + "logicalId": { + "name": "metaphor_subscriptions.subscriptions_v2", + "type": "DBT_MODEL" + }, + "structure": { + "directories": [ + "metaphor_subscriptions" + ], + "name": "subscriptions_v2" + }, + "systemTags": { + "tags": [ + { + "systemTagSource": "DBT", + "value": "subscription" + } + ] + } + }, + { + "documentation": { + "datasetDocumentations": [], + "fieldDocumentations": [ + { + "documentation": "Duration of the bike trip in seconds.", + "fieldPath": "duration" + } + ] + }, + "logicalId": { + "account": "john.doe@metaphor.io", + "name": "acme.berlin_bicycles.cycle_hire", + "platform": "SNOWFLAKE" + } + }, + { + "documentation": { + "datasetDocumentations": [], + "fieldDocumentations": [] + }, + "logicalId": { + "account": "john.doe@metaphor.io", + "name": 
"acme.berlin_bicycles.cycle_stations", + "platform": "SNOWFLAKE" + } + }, + { + "dbtModel": { + "compiledSql": "-- Adding extra fields including if the bike was rented during peak time \nSELECT\n SUM(duration_minutes) as total_minutes\n , COUNT(rental_id) as total_bike_hires\n , ROUND(SUM(duration_minutes) / COUNT(rental_id), 2) AS average_duration\n , EXTRACT(month from start_date) as month\n , CASE\n WHEN EXTRACT(HOUR from TO_TIMESTAMP(start_date)) >= 6 AND EXTRACT(HOUR from TO_TIMESTAMP(start_date)) <= 10 THEN 'Morning Peak'\n WHEN EXTRACT(HOUR from TO_TIMESTAMP(start_date)) >= 16 AND EXTRACT(HOUR from TO_TIMESTAMP(start_date)) <= 19 THEN 'Evening Peak'\n ELSE 'Off-Peak'\n END AS start_peak_travel\n , IFF(start_station_id = end_station_id, True, False) as same_station_flag\n , start_station_id\n , start_station_name\n , end_station_id\n , end_station_name\nFROM ACME.ride_share.raw_bike_hires\nGROUP BY 4,5,6,7,8,9,10\nORDER BY total_minutes DESC", + "description": "This table contains a transformed version of the raw_bike_hires table, which includes additional calculated fields such as creating a duration in minutes field. Each ride has been aggregated so any journey that starts and ends at the same station, in the same month and roughly time of day are aggregated together to get the total minutes similar journeys have taken\n", + "docsUrl": "https://cloud.getdbt.com/accounts/1/jobs/23/docs/#!/model/model.london_bike_analysis.cleaned_bike_rides", + "fields": [ + { + "description": "The total minutes of a particular journey in a month and general time of day.", + "fieldName": "total_minutes", + "fieldPath": "total_minutes", + "nativeType": "NUMBER", + "tags": [ + "aggregates" + ] + }, + { + "description": "Total number of bike hires of the same journey in a particular month and time of day", + "fieldName": "total_bike_hires", + "fieldPath": "total_bike_hires", + "nativeType": "NUMBER", + "tags": [ + "aggregates" + ] + }, + { + "description": "", + "fieldName": "average_duration", + "fieldPath": "average_duration", + "nativeType": "NUMBER", + "tags": [] + }, + { + "description": "Month the bike hire was in", + "fieldName": "month", + "fieldPath": "month", + "nativeType": "NUMBER", + "tags": [] + }, + { + "fieldName": "start_peak_travel", + "fieldPath": "start_peak_travel", + "nativeType": "TEXT", + "tags": [] + }, + { + "fieldName": "same_station_flag", + "fieldPath": "same_station_flag", + "nativeType": "BOOLEAN", + "tags": [] + }, + { + "description": "", + "fieldName": "start_station_id", + "fieldPath": "start_station_id", + "nativeType": "NUMBER", + "tags": [] + }, + { + "fieldName": "start_station_name", + "fieldPath": "start_station_name", + "nativeType": "TEXT", + "tags": [] + }, + { + "description": "", + "fieldName": "end_station_id", + "fieldPath": "end_station_id", + "nativeType": "NUMBER", + "tags": [] + }, + { + "description": "", + "fieldName": "end_station_name", + "fieldPath": "end_station_name", + "nativeType": "TEXT", + "tags": [] + } + ], + "materialization": { + "targetDataset": "DATASET~92D9D4AFB0BA1FAE857D2F67B1C5651F", + "type": "TABLE" + }, + "meta": [ + { + "key": "dbt_tags", + "value": "[\"pii\", \"marketplace\", \"apps\"]" + }, + { + "key": "data_product_manager", + "value": "\"kirit\"" + } + ], + "packageName": "london_bike_analysis", + "rawSql": "-- Adding extra fields including if the bike was rented during peak time \r\nSELECT\r\n SUM(duration_minutes) as total_minutes\r\n , COUNT(rental_id) as total_bike_hires\r\n , ROUND(SUM(duration_minutes) / 
COUNT(rental_id), 2) AS average_duration\r\n , EXTRACT(month from start_date) as month\r\n , CASE\r\n WHEN EXTRACT(HOUR from TO_TIMESTAMP(start_date)) >= 6 AND EXTRACT(HOUR from TO_TIMESTAMP(start_date)) <= 10 THEN 'Morning Peak'\r\n WHEN EXTRACT(HOUR from TO_TIMESTAMP(start_date)) >= 16 AND EXTRACT(HOUR from TO_TIMESTAMP(start_date)) <= 19 THEN 'Evening Peak'\r\n ELSE 'Off-Peak'\r\n END AS start_peak_travel\r\n , IFF(start_station_id = end_station_id, True, False) as same_station_flag\r\n , start_station_id\r\n , start_station_name\r\n , end_station_id\r\n , end_station_name\r\nFROM {{ ref('raw_bike_hires') }}\r\nGROUP BY 4,5,6,7,8,9,10\r\nORDER BY total_minutes DESC", + "sourceModels": [ + "VIRTUAL_VIEW~AB431420AAB09A0DC1E238C1D2A8A423" + ], + "url": "https://cloud.getdbt.com/explore/1/projects/123/environments/production/details/model.london_bike_analysis.cleaned_bike_rides" + }, + "entityUpstream": { + "sourceEntities": [ + "VIRTUAL_VIEW~AB431420AAB09A0DC1E238C1D2A8A423" + ] + }, + "logicalId": { + "name": "london_bike_analysis.cleaned_bike_rides", + "type": "DBT_MODEL" + }, + "structure": { + "directories": [ + "london_bike_analysis" + ], + "name": "cleaned_bike_rides" + }, + "systemTags": { + "tags": [ + { + "systemTagSource": "DBT", + "value": "bike_ride_data" + } + ] + } + }, + { + "dbtModel": { + "compiledSql": "-- Adding extra fields including if the bike was rented during peak time \nSELECT\n SUM(duration) as total_seconds\n , COUNT(rental_id) as total_bike_hires\n , ROUND(SUM(duration) / COUNT(rental_id), 2) AS average_duration\n , EXTRACT(month from start_date) as month\n , CASE\n WHEN EXTRACT(HOUR from TO_TIMESTAMP(start_date)) >= 6 AND EXTRACT(HOUR from TO_TIMESTAMP(start_date)) <= 10 THEN 'Morning Peak'\n WHEN EXTRACT(HOUR from TO_TIMESTAMP(start_date)) >= 16 AND EXTRACT(HOUR from TO_TIMESTAMP(start_date)) <= 19 THEN 'Evening Peak'\n ELSE 'Off-Peak'\n END AS start_peak_travel\n , IFF(start_station_id = end_station_id, True, False) as same_station_flag\n , start_station_id\n , start_station_name\n , end_station_id\n , end_station_name\nFROM ACME.snapshots.cycle_hire_snapshot\nGROUP BY 4,5,6,7,8,9,10\nORDER BY total_seconds DESC", + "docsUrl": "https://cloud.getdbt.com/accounts/1/jobs/23/docs/#!/model/model.london_bike_analysis.cleaned_bike_rides_from_snapshot", + "fields": [ + { + "fieldName": "total_seconds", + "fieldPath": "total_seconds", + "nativeType": "NUMBER", + "tags": [] + }, + { + "fieldName": "total_bike_hires", + "fieldPath": "total_bike_hires", + "nativeType": "NUMBER", + "tags": [] + }, + { + "fieldName": "average_duration", + "fieldPath": "average_duration", + "nativeType": "NUMBER", + "tags": [] + }, + { + "fieldName": "month", + "fieldPath": "month", + "nativeType": "NUMBER", + "tags": [] + }, + { + "fieldName": "start_peak_travel", + "fieldPath": "start_peak_travel", + "nativeType": "TEXT", + "tags": [] + }, + { + "fieldName": "same_station_flag", + "fieldPath": "same_station_flag", + "nativeType": "BOOLEAN", + "tags": [] + }, + { + "fieldName": "start_station_id", + "fieldPath": "start_station_id", + "nativeType": "NUMBER", + "tags": [] + }, + { + "fieldName": "start_station_name", + "fieldPath": "start_station_name", + "nativeType": "TEXT", + "tags": [] + }, + { + "fieldName": "end_station_id", + "fieldPath": "end_station_id", + "nativeType": "NUMBER", + "tags": [] + }, + { + "fieldName": "end_station_name", + "fieldPath": "end_station_name", + "nativeType": "TEXT", + "tags": [] + } + ], + "materialization": { + "targetDataset": 
"DATASET~D20E9E03D75970A68AF0D43B5E37B1C1", + "type": "TABLE" + }, + "packageName": "london_bike_analysis", + "rawSql": "-- Adding extra fields including if the bike was rented during peak time \r\nSELECT\r\n SUM(duration) as total_seconds\r\n , COUNT(rental_id) as total_bike_hires\r\n , ROUND(SUM(duration) / COUNT(rental_id), 2) AS average_duration\r\n , EXTRACT(month from start_date) as month\r\n , CASE\r\n WHEN EXTRACT(HOUR from TO_TIMESTAMP(start_date)) >= 6 AND EXTRACT(HOUR from TO_TIMESTAMP(start_date)) <= 10 THEN 'Morning Peak'\r\n WHEN EXTRACT(HOUR from TO_TIMESTAMP(start_date)) >= 16 AND EXTRACT(HOUR from TO_TIMESTAMP(start_date)) <= 19 THEN 'Evening Peak'\r\n ELSE 'Off-Peak'\r\n END AS start_peak_travel\r\n , IFF(start_station_id = end_station_id, True, False) as same_station_flag\r\n , start_station_id\r\n , start_station_name\r\n , end_station_id\r\n , end_station_name\r\nFROM {{ ref('cycle_hire_snapshot') }}\r\nGROUP BY 4,5,6,7,8,9,10\r\nORDER BY total_seconds DESC", + "sourceModels": [ + "VIRTUAL_VIEW~FEE8405461EBC519C4D9B3A20C4E251C" + ], + "url": "https://cloud.getdbt.com/explore/1/projects/123/environments/production/details/model.london_bike_analysis.cleaned_bike_rides_from_snapshot" + }, + "entityUpstream": { + "sourceEntities": [ + "VIRTUAL_VIEW~FEE8405461EBC519C4D9B3A20C4E251C" + ] + }, + "logicalId": { + "name": "london_bike_analysis.cleaned_bike_rides_from_snapshot", + "type": "DBT_MODEL" + }, + "structure": { + "directories": [ + "london_bike_analysis" + ], + "name": "cleaned_bike_rides_from_snapshot" + } + }, + { + "dbtModel": { + "compiledSql": "SELECT \n rental_id\n , duration as duration_seconds\n , duration / 60 as duration_minutes\n , bike_id\n , start_date\n , start_station_id\n , start_station_name\n , end_date\n , end_station_id\n , end_station_name\nFROM ACME.berlin_bicycles.cycle_hire\nWHERE EXTRACT(year from start_date) = 2017", + "description": "This table contains all bike hires in London in 2017. 
This is the raw dataset so no cleaning or transformation.", + "docsUrl": "https://cloud.getdbt.com/accounts/1/jobs/23/docs/#!/model/model.london_bike_analysis.raw_bike_hires", + "fields": [ + { + "fieldName": "rental_id", + "fieldPath": "rental_id", + "nativeType": "NUMBER", + "tags": [] + }, + { + "fieldName": "duration_seconds", + "fieldPath": "duration_seconds", + "nativeType": "NUMBER", + "tags": [] + }, + { + "fieldName": "duration_minutes", + "fieldPath": "duration_minutes", + "nativeType": "NUMBER", + "tags": [] + }, + { + "fieldName": "bike_id", + "fieldPath": "bike_id", + "nativeType": "NUMBER", + "tags": [] + }, + { + "fieldName": "start_date", + "fieldPath": "start_date", + "nativeType": "TIMESTAMP_NTZ", + "tags": [] + }, + { + "fieldName": "start_station_id", + "fieldPath": "start_station_id", + "nativeType": "NUMBER", + "tags": [] + }, + { + "fieldName": "start_station_name", + "fieldPath": "start_station_name", + "nativeType": "TEXT", + "tags": [] + }, + { + "fieldName": "end_date", + "fieldPath": "end_date", + "nativeType": "TIMESTAMP_NTZ", + "tags": [] + }, + { + "fieldName": "end_station_id", + "fieldPath": "end_station_id", + "nativeType": "NUMBER", + "tags": [] + }, + { + "fieldName": "end_station_name", + "fieldPath": "end_station_name", + "nativeType": "TEXT", + "tags": [] + } + ], + "materialization": { + "targetDataset": "DATASET~846FFD46D882F745471F05CDAC30771E", + "type": "TABLE" + }, + "packageName": "london_bike_analysis", + "rawSql": "SELECT \r\n rental_id\r\n , duration as duration_seconds\r\n , duration / 60 as duration_minutes\r\n , bike_id\r\n , start_date\r\n , start_station_id\r\n , start_station_name\r\n , end_date\r\n , end_station_id\r\n , end_station_name\r\nFROM {{ source('berlin_bicycles', 'cycle_hire') }}\r\nWHERE EXTRACT(year from start_date) = 2017", + "sourceDatasets": [ + "DATASET~F0C9B26420A687DD5EFC906351F8540A" + ], + "url": "https://cloud.getdbt.com/explore/1/projects/123/environments/production/details/model.london_bike_analysis.raw_bike_hires" + }, + "entityUpstream": { + "sourceEntities": [ + "DATASET~F0C9B26420A687DD5EFC906351F8540A" + ] + }, + "logicalId": { + "name": "london_bike_analysis.raw_bike_hires", + "type": "DBT_MODEL" + }, + "structure": { + "directories": [ + "london_bike_analysis" + ], + "name": "raw_bike_hires" + } + }, + { + "dbtModel": { + "compiledSql": "SELECT \n id\n , name as station_name\n , bikes_count\n , docks_count\n , install_date\n , removal_date\nFROM ACME.berlin_bicycles.cycle_stations\nWHERE install_date < '2017-01-01' and (removal_date < '2018-01-01' or removal_date is null)", + "description": "This table contains all bike stations in the London area. This only includes stations installed before January 1, 2017 and doesn't include stations that were removed in 2017 (before Jan 1 2018). 
This is the raw data so no cleaning or transformation.", + "docsUrl": "https://cloud.getdbt.com/accounts/1/jobs/23/docs/#!/model/model.london_bike_analysis.raw_bike_stations", + "fields": [ + { + "description": "Primary Key", + "fieldName": "id", + "fieldPath": "id", + "nativeType": "NUMBER", + "tags": [] + }, + { + "description": "", + "fieldName": "station_name", + "fieldPath": "station_name", + "nativeType": "TEXT", + "tags": [] + }, + { + "description": "", + "fieldName": "bikes_count", + "fieldPath": "bikes_count", + "nativeType": "NUMBER", + "tags": [] + }, + { + "description": "", + "fieldName": "docks_count", + "fieldPath": "docks_count", + "nativeType": "NUMBER", + "tags": [] + }, + { + "description": "", + "fieldName": "install_date", + "fieldPath": "install_date", + "nativeType": "DATE", + "tags": [] + }, + { + "description": "", + "fieldName": "removal_date", + "fieldPath": "removal_date", + "nativeType": "DATE", + "tags": [] + } + ], + "materialization": { + "targetDataset": "DATASET~94CCD222AF1361D0A9D552BB8DC69A8A", + "type": "TABLE" + }, + "packageName": "london_bike_analysis", + "rawSql": "SELECT \r\n id\r\n , name as station_name\r\n , bikes_count\r\n , docks_count\r\n , install_date\r\n , removal_date\r\nFROM {{ source('berlin_bicycles', 'cycle_stations') }}\r\nWHERE install_date < '2017-01-01' and (removal_date < '2018-01-01' or removal_date is null)", + "sourceDatasets": [ + "DATASET~221DEF4E5CF338DA5E3CD9F86862E9AD" + ], + "url": "https://cloud.getdbt.com/explore/1/projects/123/environments/production/details/model.london_bike_analysis.raw_bike_stations" + }, + "entityUpstream": { + "sourceEntities": [ + "DATASET~221DEF4E5CF338DA5E3CD9F86862E9AD" + ] + }, + "logicalId": { + "name": "london_bike_analysis.raw_bike_stations", + "type": "DBT_MODEL" + }, + "structure": { + "directories": [ + "london_bike_analysis" + ], + "name": "raw_bike_stations" + } + }, + { + "dbtModel": { + "compiledSql": "WITH stations AS (\n\n SELECT *\n FROM ACME.ride_share.raw_bike_stations\n\n),\n\nrides AS (\n\n SELECT *\n FROM ACME.ride_share.cleaned_bike_rides\n\n),\n\nstart_stat_join AS (\n\n SELECT rides.*\n , stations.bikes_count as start_station_bikes_count\n , stations.docks_count as start_station_docks_count\n , stations.install_date as start_station_install_date\n FROM rides\n LEFT JOIN stations\n ON rides.start_station_id = stations.id\n)\n\nSELECT \n total_minutes \n , total_bike_hires \n , average_duration \n , month \n , start_peak_travel\n , same_station_flag\n , start_station_id\n , start_station_name\n , start_station_bikes_count \n , start_station_docks_count \n , start_station_install_date \n , end_station_id\n , end_station_name\n , stations.bikes_count as end_station_bikes_count\n , stations.docks_count as end_station_docks_count\n , stations.install_date as end_station_install_date\nFROM start_stat_join\nLEFT JOIN stations\nON start_stat_join.end_station_id = stations.id", + "docsUrl": "https://cloud.getdbt.com/accounts/1/jobs/23/docs/#!/model/model.london_bike_analysis.rides_by_month_2017", + "fields": [ + { + "fieldName": "total_minutes", + "fieldPath": "total_minutes", + "nativeType": "NUMBER", + "tags": [] + }, + { + "fieldName": "total_bike_hires", + "fieldPath": "total_bike_hires", + "nativeType": "NUMBER", + "tags": [] + }, + { + "fieldName": "average_duration", + "fieldPath": "average_duration", + "nativeType": "NUMBER", + "tags": [] + }, + { + "fieldName": "month", + "fieldPath": "month", + "nativeType": "NUMBER", + "tags": [] + }, + { + "fieldName": 
"start_peak_travel", + "fieldPath": "start_peak_travel", + "nativeType": "TEXT", + "tags": [] + }, + { + "fieldName": "same_station_flag", + "fieldPath": "same_station_flag", + "nativeType": "BOOLEAN", + "tags": [] + }, + { + "fieldName": "start_station_id", + "fieldPath": "start_station_id", + "nativeType": "NUMBER", + "tags": [] + }, + { + "fieldName": "start_station_name", + "fieldPath": "start_station_name", + "nativeType": "TEXT", + "tags": [] + }, + { + "fieldName": "start_station_bikes_count", + "fieldPath": "start_station_bikes_count", + "nativeType": "NUMBER", + "tags": [] + }, + { + "fieldName": "start_station_docks_count", + "fieldPath": "start_station_docks_count", + "nativeType": "NUMBER", + "tags": [] + }, + { + "fieldName": "start_station_install_date", + "fieldPath": "start_station_install_date", + "nativeType": "DATE", + "tags": [] + }, + { + "fieldName": "end_station_id", + "fieldPath": "end_station_id", + "nativeType": "NUMBER", + "tags": [] + }, + { + "fieldName": "end_station_name", + "fieldPath": "end_station_name", + "nativeType": "TEXT", + "tags": [] + }, + { + "fieldName": "end_station_bikes_count", + "fieldPath": "end_station_bikes_count", + "nativeType": "NUMBER", + "tags": [] + }, + { + "fieldName": "end_station_docks_count", + "fieldPath": "end_station_docks_count", + "nativeType": "NUMBER", + "tags": [] + }, + { + "fieldName": "end_station_install_date", + "fieldPath": "end_station_install_date", + "nativeType": "DATE", + "tags": [] + } + ], + "materialization": { + "targetDataset": "DATASET~92BCFD89936A73CBB7CA66BE21E65B8A", + "type": "TABLE" + }, + "packageName": "london_bike_analysis", + "rawSql": "WITH stations AS (\r\n\r\n SELECT *\r\n FROM {{ ref('raw_bike_stations') }}\r\n\r\n),\r\n\r\nrides AS (\r\n\r\n SELECT *\r\n FROM {{ ref('cleaned_bike_rides') }}\r\n\r\n),\r\n\r\nstart_stat_join AS (\r\n\r\n SELECT rides.*\r\n , stations.bikes_count as start_station_bikes_count\r\n , stations.docks_count as start_station_docks_count\r\n , stations.install_date as start_station_install_date\r\n FROM rides\r\n LEFT JOIN stations\r\n ON rides.start_station_id = stations.id\r\n)\r\n\r\nSELECT \r\n total_minutes \r\n , total_bike_hires \r\n , average_duration \r\n , month \r\n , start_peak_travel\r\n , same_station_flag\r\n , start_station_id\r\n , start_station_name\r\n , start_station_bikes_count \r\n , start_station_docks_count \r\n , start_station_install_date \r\n , end_station_id\r\n , end_station_name\r\n , stations.bikes_count as end_station_bikes_count\r\n , stations.docks_count as end_station_docks_count\r\n , stations.install_date as end_station_install_date\r\nFROM start_stat_join\r\nLEFT JOIN stations\r\nON start_stat_join.end_station_id = stations.id", + "sourceModels": [ + "VIRTUAL_VIEW~E2AC9FA16880683393FEF02E8AE6F087", + "VIRTUAL_VIEW~BEABE1FF13F4749A5287A0C3AB91A51C" + ], + "url": "https://cloud.getdbt.com/explore/1/projects/123/environments/production/details/model.london_bike_analysis.rides_by_month_2017" + }, + "entityUpstream": { + "sourceEntities": [ + "VIRTUAL_VIEW~E2AC9FA16880683393FEF02E8AE6F087", + "VIRTUAL_VIEW~BEABE1FF13F4749A5287A0C3AB91A51C" + ] + }, + "logicalId": { + "name": "london_bike_analysis.rides_by_month_2017", + "type": "DBT_MODEL" + }, + "structure": { + "directories": [ + "london_bike_analysis" + ], + "name": "rides_by_month_2017" + } + }, + { + "dbtModel": { + "compiledSql": "SELECT \n SUM(total_minutes) AS total_minutes\n , ROUND(SUM(total_minutes) / 60 ,2) AS total_hours\n , SUM(total_bike_hires) AS total_bike_hires\n , 
ROUND(SUM(total_minutes) / SUM(total_bike_hires), 2) AS average_duration_in_minutes\n , month\n , start_peak_travel\n , same_station_flag\n , start_station_id\n , start_station_name\n , start_station_bikes_count\n , start_station_docks_count\n , start_station_install_date\nFROM ACME.ride_share.rides_by_month_2017\nGROUP BY 5,6,7,8,9,10,11,12\nORDER BY total_bike_hires DESC", + "docsUrl": "https://cloud.getdbt.com/accounts/1/jobs/23/docs/#!/model/model.london_bike_analysis.rides_by_month_start_station_2017", + "fields": [ + { + "fieldName": "total_minutes", + "fieldPath": "total_minutes", + "nativeType": "NUMBER", + "tags": [] + }, + { + "fieldName": "total_hours", + "fieldPath": "total_hours", + "nativeType": "NUMBER", + "tags": [] + }, + { + "fieldName": "total_bike_hires", + "fieldPath": "total_bike_hires", + "nativeType": "NUMBER", + "tags": [] + }, + { + "fieldName": "average_duration_in_minutes", + "fieldPath": "average_duration_in_minutes", + "nativeType": "NUMBER", + "tags": [] + }, + { + "fieldName": "month", + "fieldPath": "month", + "nativeType": "NUMBER", + "tags": [] + }, + { + "fieldName": "start_peak_travel", + "fieldPath": "start_peak_travel", + "nativeType": "TEXT", + "tags": [] + }, + { + "fieldName": "same_station_flag", + "fieldPath": "same_station_flag", + "nativeType": "BOOLEAN", + "tags": [] + }, + { + "fieldName": "start_station_id", + "fieldPath": "start_station_id", + "nativeType": "NUMBER", + "tags": [] + }, + { + "fieldName": "start_station_name", + "fieldPath": "start_station_name", + "nativeType": "TEXT", + "tags": [] + }, + { + "fieldName": "start_station_bikes_count", + "fieldPath": "start_station_bikes_count", + "nativeType": "NUMBER", + "tags": [] + }, + { + "fieldName": "start_station_docks_count", + "fieldPath": "start_station_docks_count", + "nativeType": "NUMBER", + "tags": [] + }, + { + "fieldName": "start_station_install_date", + "fieldPath": "start_station_install_date", + "nativeType": "DATE", + "tags": [] + } + ], + "materialization": { + "targetDataset": "DATASET~F190E85DB324F612C64041A640B83248", + "type": "TABLE" + }, + "packageName": "london_bike_analysis", + "rawSql": "SELECT \r\n SUM(total_minutes) AS total_minutes\r\n , ROUND(SUM(total_minutes) / 60 ,2) AS total_hours\r\n , SUM(total_bike_hires) AS total_bike_hires\r\n , ROUND(SUM(total_minutes) / SUM(total_bike_hires), 2) AS average_duration_in_minutes\r\n , month\r\n , start_peak_travel\r\n , same_station_flag\r\n , start_station_id\r\n , start_station_name\r\n , start_station_bikes_count\r\n , start_station_docks_count\r\n , start_station_install_date\r\nFROM {{ ref('rides_by_month_2017') }}\r\nGROUP BY 5,6,7,8,9,10,11,12\r\nORDER BY total_bike_hires DESC", + "sourceModels": [ + "VIRTUAL_VIEW~C7AD0F50959B4A41901E513EB19B2592" + ], + "url": "https://cloud.getdbt.com/explore/1/projects/123/environments/production/details/model.london_bike_analysis.rides_by_month_start_station_2017" + }, + "entityUpstream": { + "sourceEntities": [ + "VIRTUAL_VIEW~C7AD0F50959B4A41901E513EB19B2592" + ] + }, + "logicalId": { + "name": "london_bike_analysis.rides_by_month_start_station_2017", + "type": "DBT_MODEL" + }, + "structure": { + "directories": [ + "london_bike_analysis" + ], + "name": "rides_by_month_start_station_2017" + } + }, + { + "dbtModel": { + "compiledSql": "\n\n\nselect * from ACME.berlin_bicycles.cycle_hire", + "docsUrl": "https://cloud.getdbt.com/accounts/1/jobs/23/docs/#!/model/snapshot.london_bike_analysis.cycle_hire_snapshot", + "fields": [ + { + "fieldName": "rental_id", + "fieldPath": 
"rental_id", + "nativeType": "NUMBER", + "tags": [] + }, + { + "fieldName": "duration", + "fieldPath": "duration", + "nativeType": "NUMBER", + "tags": [] + }, + { + "fieldName": "bike_id", + "fieldPath": "bike_id", + "nativeType": "NUMBER", + "tags": [] + }, + { + "fieldName": "end_date", + "fieldPath": "end_date", + "nativeType": "TIMESTAMP_NTZ", + "tags": [] + }, + { + "fieldName": "end_station_id", + "fieldPath": "end_station_id", + "nativeType": "NUMBER", + "tags": [] + }, + { + "fieldName": "end_station_name", + "fieldPath": "end_station_name", + "nativeType": "TEXT", + "tags": [] + }, + { + "fieldName": "start_date", + "fieldPath": "start_date", + "nativeType": "TIMESTAMP_NTZ", + "tags": [] + }, + { + "fieldName": "start_station_id", + "fieldPath": "start_station_id", + "nativeType": "NUMBER", + "tags": [] + }, + { + "fieldName": "start_station_name", + "fieldPath": "start_station_name", + "nativeType": "TEXT", + "tags": [] + }, + { + "fieldName": "end_station_logical_terminal", + "fieldPath": "end_station_logical_terminal", + "nativeType": "NUMBER", + "tags": [] + }, + { + "fieldName": "start_station_logical_terminal", + "fieldPath": "start_station_logical_terminal", + "nativeType": "NUMBER", + "tags": [] + }, + { + "fieldName": "end_station_priority_id", + "fieldPath": "end_station_priority_id", + "nativeType": "NUMBER", + "tags": [] + }, + { + "fieldName": "pricing_tier", + "fieldPath": "pricing_tier", + "nativeType": "TEXT", + "tags": [] + }, + { + "fieldName": "dbt_scd_id", + "fieldPath": "dbt_scd_id", + "nativeType": "TEXT", + "tags": [] + }, + { + "fieldName": "dbt_updated_at", + "fieldPath": "dbt_updated_at", + "nativeType": "TIMESTAMP_NTZ", + "tags": [] + }, + { + "fieldName": "dbt_valid_from", + "fieldPath": "dbt_valid_from", + "nativeType": "TIMESTAMP_NTZ", + "tags": [] + }, + { + "fieldName": "dbt_valid_to", + "fieldPath": "dbt_valid_to", + "nativeType": "TIMESTAMP_NTZ", + "tags": [] + } + ], + "packageName": "london_bike_analysis", + "rawSql": "\n{{\n config(\n target_schema='snapshots',\n strategy='check',\n unique_key='bike_id',\n check_cols=['start_date', 'end_date']\n )\n}}\n\nselect * from {{ source('berlin_bicycles', 'cycle_hire') }}\n", + "url": "https://cloud.getdbt.com/explore/1/projects/123/environments/production/details/snapshot.london_bike_analysis.cycle_hire_snapshot" + }, + "logicalId": { + "name": "london_bike_analysis.cycle_hire_snapshot", + "type": "DBT_MODEL" + }, + "structure": { + "directories": [ + "london_bike_analysis" + ], + "name": "cycle_hire_snapshot" + } + } +] diff --git a/tests/dbt/cloud/fake_graphql_server/__init__.py b/tests/dbt/cloud/fake_graphql_server/__init__.py new file mode 100644 index 00000000..e22a807a --- /dev/null +++ b/tests/dbt/cloud/fake_graphql_server/__init__.py @@ -0,0 +1,5 @@ +from .fake_graphql_server import endpoints + +__all__ = [ + "endpoints", +] diff --git a/tests/dbt/cloud/fake_graphql_server/fake_graphql_server.py b/tests/dbt/cloud/fake_graphql_server/fake_graphql_server.py new file mode 100644 index 00000000..af0c2323 --- /dev/null +++ b/tests/dbt/cloud/fake_graphql_server/fake_graphql_server.py @@ -0,0 +1,35 @@ +from tests.dbt.cloud.fake_graphql_server.get_environment_adapter_type.endpoint import ( + fake_GetEnvironmentAdapterType, +) +from tests.dbt.cloud.fake_graphql_server.get_job_run_macros.endpoint import ( + fake_GetJobRunMacros, +) +from tests.dbt.cloud.fake_graphql_server.get_job_run_metrics.endpoint import ( + fake_GetJobRunMetrics, +) +from 
tests.dbt.cloud.fake_graphql_server.get_job_run_models.endpoint import ( + fake_GetJobRunModels, +) +from tests.dbt.cloud.fake_graphql_server.get_job_run_snapshots.endpoint import ( + fake_GetJobRunSnapshots, +) +from tests.dbt.cloud.fake_graphql_server.get_job_run_sources.endpoint import ( + fake_GetJobRunSources, +) +from tests.dbt.cloud.fake_graphql_server.get_job_run_tests.endpoint import ( + fake_GetJobRunTests, +) +from tests.dbt.cloud.fake_graphql_server.get_macro_arguments.endpoint import ( + fake_GetMacroArguments, +) + +endpoints = { + "GetEnvironmentAdapterType": fake_GetEnvironmentAdapterType, + "GetJobRunMacros": fake_GetJobRunMacros, + "GetJobRunMetrics": fake_GetJobRunMetrics, + "GetJobRunModels": fake_GetJobRunModels, + "GetJobRunSnapshots": fake_GetJobRunSnapshots, + "GetJobRunSources": fake_GetJobRunSources, + "GetJobRunTests": fake_GetJobRunTests, + "GetMacroArguments": fake_GetMacroArguments, +} diff --git a/tests/dbt/cloud/fake_graphql_server/get_environment_adapter_type/__init__.py b/tests/dbt/cloud/fake_graphql_server/get_environment_adapter_type/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/tests/dbt/cloud/fake_graphql_server/get_environment_adapter_type/endpoint.py b/tests/dbt/cloud/fake_graphql_server/get_environment_adapter_type/endpoint.py new file mode 100644 index 00000000..913bcc98 --- /dev/null +++ b/tests/dbt/cloud/fake_graphql_server/get_environment_adapter_type/endpoint.py @@ -0,0 +1,12 @@ +import os +from typing import Any, Dict + +from ..targets import environment_targets + +dir_path = os.path.dirname(os.path.realpath(__file__)) + + +def fake_GetEnvironmentAdapterType(variables: Dict[str, Any]): + target = environment_targets[variables["environmentId"]] + with open(f"{dir_path}/{target}.json") as f: + return f.read() diff --git a/tests/dbt/cloud/fake_graphql_server/get_environment_adapter_type/jaffle_shop.json b/tests/dbt/cloud/fake_graphql_server/get_environment_adapter_type/jaffle_shop.json new file mode 100644 index 00000000..f1fd80b6 --- /dev/null +++ b/tests/dbt/cloud/fake_graphql_server/get_environment_adapter_type/jaffle_shop.json @@ -0,0 +1,6 @@ +{ + "environment": { + "adapterType": "snowflake", + "dbtProjectName": "jaffle_shop" + } +} diff --git a/tests/dbt/cloud/fake_graphql_server/get_environment_adapter_type/london_bike_analysis.json b/tests/dbt/cloud/fake_graphql_server/get_environment_adapter_type/london_bike_analysis.json new file mode 100644 index 00000000..2e9cdb8a --- /dev/null +++ b/tests/dbt/cloud/fake_graphql_server/get_environment_adapter_type/london_bike_analysis.json @@ -0,0 +1,6 @@ +{ + "environment": { + "adapterType": "snowflake", + "dbtProjectName": "london_bike_analysis" + } +} diff --git a/tests/dbt/cloud/fake_graphql_server/get_environment_adapter_type/metaphor_subscriptions.json b/tests/dbt/cloud/fake_graphql_server/get_environment_adapter_type/metaphor_subscriptions.json new file mode 100644 index 00000000..9a15c2a0 --- /dev/null +++ b/tests/dbt/cloud/fake_graphql_server/get_environment_adapter_type/metaphor_subscriptions.json @@ -0,0 +1,6 @@ +{ + "environment": { + "adapterType": "snowflake", + "dbtProjectName": "metaphor_subscriptions" + } +} diff --git a/tests/dbt/cloud/fake_graphql_server/get_job_run_macros/__init__.py b/tests/dbt/cloud/fake_graphql_server/get_job_run_macros/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/tests/dbt/cloud/fake_graphql_server/get_job_run_macros/endpoint.py b/tests/dbt/cloud/fake_graphql_server/get_job_run_macros/endpoint.py new file mode 
100644 index 00000000..dab06c39 --- /dev/null +++ b/tests/dbt/cloud/fake_graphql_server/get_job_run_macros/endpoint.py @@ -0,0 +1,12 @@ +import os +from typing import Any, Dict + +from ..targets import job_targets + +dir_path = os.path.dirname(os.path.realpath(__file__)) + + +def fake_GetJobRunMacros(variables: Dict[str, Any]): + target = job_targets[variables["jobId"]] + with open(f"{dir_path}/{target}.json") as f: + return f.read() diff --git a/tests/dbt/cloud/fake_graphql_server/get_job_run_macros/jaffle_shop.json b/tests/dbt/cloud/fake_graphql_server/get_job_run_macros/jaffle_shop.json new file mode 100644 index 00000000..3d6497fe --- /dev/null +++ b/tests/dbt/cloud/fake_graphql_server/get_job_run_macros/jaffle_shop.json @@ -0,0 +1,1286 @@ +{ + "job": { + "macros": [ + { + "dependsOn": [], + "description": "", + "environmentId": 132676, + "macroSql": "{%- macro bigquery__convert_timezone(column, target_tz, source_tz=None) -%}\\ntimestamp(datetime({{ column }}, '{{ target_tz}}'))\\n{%- endmacro -%}\\n\\n", + "meta": {}, + "name": "bigquery__convert_timezone", + "packageName": "dbt_date", + "uniqueId": "macro.dbt_date.bigquery__convert_timezone" + }, + { + "dependsOn": [], + "description": "", + "environmentId": 132676, + "macroSql": "{% macro bigquery__date_part(datepart, date) -%}\\n extract({{ datepart }} from {{ date }})\\n{%- endmacro %}", + "meta": {}, + "name": "bigquery__date_part", + "packageName": "dbt_date", + "uniqueId": "macro.dbt_date.bigquery__date_part" + }, + { + "dependsOn": [], + "description": "", + "environmentId": 132676, + "macroSql": "\\n\\n{%- macro bigquery__day_name(date, short) -%}\\n{%- set f = '%a' if short else '%A' -%}\\n format_date('{{ f }}', cast({{ date }} as date))\\n{%- endmacro %}", + "meta": {}, + "name": "bigquery__day_name", + "packageName": "dbt_date", + "uniqueId": "macro.dbt_date.bigquery__day_name" + }, + { + "dependsOn": [ + "macro.dbt_date.date_part" + ], + "description": "", + "environmentId": 132676, + "macroSql": "\\n\\n{%- macro bigquery__day_of_week(date, isoweek) -%}\\n\\n {%- set dow = dbt_date.date_part('dayofweek', date) -%}\\n\\n {%- if isoweek -%}\\n case\\n -- Shift start of week from Sunday (1) to Monday (2)\\n when {{ dow }} = 1 then 7\\n else {{ dow }} - 1\\n end\\n {%- else -%}\\n {{ dow }}\\n {%- endif -%}\\n\\n{%- endmacro %}", + "meta": {}, + "name": "bigquery__day_of_week", + "packageName": "dbt_date", + "uniqueId": "macro.dbt_date.bigquery__day_of_week" + }, + { + "dependsOn": [], + "description": "", + "environmentId": 132676, + "macroSql": "\\n\\n{%- macro bigquery__from_unixtimestamp(epochs, format) -%}\\n {%- if format == \"seconds\" -%}\\n timestamp_seconds({{ epochs }})\\n {%- elif format == \"milliseconds\" -%}\\n timestamp_millis({{ epochs }})\\n {%- elif format == \"microseconds\" -%}\\n timestamp_micros({{ epochs }})\\n {%- else -%}\\n {{ exceptions.raise_compiler_error(\\n \"value \" ~ format ~ \" for `format` for from_unixtimestamp is not supported.\"\\n )\\n }}\\n {% endif -%}\\n{%- endmacro %}", + "meta": {}, + "name": "bigquery__from_unixtimestamp", + "packageName": "dbt_date", + "uniqueId": "macro.dbt_date.bigquery__from_unixtimestamp" + }, + { + "dependsOn": [ + "macro.dbt.dateadd", + "macro.dbt_date.today", + "macro.dbt_date.tomorrow", + "macro.dbt_date.date_spine", + "macro.dbt.type_timestamp" + ], + "description": "", + "environmentId": 132676, + "macroSql": "{% macro bigquery__get_base_dates(start_date, end_date, n_dateparts, datepart) %}\\n\\n{%- if start_date and end_date -%}\\n{%- set 
start_date=\"cast('\" ~ start_date ~ \"' as date )\" -%}\\n{%- set end_date=\"cast('\" ~ end_date ~ \"' as date )\" -%}\\n\\n{%- elif n_dateparts and datepart -%}\\n\\n{%- set start_date = dbt.dateadd(datepart, -1 * n_dateparts, dbt_date.today()) -%}\\n{%- set end_date = dbt_date.tomorrow() -%}\\n{%- endif -%}\\n\\nwith date_spine as\\n(\\n\\n {{ dbt_date.date_spine(\\n datepart=datepart,\\n start_date=start_date,\\n end_date=end_date,\\n )\\n }}\\n\\n)\\nselect\\n cast(d.date_{{ datepart }} as {{ dbt.type_timestamp() }}) as date_{{ datepart }}\\nfrom\\n date_spine d\\n{% endmacro %}", + "meta": {}, + "name": "bigquery__get_base_dates", + "packageName": "dbt_date", + "uniqueId": "macro.dbt_date.bigquery__get_base_dates" + }, + { + "dependsOn": [], + "description": "", + "environmentId": 132676, + "macroSql": "\\n\\n{%- macro bigquery__month_name(date, short) -%}\\n{%- set f = '%b' if short else '%B' -%}\\n format_date('{{ f }}', cast({{ date }} as date))\\n{%- endmacro %}", + "meta": {}, + "name": "bigquery__month_name", + "packageName": "dbt_date", + "uniqueId": "macro.dbt_date.bigquery__month_name" + }, + { + "dependsOn": [], + "description": "", + "environmentId": 132676, + "macroSql": "\\n\\n{%- macro bigquery__to_unixtimestamp(timestamp) -%}\\n unix_seconds({{ timestamp }})\\n{%- endmacro %}", + "meta": {}, + "name": "bigquery__to_unixtimestamp", + "packageName": "dbt_date", + "uniqueId": "macro.dbt_date.bigquery__to_unixtimestamp" + }, + { + "dependsOn": [ + "macro.dbt_date.default__convert_timezone" + ], + "description": "", + "environmentId": 132676, + "macroSql": "{%- macro convert_timezone(column, target_tz=None, source_tz=None) -%}\\n{%- set source_tz = \"UTC\" if not source_tz else source_tz -%}\\n{%- set target_tz = var(\"dbt_date:time_zone\") if not target_tz else target_tz -%}\\n{{ adapter.dispatch('convert_timezone', 'dbt_date') (column, target_tz, source_tz) }}\\n{%- endmacro -%}\\n\\n", + "meta": {}, + "name": "convert_timezone", + "packageName": "dbt_date", + "uniqueId": "macro.dbt_date.convert_timezone" + }, + { + "dependsOn": [ + "macro.dbt_date.default__date_part" + ], + "description": "", + "environmentId": 132676, + "macroSql": "{% macro date_part(datepart, date) -%}\\n {{ adapter.dispatch('date_part', 'dbt_date') (datepart, date) }}\\n{%- endmacro %}", + "meta": {}, + "name": "date_part", + "packageName": "dbt_date", + "uniqueId": "macro.dbt_date.date_part" + }, + { + "dependsOn": [ + "macro.dbt_date.default__date_spine" + ], + "description": "", + "environmentId": 132676, + "macroSql": "{% macro date_spine(datepart, start_date, end_date) %}\\n {{ return(adapter.dispatch('date_spine', 'dbt_date')(datepart, start_date, end_date)) }}\\n{%- endmacro %}", + "meta": {}, + "name": "date_spine", + "packageName": "dbt_date", + "uniqueId": "macro.dbt_date.date_spine" + }, + { + "dependsOn": [ + "macro.dbt_date.snowflake__day_name" + ], + "description": "", + "environmentId": 132676, + "macroSql": "{%- macro day_name(date, short=True) -%}\\n {{ adapter.dispatch('day_name', 'dbt_date') (date, short) }}\\n{%- endmacro %}", + "meta": {}, + "name": "day_name", + "packageName": "dbt_date", + "uniqueId": "macro.dbt_date.day_name" + }, + { + "dependsOn": [ + "macro.dbt_date.date_part" + ], + "description": "", + "environmentId": 132676, + "macroSql": "{%- macro day_of_month(date) -%}\\n{{ dbt_date.date_part('day', date) }}\\n{%- endmacro %}", + "meta": {}, + "name": "day_of_month", + "packageName": "dbt_date", + "uniqueId": "macro.dbt_date.day_of_month" + }, + { + "dependsOn": [ + 
"macro.dbt_date.snowflake__day_of_week" + ], + "description": "", + "environmentId": 132676, + "macroSql": "{%- macro day_of_week(date, isoweek=true) -%}\\n{{ adapter.dispatch('day_of_week', 'dbt_date') (date, isoweek) }}\\n{%- endmacro %}", + "meta": {}, + "name": "day_of_week", + "packageName": "dbt_date", + "uniqueId": "macro.dbt_date.day_of_week" + }, + { + "dependsOn": [ + "macro.dbt_date.default__day_of_year" + ], + "description": "", + "environmentId": 132676, + "macroSql": "{%- macro day_of_year(date) -%}\\n{{ adapter.dispatch('day_of_year', 'dbt_date') (date) }}\\n{%- endmacro %}", + "meta": {}, + "name": "day_of_year", + "packageName": "dbt_date", + "uniqueId": "macro.dbt_date.day_of_year" + }, + { + "dependsOn": [ + "macro.dbt.type_timestamp" + ], + "description": "", + "environmentId": 132676, + "macroSql": "{% macro default__convert_timezone(column, target_tz, source_tz) -%}\\nconvert_timezone('{{ source_tz }}', '{{ target_tz }}',\\n cast({{ column }} as {{ dbt.type_timestamp() }})\\n)\\n{%- endmacro -%}\\n\\n", + "meta": {}, + "name": "default__convert_timezone", + "packageName": "dbt_date", + "uniqueId": "macro.dbt_date.default__convert_timezone" + }, + { + "dependsOn": [], + "description": "", + "environmentId": 132676, + "macroSql": "{% macro default__date_part(datepart, date) -%}\\n date_part('{{ datepart }}', {{ date }})\\n{%- endmacro %}", + "meta": {}, + "name": "default__date_part", + "packageName": "dbt_date", + "uniqueId": "macro.dbt_date.default__date_part" + }, + { + "dependsOn": [ + "macro.dbt_date.generate_series", + "macro.dbt_date.get_intervals_between", + "macro.dbt.dateadd" + ], + "description": "", + "environmentId": 132676, + "macroSql": "{% macro default__date_spine(datepart, start_date, end_date) %}\\n\\n\\n{# call as follows:\\n\\ndate_spine(\\n \"day\",\\n \"to_date('01/01/2016', 'mm/dd/yyyy')\",\\n \"dbt.dateadd(week, 1, current_date)\"\\n) #}\\n\\n\\nwith rawdata as (\\n\\n {{\\n dbt_date.generate_series(\\n dbt_date.get_intervals_between(start_date, end_date, datepart)\\n )\\n }}\\n\\n),\\n\\nall_periods as (\\n\\n select (\\n {{\\n dbt.dateadd(\\n datepart,\\n \"row_number() over (order by 1) - 1\",\\n start_date\\n )\\n }}\\n ) as date_{{datepart}}\\n from rawdata\\n\\n),\\n\\nfiltered as (\\n\\n select *\\n from all_periods\\n where date_{{datepart}} <= {{ end_date }}\\n\\n)\\n\\nselect * from filtered\\n\\n{% endmacro %}", + "meta": {}, + "name": "default__date_spine", + "packageName": "dbt_date", + "uniqueId": "macro.dbt_date.default__date_spine" + }, + { + "dependsOn": [], + "description": "", + "environmentId": 132676, + "macroSql": "\\n\\n{%- macro default__day_name(date, short) -%}\\n{%- set f = 'Dy' if short else 'Day' -%}\\n to_char({{ date }}, '{{ f }}')\\n{%- endmacro %}", + "meta": {}, + "name": "default__day_name", + "packageName": "dbt_date", + "uniqueId": "macro.dbt_date.default__day_name" + }, + { + "dependsOn": [ + "macro.dbt_date.date_part" + ], + "description": "", + "environmentId": 132676, + "macroSql": "\\n\\n{%- macro default__day_of_week(date, isoweek) -%}\\n\\n {%- set dow = dbt_date.date_part('dayofweek', date) -%}\\n\\n {%- if isoweek -%}\\n case\\n -- Shift start of week from Sunday (0) to Monday (1)\\n when {{ dow }} = 0 then 7\\n else {{ dow }}\\n end\\n {%- else -%}\\n {{ dow }} + 1\\n {%- endif -%}\\n\\n{%- endmacro %}", + "meta": {}, + "name": "default__day_of_week", + "packageName": "dbt_date", + "uniqueId": "macro.dbt_date.default__day_of_week" + }, + { + "dependsOn": [ + "macro.dbt_date.date_part" + ], + 
"description": "", + "environmentId": 132676, + "macroSql": "\\n\\n{%- macro default__day_of_year(date) -%}\\n {{ dbt_date.date_part('dayofyear', date) }}\\n{%- endmacro %}", + "meta": {}, + "name": "default__day_of_year", + "packageName": "dbt_date", + "uniqueId": "macro.dbt_date.default__day_of_year" + }, + { + "dependsOn": [], + "description": "", + "environmentId": 132676, + "macroSql": "\\n\\n{%- macro default__from_unixtimestamp(epochs, format=\"seconds\") -%}\\n {%- if format != \"seconds\" -%}\\n {{ exceptions.raise_compiler_error(\\n \"value \" ~ format ~ \" for `format` for from_unixtimestamp is not supported.\"\\n )\\n }}\\n {% endif -%}\\n to_timestamp({{ epochs }})\\n{%- endmacro %}", + "meta": {}, + "name": "default__from_unixtimestamp", + "packageName": "dbt_date", + "uniqueId": "macro.dbt_date.default__from_unixtimestamp" + }, + { + "dependsOn": [ + "macro.dbt_date.get_powers_of_two" + ], + "description": "", + "environmentId": 132676, + "macroSql": "{% macro default__generate_series(upper_bound) %}\\n\\n {% set n = dbt_date.get_powers_of_two(upper_bound) %}\\n\\n with p as (\\n select 0 as generated_number union all select 1\\n ), unioned as (\\n\\n select\\n\\n {% for i in range(n) %}\\n p{{i}}.generated_number * power(2, {{i}})\\n {% if not loop.last %} + {% endif %}\\n {% endfor %}\\n + 1\\n as generated_number\\n\\n from\\n\\n {% for i in range(n) %}\\n p as p{{i}}\\n {% if not loop.last %} cross join {% endif %}\\n {% endfor %}\\n\\n )\\n\\n select *\\n from unioned\\n where generated_number <= {{upper_bound}}\\n order by generated_number\\n\\n{% endmacro %}", + "meta": {}, + "name": "default__generate_series", + "packageName": "dbt_date", + "uniqueId": "macro.dbt_date.default__generate_series" + }, + { + "dependsOn": [ + "macro.dbt.type_timestamp", + "macro.dbt.dateadd", + "macro.dbt_date.today", + "macro.dbt_date.tomorrow", + "macro.dbt_date.date_spine" + ], + "description": "", + "environmentId": 132676, + "macroSql": "{% macro default__get_base_dates(start_date, end_date, n_dateparts, datepart) %}\\n\\n{%- if start_date and end_date -%}\\n{%- set start_date=\"cast('\" ~ start_date ~ \"' as \" ~ dbt.type_timestamp() ~ \")\" -%}\\n{%- set end_date=\"cast('\" ~ end_date ~ \"' as \" ~ dbt.type_timestamp() ~ \")\" -%}\\n\\n{%- elif n_dateparts and datepart -%}\\n\\n{%- set start_date = dbt.dateadd(datepart, -1 * n_dateparts, dbt_date.today()) -%}\\n{%- set end_date = dbt_date.tomorrow() -%}\\n{%- endif -%}\\n\\nwith date_spine as\\n(\\n\\n {{ dbt_date.date_spine(\\n datepart=datepart,\\n start_date=start_date,\\n end_date=end_date,\\n )\\n }}\\n\\n)\\nselect\\n cast(d.date_{{ datepart }} as {{ dbt.type_timestamp() }}) as date_{{ datepart }}\\nfrom\\n date_spine d\\n{% endmacro %}", + "meta": {}, + "name": "default__get_base_dates", + "packageName": "dbt_date", + "uniqueId": "macro.dbt_date.default__get_base_dates" + }, + { + "dependsOn": [ + "macro.dbt_date.get_base_dates", + "macro.dbt.dateadd", + "macro.dbt_date.yesterday", + "macro.dbt_date.tomorrow", + "macro.dbt_date.day_of_week", + "macro.dbt_date.day_name", + "macro.dbt_date.day_of_month", + "macro.dbt_date.day_of_year", + "macro.dbt_date.week_start", + "macro.dbt_date.week_end", + "macro.dbt_date.week_of_year", + "macro.dbt_date.iso_week_start", + "macro.dbt_date.iso_week_end", + "macro.dbt_date.iso_week_of_year", + "macro.dbt_date.date_part", + "macro.dbt.type_int", + "macro.dbt_date.month_name", + "macro.dbt.date_trunc", + "macro.dbt.last_day" + ], + "description": "", + "environmentId": 132676, + "macroSql": 
"{% macro default__get_date_dimension(start_date, end_date) %}\\nwith base_dates as (\\n {{ dbt_date.get_base_dates(start_date, end_date) }}\\n),\\ndates_with_prior_year_dates as (\\n\\n select\\n cast(d.date_day as date) as date_day,\\n cast({{ dbt.dateadd('year', -1 , 'd.date_day') }} as date) as prior_year_date_day,\\n cast({{ dbt.dateadd('day', -364 , 'd.date_day') }} as date) as prior_year_over_year_date_day\\n from\\n \\tbase_dates d\\n\\n)\\nselect\\n d.date_day,\\n {{ dbt_date.yesterday('d.date_day') }} as prior_date_day,\\n {{ dbt_date.tomorrow('d.date_day') }} as next_date_day,\\n d.prior_year_date_day as prior_year_date_day,\\n d.prior_year_over_year_date_day,\\n {{ dbt_date.day_of_week('d.date_day', isoweek=false) }} as day_of_week,\\n {{ dbt_date.day_of_week('d.date_day', isoweek=true) }} as day_of_week_iso,\\n {{ dbt_date.day_name('d.date_day', short=false) }} as day_of_week_name,\\n {{ dbt_date.day_name('d.date_day', short=true) }} as day_of_week_name_short,\\n {{ dbt_date.day_of_month('d.date_day') }} as day_of_month,\\n {{ dbt_date.day_of_year('d.date_day') }} as day_of_year,\\n\\n {{ dbt_date.week_start('d.date_day') }} as week_start_date,\\n {{ dbt_date.week_end('d.date_day') }} as week_end_date,\\n {{ dbt_date.week_start('d.prior_year_over_year_date_day') }} as prior_year_week_start_date,\\n {{ dbt_date.week_end('d.prior_year_over_year_date_day') }} as prior_year_week_end_date,\\n {{ dbt_date.week_of_year('d.date_day') }} as week_of_year,\\n\\n {{ dbt_date.iso_week_start('d.date_day') }} as iso_week_start_date,\\n {{ dbt_date.iso_week_end('d.date_day') }} as iso_week_end_date,\\n {{ dbt_date.iso_week_start('d.prior_year_over_year_date_day') }} as prior_year_iso_week_start_date,\\n {{ dbt_date.iso_week_end('d.prior_year_over_year_date_day') }} as prior_year_iso_week_end_date,\\n {{ dbt_date.iso_week_of_year('d.date_day') }} as iso_week_of_year,\\n\\n {{ dbt_date.week_of_year('d.prior_year_over_year_date_day') }} as prior_year_week_of_year,\\n {{ dbt_date.iso_week_of_year('d.prior_year_over_year_date_day') }} as prior_year_iso_week_of_year,\\n\\n cast({{ dbt_date.date_part('month', 'd.date_day') }} as {{ dbt.type_int() }}) as month_of_year,\\n {{ dbt_date.month_name('d.date_day', short=false) }} as month_name,\\n {{ dbt_date.month_name('d.date_day', short=true) }} as month_name_short,\\n\\n cast({{ dbt.date_trunc('month', 'd.date_day') }} as date) as month_start_date,\\n cast({{ last_day('d.date_day', 'month') }} as date) as month_end_date,\\n\\n cast({{ dbt.date_trunc('month', 'd.prior_year_date_day') }} as date) as prior_year_month_start_date,\\n cast({{ last_day('d.prior_year_date_day', 'month') }} as date) as prior_year_month_end_date,\\n\\n cast({{ dbt_date.date_part('quarter', 'd.date_day') }} as {{ dbt.type_int() }}) as quarter_of_year,\\n cast({{ dbt.date_trunc('quarter', 'd.date_day') }} as date) as quarter_start_date,\\n cast({{ last_day('d.date_day', 'quarter') }} as date) as quarter_end_date,\\n\\n cast({{ dbt_date.date_part('year', 'd.date_day') }} as {{ dbt.type_int() }}) as year_number,\\n cast({{ dbt.date_trunc('year', 'd.date_day') }} as date) as year_start_date,\\n cast({{ last_day('d.date_day', 'year') }} as date) as year_end_date\\nfrom\\n dates_with_prior_year_dates d\\norder by 1\\n{% endmacro %}", + "meta": {}, + "name": "default__get_date_dimension", + "packageName": "dbt_date", + "uniqueId": "macro.dbt_date.default__get_date_dimension" + }, + { + "dependsOn": [ + "macro.dbt.dateadd", + "macro.dbt.datediff" + ], + "description": "", + 
"environmentId": 132676, + "macroSql": "{% macro default__get_fiscal_year_dates(dates, year_end_month, week_start_day, shift_year) %}\\n-- this gets all the dates within a fiscal year\\n-- determined by the given year-end-month\\n-- ending on the saturday closest to that month's end date\\nwith date_dimension as (\\n select * from {{ dates }}\\n),\\nyear_month_end as (\\n\\n select\\n d.year_number - {{ shift_year }} as fiscal_year_number,\\n d.month_end_date\\n from\\n date_dimension d\\n where\\n d.month_of_year = {{ year_end_month }}\\n group by 1,2\\n\\n),\\nweeks as (\\n\\n select\\n d.year_number,\\n d.month_of_year,\\n d.date_day as week_start_date,\\n cast({{ dbt.dateadd('day', 6, 'd.date_day') }} as date) as week_end_date\\n from\\n date_dimension d\\n where\\n d.day_of_week = {{ week_start_day }}\\n\\n),\\n-- get all the weeks that start in the month the year ends\\nyear_week_ends as (\\n\\n select\\n d.year_number - {{ shift_year }} as fiscal_year_number,\\n d.week_end_date\\n from\\n weeks d\\n where\\n d.month_of_year = {{ year_end_month }}\\n group by\\n 1,2\\n\\n),\\n-- then calculate which Saturday is closest to month end\\nweeks_at_month_end as (\\n\\n select\\n d.fiscal_year_number,\\n d.week_end_date,\\n m.month_end_date,\\n rank() over\\n (partition by d.fiscal_year_number\\n order by\\n abs({{ dbt.datediff('d.week_end_date', 'm.month_end_date', 'day') }})\\n\\n ) as closest_to_month_end\\n from\\n year_week_ends d\\n join\\n year_month_end m on d.fiscal_year_number = m.fiscal_year_number\\n),\\nfiscal_year_range as (\\n\\n select\\n w.fiscal_year_number,\\n cast(\\n {{ dbt.dateadd('day', 1,\\n 'lag(w.week_end_date) over(order by w.week_end_date)') }}\\n as date) as fiscal_year_start_date,\\n w.week_end_date as fiscal_year_end_date\\n from\\n weeks_at_month_end w\\n where\\n w.closest_to_month_end = 1\\n\\n),\\nfiscal_year_dates as (\\n\\n select\\n d.date_day,\\n m.fiscal_year_number,\\n m.fiscal_year_start_date,\\n m.fiscal_year_end_date,\\n w.week_start_date,\\n w.week_end_date,\\n -- we reset the weeks of the year starting with the merch year start date\\n dense_rank()\\n over(\\n partition by m.fiscal_year_number\\n order by w.week_start_date\\n ) as fiscal_week_of_year\\n from\\n date_dimension d\\n join\\n fiscal_year_range m on d.date_day between m.fiscal_year_start_date and m.fiscal_year_end_date\\n join\\n weeks w on d.date_day between w.week_start_date and w.week_end_date\\n\\n)\\nselect * from fiscal_year_dates order by 1\\n{% endmacro %}", + "meta": {}, + "name": "default__get_fiscal_year_dates", + "packageName": "dbt_date", + "uniqueId": "macro.dbt_date.default__get_fiscal_year_dates" + }, + { + "dependsOn": [ + "macro.dbt.statement", + "macro.dbt.datediff" + ], + "description": "", + "environmentId": 132676, + "macroSql": "{% macro default__get_intervals_between(start_date, end_date, datepart) -%}\\n {%- call statement('get_intervals_between', fetch_result=True) %}\\n\\n select {{ dbt.datediff(start_date, end_date, datepart) }}\\n\\n {%- endcall -%}\\n\\n {%- set value_list = load_result('get_intervals_between') -%}\\n\\n {%- if value_list and value_list['data'] -%}\\n {%- set values = value_list['data'] | map(attribute=0) | list %}\\n {{ return(values[0]) }}\\n {%- else -%}\\n {{ return(1) }}\\n {%- endif -%}\\n\\n{%- endmacro %}", + "meta": {}, + "name": "default__get_intervals_between", + "packageName": "dbt_date", + "uniqueId": "macro.dbt_date.default__get_intervals_between" + }, + { + "dependsOn": [], + "description": "", + "environmentId": 132676, + 
"macroSql": "{% macro default__get_powers_of_two(upper_bound) %}\\n\\n {% if upper_bound <= 0 %}\\n {{ exceptions.raise_compiler_error(\"upper bound must be positive\") }}\\n {% endif %}\\n\\n {% for _ in range(1, 100) %}\\n {% if upper_bound <= 2 ** loop.index %}{{ return(loop.index) }}{% endif %}\\n {% endfor %}\\n\\n{% endmacro %}", + "meta": {}, + "name": "default__get_powers_of_two", + "packageName": "dbt_date", + "uniqueId": "macro.dbt_date.default__get_powers_of_two" + }, + { + "dependsOn": [ + "macro.dbt_date._iso_week_end" + ], + "description": "", + "environmentId": 132676, + "macroSql": "\\n\\n{%- macro default__iso_week_end(date) -%}\\n{{ dbt_date._iso_week_end(date, 'isoweek') }}\\n{%- endmacro %}", + "meta": {}, + "name": "default__iso_week_end", + "packageName": "dbt_date", + "uniqueId": "macro.dbt_date.default__iso_week_end" + }, + { + "dependsOn": [ + "macro.dbt_date._iso_week_of_year" + ], + "description": "", + "environmentId": 132676, + "macroSql": "\\n\\n{%- macro default__iso_week_of_year(date) -%}\\n{{ dbt_date._iso_week_of_year(date, 'isoweek') }}\\n{%- endmacro %}", + "meta": {}, + "name": "default__iso_week_of_year", + "packageName": "dbt_date", + "uniqueId": "macro.dbt_date.default__iso_week_of_year" + }, + { + "dependsOn": [ + "macro.dbt_date._iso_week_start" + ], + "description": "", + "environmentId": 132676, + "macroSql": "\\n\\n{%- macro default__iso_week_start(date) -%}\\n{{ dbt_date._iso_week_start(date, 'isoweek') }}\\n{%- endmacro %}", + "meta": {}, + "name": "default__iso_week_start", + "packageName": "dbt_date", + "uniqueId": "macro.dbt_date.default__iso_week_start" + }, + { + "dependsOn": [], + "description": "", + "environmentId": 132676, + "macroSql": "\\n\\n{%- macro default__month_name(date, short) -%}\\n{%- set f = 'MON' if short else 'MONTH' -%}\\n to_char({{ date }}, '{{ f }}')\\n{%- endmacro %}", + "meta": {}, + "name": "default__month_name", + "packageName": "dbt_date", + "uniqueId": "macro.dbt_date.default__month_name" + }, + { + "dependsOn": [ + "macro.dbt_date.date_part" + ], + "description": "", + "environmentId": 132676, + "macroSql": "\\n\\n{%- macro default__to_unixtimestamp(timestamp) -%}\\n {{ dbt_date.date_part('epoch', timestamp) }}\\n{%- endmacro %}", + "meta": {}, + "name": "default__to_unixtimestamp", + "packageName": "dbt_date", + "uniqueId": "macro.dbt_date.default__to_unixtimestamp" + }, + { + "dependsOn": [ + "macro.dbt.last_day" + ], + "description": "", + "environmentId": 132676, + "macroSql": "{%- macro default__week_end(date) -%}\\n{{ last_day(date, 'week') }}\\n{%- endmacro %}", + "meta": {}, + "name": "default__week_end", + "packageName": "dbt_date", + "uniqueId": "macro.dbt_date.default__week_end" + }, + { + "dependsOn": [ + "macro.dbt_date.date_part", + "macro.dbt.type_int" + ], + "description": "", + "environmentId": 132676, + "macroSql": "{%- macro default__week_of_year(date) -%}\\ncast({{ dbt_date.date_part('week', date) }} as {{ dbt.type_int() }})\\n{%- endmacro %}", + "meta": {}, + "name": "default__week_of_year", + "packageName": "dbt_date", + "uniqueId": "macro.dbt_date.default__week_of_year" + }, + { + "dependsOn": [ + "macro.dbt.date_trunc" + ], + "description": "", + "environmentId": 132676, + "macroSql": "{%- macro default__week_start(date) -%}\\ncast({{ dbt.date_trunc('week', date) }} as date)\\n{%- endmacro %}", + "meta": {}, + "name": "default__week_start", + "packageName": "dbt_date", + "uniqueId": "macro.dbt_date.default__week_start" + }, + { + "dependsOn": [ + 
"macro.dbt_date.snowflake__from_unixtimestamp" + ], + "description": "", + "environmentId": 132676, + "macroSql": "{%- macro from_unixtimestamp(epochs, format=\"seconds\") -%}\\n {{ adapter.dispatch('from_unixtimestamp', 'dbt_date') (epochs, format) }}\\n{%- endmacro %}", + "meta": {}, + "name": "from_unixtimestamp", + "packageName": "dbt_date", + "uniqueId": "macro.dbt_date.from_unixtimestamp" + }, + { + "dependsOn": [ + "macro.dbt_date.default__generate_series" + ], + "description": "", + "environmentId": 132676, + "macroSql": "{% macro generate_series(upper_bound) %}\\n {{ return(adapter.dispatch('generate_series', 'dbt_date')(upper_bound)) }}\\n{% endmacro %}", + "meta": {}, + "name": "generate_series", + "packageName": "dbt_date", + "uniqueId": "macro.dbt_date.generate_series" + }, + { + "dependsOn": [ + "macro.dbt_date.default__get_base_dates" + ], + "description": "", + "environmentId": 132676, + "macroSql": "{% macro get_base_dates(start_date=None, end_date=None, n_dateparts=None, datepart=\"day\") %}\\n {{ adapter.dispatch('get_base_dates', 'dbt_date') (start_date, end_date, n_dateparts, datepart) }}\\n{% endmacro %}", + "meta": {}, + "name": "get_base_dates", + "packageName": "dbt_date", + "uniqueId": "macro.dbt_date.get_base_dates" + }, + { + "dependsOn": [ + "macro.dbt_date.default__get_date_dimension" + ], + "description": "", + "environmentId": 132676, + "macroSql": "{% macro get_date_dimension(start_date, end_date) %}\\n {{ adapter.dispatch('get_date_dimension', 'dbt_date') (start_date, end_date) }}\\n{% endmacro %}", + "meta": {}, + "name": "get_date_dimension", + "packageName": "dbt_date", + "uniqueId": "macro.dbt_date.get_date_dimension" + }, + { + "dependsOn": [ + "macro.dbt_date.get_fiscal_year_dates", + "macro.dbt.type_int" + ], + "description": "", + "environmentId": 132676, + "macroSql": "{% macro get_fiscal_periods(dates, year_end_month, week_start_day, shift_year=1) %}\\n{#\\nThis macro requires you to pass in a ref to a date dimension, created via\\ndbt_date.get_date_dimension()s\\n#}\\nwith fscl_year_dates_for_periods as (\\n {{ dbt_date.get_fiscal_year_dates(dates, year_end_month, week_start_day, shift_year) }}\\n),\\nfscl_year_w13 as (\\n\\n select\\n f.*,\\n -- We count the weeks in a 13 week period\\n -- and separate the 4-5-4 week sequences\\n mod(cast(\\n (f.fiscal_week_of_year-1) as {{ dbt.type_int() }}\\n ), 13) as w13_number,\\n -- Chop weeks into 13 week merch quarters\\n cast(\\n least(\\n floor((f.fiscal_week_of_year-1)/13.0)\\n , 3)\\n as {{ dbt.type_int() }}) as quarter_number\\n from\\n fscl_year_dates_for_periods f\\n\\n),\\nfscl_periods as (\\n\\n select\\n f.date_day,\\n f.fiscal_year_number,\\n f.week_start_date,\\n f.week_end_date,\\n f.fiscal_week_of_year,\\n case\\n -- we move week 53 into the 3rd period of the quarter\\n when f.fiscal_week_of_year = 53 then 3\\n when f.w13_number between 0 and 3 then 1\\n when f.w13_number between 4 and 8 then 2\\n when f.w13_number between 9 and 12 then 3\\n end as period_of_quarter,\\n f.quarter_number\\n from\\n fscl_year_w13 f\\n\\n),\\nfscl_periods_quarters as (\\n\\n select\\n f.*,\\n cast((\\n (f.quarter_number * 3) + f.period_of_quarter\\n ) as {{ dbt.type_int() }}) as fiscal_period_number\\n from\\n fscl_periods f\\n\\n)\\nselect\\n date_day,\\n fiscal_year_number,\\n week_start_date,\\n week_end_date,\\n fiscal_week_of_year,\\n dense_rank() over(partition by fiscal_period_number order by fiscal_week_of_year) as fiscal_week_of_period,\\n fiscal_period_number,\\n quarter_number+1 as 
fiscal_quarter_number,\\n period_of_quarter as fiscal_period_of_quarter\\nfrom\\n fscl_periods_quarters\\norder by 1,2\\n{% endmacro %}", + "meta": {}, + "name": "get_fiscal_periods", + "packageName": "dbt_date", + "uniqueId": "macro.dbt_date.get_fiscal_periods" + }, + { + "dependsOn": [ + "macro.dbt_date.default__get_fiscal_year_dates" + ], + "description": "", + "environmentId": 132676, + "macroSql": "{% macro get_fiscal_year_dates(dates, year_end_month=12, week_start_day=1, shift_year=1) %}\\n{{ adapter.dispatch('get_fiscal_year_dates', 'dbt_date') (dates, year_end_month, week_start_day, shift_year) }}\\n{% endmacro %}", + "meta": {}, + "name": "get_fiscal_year_dates", + "packageName": "dbt_date", + "uniqueId": "macro.dbt_date.get_fiscal_year_dates" + }, + { + "dependsOn": [ + "macro.dbt_date.default__get_intervals_between" + ], + "description": "", + "environmentId": 132676, + "macroSql": "{% macro get_intervals_between(start_date, end_date, datepart) -%}\\n {{ return(adapter.dispatch('get_intervals_between', 'dbt_date')(start_date, end_date, datepart)) }}\\n{%- endmacro %}", + "meta": {}, + "name": "get_intervals_between", + "packageName": "dbt_date", + "uniqueId": "macro.dbt_date.get_intervals_between" + }, + { + "dependsOn": [ + "macro.dbt_date.default__get_powers_of_two" + ], + "description": "", + "environmentId": 132676, + "macroSql": "{% macro get_powers_of_two(upper_bound) %}\\n {{ return(adapter.dispatch('get_powers_of_two', 'dbt_date')(upper_bound)) }}\\n{% endmacro %}", + "meta": {}, + "name": "get_powers_of_two", + "packageName": "dbt_date", + "uniqueId": "macro.dbt_date.get_powers_of_two" + }, + { + "dependsOn": [ + "macro.dbt_date.iso_week_start", + "macro.dbt_date.n_days_away" + ], + "description": "", + "environmentId": 132676, + "macroSql": "{%- macro _iso_week_end(date, week_type) -%}\\n{%- set dt = dbt_date.iso_week_start(date) -%}\\n{{ dbt_date.n_days_away(6, dt) }}\\n{%- endmacro %}", + "meta": {}, + "name": "_iso_week_end", + "packageName": "dbt_date", + "uniqueId": "macro.dbt_date._iso_week_end" + }, + { + "dependsOn": [ + "macro.dbt_date.today", + "macro.dbt_date.snowflake__iso_week_end" + ], + "description": "", + "environmentId": 132676, + "macroSql": "{%- macro iso_week_end(date=None, tz=None) -%}\\n{%-set dt = date if date else dbt_date.today(tz) -%}\\n{{ adapter.dispatch('iso_week_end', 'dbt_date') (dt) }}\\n{%- endmacro -%}\\n\\n", + "meta": {}, + "name": "iso_week_end", + "packageName": "dbt_date", + "uniqueId": "macro.dbt_date.iso_week_end" + }, + { + "dependsOn": [ + "macro.dbt_date.date_part", + "macro.dbt.type_int" + ], + "description": "", + "environmentId": 132676, + "macroSql": "{%- macro _iso_week_of_year(date, week_type) -%}\\ncast({{ dbt_date.date_part(week_type, date) }} as {{ dbt.type_int() }})\\n{%- endmacro %}", + "meta": {}, + "name": "_iso_week_of_year", + "packageName": "dbt_date", + "uniqueId": "macro.dbt_date._iso_week_of_year" + }, + { + "dependsOn": [ + "macro.dbt_date.today", + "macro.dbt_date.snowflake__iso_week_of_year" + ], + "description": "", + "environmentId": 132676, + "macroSql": "{%- macro iso_week_of_year(date=None, tz=None) -%}\\n{%-set dt = date if date else dbt_date.today(tz) -%}\\n{{ adapter.dispatch('iso_week_of_year', 'dbt_date') (dt) }}\\n{%- endmacro -%}\\n\\n", + "meta": {}, + "name": "iso_week_of_year", + "packageName": "dbt_date", + "uniqueId": "macro.dbt_date.iso_week_of_year" + }, + { + "dependsOn": [ + "macro.dbt.date_trunc" + ], + "description": "", + "environmentId": 132676, + "macroSql": "{%- macro 
_iso_week_start(date, week_type) -%}\\ncast({{ dbt.date_trunc(week_type, date) }} as date)\\n{%- endmacro %}", + "meta": {}, + "name": "_iso_week_start", + "packageName": "dbt_date", + "uniqueId": "macro.dbt_date._iso_week_start" + }, + { + "dependsOn": [ + "macro.dbt_date.today", + "macro.dbt_date.snowflake__iso_week_start" + ], + "description": "", + "environmentId": 132676, + "macroSql": "{%- macro iso_week_start(date=None, tz=None) -%}\\n{%-set dt = date if date else dbt_date.today(tz) -%}\\n{{ adapter.dispatch('iso_week_start', 'dbt_date') (dt) }}\\n{%- endmacro -%}\\n\\n", + "meta": {}, + "name": "iso_week_start", + "packageName": "dbt_date", + "uniqueId": "macro.dbt_date.iso_week_start" + }, + { + "dependsOn": [ + "macro.dbt_date.n_months_ago" + ], + "description": "", + "environmentId": 132676, + "macroSql": "{%- macro last_month(tz=None) -%}\\n{{ dbt_date.n_months_ago(1, tz) }}\\n{%- endmacro -%}", + "meta": {}, + "name": "last_month", + "packageName": "dbt_date", + "uniqueId": "macro.dbt_date.last_month" + }, + { + "dependsOn": [ + "macro.dbt_date.month_name", + "macro.dbt_date.last_month" + ], + "description": "", + "environmentId": 132676, + "macroSql": "{%- macro last_month_name(short=True, tz=None) -%}\\n{{ dbt_date.month_name(dbt_date.last_month(tz), short=short) }}\\n{%- endmacro -%}", + "meta": {}, + "name": "last_month_name", + "packageName": "dbt_date", + "uniqueId": "macro.dbt_date.last_month_name" + }, + { + "dependsOn": [ + "macro.dbt_date.date_part", + "macro.dbt_date.last_month" + ], + "description": "", + "environmentId": 132676, + "macroSql": "{%- macro last_month_number(tz=None) -%}\\n{{ dbt_date.date_part('month', dbt_date.last_month(tz)) }}\\n{%- endmacro -%}", + "meta": {}, + "name": "last_month_number", + "packageName": "dbt_date", + "uniqueId": "macro.dbt_date.last_month_number" + }, + { + "dependsOn": [ + "macro.dbt_date.n_weeks_ago" + ], + "description": "", + "environmentId": 132676, + "macroSql": "{%- macro last_week(tz=None) -%}\\n{{ dbt_date.n_weeks_ago(1, tz) }}\\n{%- endmacro -%}", + "meta": {}, + "name": "last_week", + "packageName": "dbt_date", + "uniqueId": "macro.dbt_date.last_week" + }, + { + "dependsOn": [ + "macro.dbt_date.snowflake__month_name" + ], + "description": "", + "environmentId": 132676, + "macroSql": "{%- macro month_name(date, short=True) -%}\\n {{ adapter.dispatch('month_name', 'dbt_date') (date, short) }}\\n{%- endmacro %}", + "meta": {}, + "name": "month_name", + "packageName": "dbt_date", + "uniqueId": "macro.dbt_date.month_name" + }, + { + "dependsOn": [ + "macro.dbt_date.today", + "macro.dbt.dateadd" + ], + "description": "", + "environmentId": 132676, + "macroSql": "{%- macro n_days_ago(n, date=None, tz=None) -%}\\n{%-set dt = date if date else dbt_date.today(tz) -%}\\n{%- set n = n|int -%}\\ncast({{ dbt.dateadd('day', -1 * n, dt) }} as date)\\n{%- endmacro -%}", + "meta": {}, + "name": "n_days_ago", + "packageName": "dbt_date", + "uniqueId": "macro.dbt_date.n_days_ago" + }, + { + "dependsOn": [ + "macro.dbt_date.n_days_ago" + ], + "description": "", + "environmentId": 132676, + "macroSql": "{%- macro n_days_away(n, date=None, tz=None) -%}\\n{{ dbt_date.n_days_ago(-1 * n, date, tz) }}\\n{%- endmacro -%}", + "meta": {}, + "name": "n_days_away", + "packageName": "dbt_date", + "uniqueId": "macro.dbt_date.n_days_away" + }, + { + "dependsOn": [ + "macro.dbt_date.n_months_away" + ], + "description": "", + "environmentId": 132676, + "macroSql": "{%- macro next_month(tz=None) -%}\\n{{ dbt_date.n_months_away(1, tz) }}\\n{%- endmacro 
-%}", + "meta": {}, + "name": "next_month", + "packageName": "dbt_date", + "uniqueId": "macro.dbt_date.next_month" + }, + { + "dependsOn": [ + "macro.dbt_date.month_name", + "macro.dbt_date.next_month" + ], + "description": "", + "environmentId": 132676, + "macroSql": "{%- macro next_month_name(short=True, tz=None) -%}\\n{{ dbt_date.month_name(dbt_date.next_month(tz), short=short) }}\\n{%- endmacro -%}", + "meta": {}, + "name": "next_month_name", + "packageName": "dbt_date", + "uniqueId": "macro.dbt_date.next_month_name" + }, + { + "dependsOn": [ + "macro.dbt_date.date_part", + "macro.dbt_date.next_month" + ], + "description": "", + "environmentId": 132676, + "macroSql": "{%- macro next_month_number(tz=None) -%}\\n{{ dbt_date.date_part('month', dbt_date.next_month(tz)) }}\\n{%- endmacro -%}", + "meta": {}, + "name": "next_month_number", + "packageName": "dbt_date", + "uniqueId": "macro.dbt_date.next_month_number" + }, + { + "dependsOn": [ + "macro.dbt_date.n_weeks_away" + ], + "description": "", + "environmentId": 132676, + "macroSql": "{%- macro next_week(tz=None) -%}\\n{{ dbt_date.n_weeks_away(1, tz) }}\\n{%- endmacro -%}", + "meta": {}, + "name": "next_week", + "packageName": "dbt_date", + "uniqueId": "macro.dbt_date.next_week" + }, + { + "dependsOn": [ + "macro.dbt.date_trunc", + "macro.dbt.dateadd", + "macro.dbt_date.today" + ], + "description": "", + "environmentId": 132676, + "macroSql": "{%- macro n_months_ago(n, tz=None) -%}\\n{%- set n = n|int -%}\\n{{ dbt.date_trunc('month',\\n dbt.dateadd('month', -1 * n,\\n dbt_date.today(tz)\\n )\\n ) }}\\n{%- endmacro -%}", + "meta": {}, + "name": "n_months_ago", + "packageName": "dbt_date", + "uniqueId": "macro.dbt_date.n_months_ago" + }, + { + "dependsOn": [ + "macro.dbt.date_trunc", + "macro.dbt.dateadd", + "macro.dbt_date.today" + ], + "description": "", + "environmentId": 132676, + "macroSql": "{%- macro n_months_away(n, tz=None) -%}\\n{%- set n = n|int -%}\\n{{ dbt.date_trunc('month',\\n dbt.dateadd('month', n,\\n dbt_date.today(tz)\\n )\\n ) }}\\n{%- endmacro -%}", + "meta": {}, + "name": "n_months_away", + "packageName": "dbt_date", + "uniqueId": "macro.dbt_date.n_months_away" + }, + { + "dependsOn": [ + "macro.dbt_date.convert_timezone", + "macro.dbt.current_timestamp" + ], + "description": "", + "environmentId": 132676, + "macroSql": "{%- macro now(tz=None) -%}\\n{{ dbt_date.convert_timezone(dbt.current_timestamp(), tz) }}\\n{%- endmacro -%}", + "meta": {}, + "name": "now", + "packageName": "dbt_date", + "uniqueId": "macro.dbt_date.now" + }, + { + "dependsOn": [ + "macro.dbt.date_trunc", + "macro.dbt.dateadd", + "macro.dbt_date.today" + ], + "description": "", + "environmentId": 132676, + "macroSql": "{%- macro n_weeks_ago(n, tz=None) -%}\\n{%- set n = n|int -%}\\n{{ dbt.date_trunc('week',\\n dbt.dateadd('week', -1 * n,\\n dbt_date.today(tz)\\n )\\n ) }}\\n{%- endmacro -%}", + "meta": {}, + "name": "n_weeks_ago", + "packageName": "dbt_date", + "uniqueId": "macro.dbt_date.n_weeks_ago" + }, + { + "dependsOn": [ + "macro.dbt.date_trunc", + "macro.dbt.dateadd", + "macro.dbt_date.today" + ], + "description": "", + "environmentId": 132676, + "macroSql": "{%- macro n_weeks_away(n, tz=None) -%}\\n{%- set n = n|int -%}\\n{{ dbt.date_trunc('week',\\n dbt.dateadd('week', n,\\n dbt_date.today(tz)\\n )\\n ) }}\\n{%- endmacro -%}", + "meta": {}, + "name": "n_weeks_away", + "packageName": "dbt_date", + "uniqueId": "macro.dbt_date.n_weeks_away" + }, + { + "dependsOn": [ + "macro.dbt.datediff", + "macro.dbt_date.now" + ], + "description": "", + 
"environmentId": 132676, + "macroSql": "{%- macro periods_since(date_col, period_name='day', tz=None) -%}\\n{{ dbt.datediff(date_col, dbt_date.now(tz), period_name) }}\\n{%- endmacro -%}", + "meta": {}, + "name": "periods_since", + "packageName": "dbt_date", + "uniqueId": "macro.dbt_date.periods_since" + }, + { + "dependsOn": [ + "macro.dbt.type_timestamp" + ], + "description": "", + "environmentId": 132676, + "macroSql": "{% macro postgres__convert_timezone(column, target_tz, source_tz) -%}\\ncast(\\n cast({{ column }} as {{ dbt.type_timestamp() }})\\n at time zone '{{ source_tz }}' at time zone '{{ target_tz }}' as {{ dbt.type_timestamp() }}\\n)\\n{%- endmacro -%}\\n\\n", + "meta": {}, + "name": "postgres__convert_timezone", + "packageName": "dbt_date", + "uniqueId": "macro.dbt_date.postgres__convert_timezone" + }, + { + "dependsOn": [], + "description": "", + "environmentId": 132676, + "macroSql": "\\n\\n{%- macro postgres__day_name(date, short) -%}\\n{# FM = Fill mode, which suppresses padding blanks #}\\n{%- set f = 'FMDy' if short else 'FMDay' -%}\\n to_char({{ date }}, '{{ f }}')\\n{%- endmacro %}", + "meta": {}, + "name": "postgres__day_name", + "packageName": "dbt_date", + "uniqueId": "macro.dbt_date.postgres__day_name" + }, + { + "dependsOn": [ + "macro.dbt_date.date_part", + "macro.dbt.type_int" + ], + "description": "", + "environmentId": 132676, + "macroSql": "\\n\\n\\n{%- macro postgres__day_of_week(date, isoweek) -%}\\n\\n {%- if isoweek -%}\\n {%- set dow_part = 'isodow' -%}\\n -- Monday(1) to Sunday (7)\\n cast({{ dbt_date.date_part(dow_part, date) }} as {{ dbt.type_int() }})\\n {%- else -%}\\n {%- set dow_part = 'dow' -%}\\n -- Sunday(1) to Saturday (7)\\n cast({{ dbt_date.date_part(dow_part, date) }} + 1 as {{ dbt.type_int() }})\\n {%- endif -%}\\n\\n{%- endmacro %}", + "meta": {}, + "name": "postgres__day_of_week", + "packageName": "dbt_date", + "uniqueId": "macro.dbt_date.postgres__day_of_week" + }, + { + "dependsOn": [ + "macro.dbt_date.date_part" + ], + "description": "", + "environmentId": 132676, + "macroSql": "\\n\\n{%- macro postgres__day_of_year(date) -%}\\n {{ dbt_date.date_part('doy', date) }}\\n{%- endmacro %}", + "meta": {}, + "name": "postgres__day_of_year", + "packageName": "dbt_date", + "uniqueId": "macro.dbt_date.postgres__day_of_year" + }, + { + "dependsOn": [], + "description": "", + "environmentId": 132676, + "macroSql": "\\n\\n{%- macro postgres__from_unixtimestamp(epochs, format=\"seconds\") -%}\\n {%- if format != \"seconds\" -%}\\n {{ exceptions.raise_compiler_error(\\n \"value \" ~ format ~ \" for `format` for from_unixtimestamp is not supported.\"\\n )\\n }}\\n {% endif -%}\\n cast(to_timestamp({{ epochs }}) at time zone 'UTC' as timestamp)\\n{%- endmacro %}", + "meta": {}, + "name": "postgres__from_unixtimestamp", + "packageName": "dbt_date", + "uniqueId": "macro.dbt_date.postgres__from_unixtimestamp" + }, + { + "dependsOn": [ + "macro.dbt_date.get_base_dates", + "macro.dbt.dateadd", + "macro.dbt_date.yesterday", + "macro.dbt_date.tomorrow", + "macro.dbt_date.day_of_week", + "macro.dbt_date.day_name", + "macro.dbt_date.day_of_month", + "macro.dbt_date.day_of_year", + "macro.dbt_date.week_start", + "macro.dbt_date.week_end", + "macro.dbt_date.week_of_year", + "macro.dbt_date.iso_week_start", + "macro.dbt_date.iso_week_end", + "macro.dbt_date.iso_week_of_year", + "macro.dbt_date.date_part", + "macro.dbt.type_int", + "macro.dbt_date.month_name", + "macro.dbt.date_trunc", + "macro.dbt.last_day" + ], + "description": "", + "environmentId": 132676, + 
"macroSql": "{% macro postgres__get_date_dimension(start_date, end_date) %}\\nwith base_dates as (\\n {{ dbt_date.get_base_dates(start_date, end_date) }}\\n),\\ndates_with_prior_year_dates as (\\n\\n select\\n cast(d.date_day as date) as date_day,\\n cast({{ dbt.dateadd('year', -1 , 'd.date_day') }} as date) as prior_year_date_day,\\n cast({{ dbt.dateadd('day', -364 , 'd.date_day') }} as date) as prior_year_over_year_date_day\\n from\\n \\tbase_dates d\\n\\n)\\nselect\\n d.date_day,\\n {{ dbt_date.yesterday('d.date_day') }} as prior_date_day,\\n {{ dbt_date.tomorrow('d.date_day') }} as next_date_day,\\n d.prior_year_date_day as prior_year_date_day,\\n d.prior_year_over_year_date_day,\\n {{ dbt_date.day_of_week('d.date_day', isoweek=true) }} as day_of_week,\\n\\n {{ dbt_date.day_name('d.date_day', short=false) }} as day_of_week_name,\\n {{ dbt_date.day_name('d.date_day', short=true) }} as day_of_week_name_short,\\n {{ dbt_date.day_of_month('d.date_day') }} as day_of_month,\\n {{ dbt_date.day_of_year('d.date_day') }} as day_of_year,\\n\\n {{ dbt_date.week_start('d.date_day') }} as week_start_date,\\n {{ dbt_date.week_end('d.date_day') }} as week_end_date,\\n {{ dbt_date.week_start('d.prior_year_over_year_date_day') }} as prior_year_week_start_date,\\n {{ dbt_date.week_end('d.prior_year_over_year_date_day') }} as prior_year_week_end_date,\\n {{ dbt_date.week_of_year('d.date_day') }} as week_of_year,\\n\\n {{ dbt_date.iso_week_start('d.date_day') }} as iso_week_start_date,\\n {{ dbt_date.iso_week_end('d.date_day') }} as iso_week_end_date,\\n {{ dbt_date.iso_week_start('d.prior_year_over_year_date_day') }} as prior_year_iso_week_start_date,\\n {{ dbt_date.iso_week_end('d.prior_year_over_year_date_day') }} as prior_year_iso_week_end_date,\\n {{ dbt_date.iso_week_of_year('d.date_day') }} as iso_week_of_year,\\n\\n {{ dbt_date.week_of_year('d.prior_year_over_year_date_day') }} as prior_year_week_of_year,\\n {{ dbt_date.iso_week_of_year('d.prior_year_over_year_date_day') }} as prior_year_iso_week_of_year,\\n\\n cast({{ dbt_date.date_part('month', 'd.date_day') }} as {{ dbt.type_int() }}) as month_of_year,\\n {{ dbt_date.month_name('d.date_day', short=false) }} as month_name,\\n {{ dbt_date.month_name('d.date_day', short=true) }} as month_name_short,\\n\\n cast({{ dbt.date_trunc('month', 'd.date_day') }} as date) as month_start_date,\\n cast({{ last_day('d.date_day', 'month') }} as date) as month_end_date,\\n\\n cast({{ dbt.date_trunc('month', 'd.prior_year_date_day') }} as date) as prior_year_month_start_date,\\n cast({{ last_day('d.prior_year_date_day', 'month') }} as date) as prior_year_month_end_date,\\n\\n cast({{ dbt_date.date_part('quarter', 'd.date_day') }} as {{ dbt.type_int() }}) as quarter_of_year,\\n cast({{ dbt.date_trunc('quarter', 'd.date_day') }} as date) as quarter_start_date,\\n {# last_day does not support quarter because postgresql does not support quarter interval. 
#}\\n cast({{dbt.dateadd('day', '-1', dbt.dateadd('month', '3', dbt.date_trunc('quarter', 'd.date_day')))}} as date) as quarter_end_date,\\n\\n cast({{ dbt_date.date_part('year', 'd.date_day') }} as {{ dbt.type_int() }}) as year_number,\\n cast({{ dbt.date_trunc('year', 'd.date_day') }} as date) as year_start_date,\\n cast({{ last_day('d.date_day', 'year') }} as date) as year_end_date\\nfrom\\n dates_with_prior_year_dates d\\norder by 1\\n{% endmacro %}", + "meta": {}, + "name": "postgres__get_date_dimension", + "packageName": "dbt_date", + "uniqueId": "macro.dbt_date.postgres__get_date_dimension" + }, + { + "dependsOn": [ + "macro.dbt_date._iso_week_of_year" + ], + "description": "", + "environmentId": 132676, + "macroSql": "\\n\\n{%- macro postgres__iso_week_of_year(date) -%}\\n-- postgresql week is isoweek, the first week of a year containing January 4 of that year.\\n{{ dbt_date._iso_week_of_year(date, 'week') }}\\n{%- endmacro %}", + "meta": {}, + "name": "postgres__iso_week_of_year", + "packageName": "dbt_date", + "uniqueId": "macro.dbt_date.postgres__iso_week_of_year" + }, + { + "dependsOn": [ + "macro.dbt_date._iso_week_start" + ], + "description": "", + "environmentId": 132676, + "macroSql": "\\n\\n{%- macro postgres__iso_week_start(date) -%}\\n{{ dbt_date._iso_week_start(date, 'week') }}\\n{%- endmacro %}", + "meta": {}, + "name": "postgres__iso_week_start", + "packageName": "dbt_date", + "uniqueId": "macro.dbt_date.postgres__iso_week_start" + }, + { + "dependsOn": [], + "description": "", + "environmentId": 132676, + "macroSql": "\\n\\n{%- macro postgres__month_name(date, short) -%}\\n{# FM = Fill mode, which suppresses padding blanks #}\\n{%- set f = 'FMMon' if short else 'FMMonth' -%}\\n to_char({{ date }}, '{{ f }}')\\n{%- endmacro %}", + "meta": {}, + "name": "postgres__month_name", + "packageName": "dbt_date", + "uniqueId": "macro.dbt_date.postgres__month_name" + }, + { + "dependsOn": [ + "macro.dbt_date.week_start", + "macro.dbt_date.n_days_away" + ], + "description": "", + "environmentId": 132676, + "macroSql": "\\n\\n{%- macro postgres__week_end(date) -%}\\n{%- set dt = dbt_date.week_start(date) -%}\\n{{ dbt_date.n_days_away(6, dt) }}\\n{%- endmacro %}", + "meta": {}, + "name": "postgres__week_end", + "packageName": "dbt_date", + "uniqueId": "macro.dbt_date.postgres__week_end" + }, + { + "dependsOn": [ + "macro.dbt.type_int" + ], + "description": "", + "environmentId": 132676, + "macroSql": "\\n\\n{%- macro postgres__week_of_year(date) -%}\\n{# postgresql 'week' returns isoweek. 
Use to_char instead.\\n WW = the first week starts on the first day of the year #}\\ncast(to_char({{ date }}, 'WW') as {{ dbt.type_int() }})\\n{%- endmacro %}", + "meta": {}, + "name": "postgres__week_of_year", + "packageName": "dbt_date", + "uniqueId": "macro.dbt_date.postgres__week_of_year" + }, + { + "dependsOn": [ + "macro.dbt.dateadd", + "macro.dbt.date_trunc" + ], + "description": "", + "environmentId": 132676, + "macroSql": "\\n\\n{%- macro postgres__week_start(date) -%}\\n-- Sunday as week start date\\ncast({{ dbt.dateadd('day', -1, dbt.date_trunc('week', dbt.dateadd('day', 1, date))) }} as date)\\n{%- endmacro %}", + "meta": {}, + "name": "postgres__week_start", + "packageName": "dbt_date", + "uniqueId": "macro.dbt_date.postgres__week_start" + }, + { + "dependsOn": [ + "macro.dbt_date.default__convert_timezone" + ], + "description": "", + "environmentId": 132676, + "macroSql": "{%- macro redshift__convert_timezone(column, target_tz, source_tz) -%}\\n{{ return(dbt_date.default__convert_timezone(column, target_tz, source_tz)) }}\\n{%- endmacro -%}", + "meta": {}, + "name": "redshift__convert_timezone", + "packageName": "dbt_date", + "uniqueId": "macro.dbt_date.redshift__convert_timezone" + }, + { + "dependsOn": [ + "macro.dbt_date.date_part", + "macro.dbt.type_bigint" + ], + "description": "", + "environmentId": 132676, + "macroSql": "\\n\\n{%- macro redshift__day_of_month(date) -%}\\ncast({{ dbt_date.date_part('day', date) }} as {{ dbt.type_bigint() }})\\n{%- endmacro %}", + "meta": {}, + "name": "redshift__day_of_month", + "packageName": "dbt_date", + "uniqueId": "macro.dbt_date.redshift__day_of_month" + }, + { + "dependsOn": [ + "macro.dbt_date.date_part", + "macro.dbt.type_bigint" + ], + "description": "", + "environmentId": 132676, + "macroSql": "\\n\\n\\n{%- macro redshift__day_of_week(date, isoweek) -%}\\n\\n {%- set dow = dbt_date.date_part('dayofweek', date) -%}\\n\\n {%- if isoweek -%}\\n case\\n -- Shift start of week from Sunday (0) to Monday (1)\\n when {{ dow }} = 0 then 7\\n else cast({{ dow }} as {{ dbt.type_bigint() }})\\n end\\n {%- else -%}\\n cast({{ dow }} + 1 as {{ dbt.type_bigint() }})\\n {%- endif -%}\\n\\n{%- endmacro %}", + "meta": {}, + "name": "redshift__day_of_week", + "packageName": "dbt_date", + "uniqueId": "macro.dbt_date.redshift__day_of_week" + }, + { + "dependsOn": [ + "macro.dbt_date.date_part", + "macro.dbt.type_bigint" + ], + "description": "", + "environmentId": 132676, + "macroSql": "\\n\\n{%- macro redshift__day_of_year(date) -%}\\n cast({{ dbt_date.date_part('dayofyear', date) }} as {{ dbt.type_bigint() }})\\n{%- endmacro %}", + "meta": {}, + "name": "redshift__day_of_year", + "packageName": "dbt_date", + "uniqueId": "macro.dbt_date.redshift__day_of_year" + }, + { + "dependsOn": [ + "macro.dbt.date_trunc", + "macro.dbt.dateadd" + ], + "description": "", + "environmentId": 132676, + "macroSql": "{% macro round_timestamp(timestamp) %}\\n {{ dbt.date_trunc(\"day\", dbt.dateadd(\"hour\", 12, timestamp)) }}\\n{% endmacro %}", + "meta": {}, + "name": "round_timestamp", + "packageName": "dbt_date", + "uniqueId": "macro.dbt_date.round_timestamp" + }, + { + "dependsOn": [], + "description": "", + "environmentId": 132676, + "macroSql": "\\n\\n{%- macro snowflake__day_name(date, short) -%}\\n {%- if short -%}\\n dayname({{ date }})\\n {%- else -%}\\n -- long version not implemented on Snowflake so we're doing it manually :/\\n case dayname({{ date }})\\n when 'Mon' then 'Monday'\\n when 'Tue' then 'Tuesday'\\n when 'Wed' then 'Wednesday'\\n when 'Thu' 
then 'Thursday'\\n when 'Fri' then 'Friday'\\n when 'Sat' then 'Saturday'\\n when 'Sun' then 'Sunday'\\n end\\n {%- endif -%}\\n\\n{%- endmacro %}", + "meta": {}, + "name": "snowflake__day_name", + "packageName": "dbt_date", + "uniqueId": "macro.dbt_date.snowflake__day_name" + }, + { + "dependsOn": [ + "macro.dbt_date.date_part" + ], + "description": "", + "environmentId": 132676, + "macroSql": "\\n\\n{%- macro snowflake__day_of_week(date, isoweek) -%}\\n\\n {%- if isoweek -%}\\n {%- set dow_part = 'dayofweekiso' -%}\\n {{ dbt_date.date_part(dow_part, date) }}\\n {%- else -%}\\n {%- set dow_part = 'dayofweek' -%}\\n case\\n when {{ dbt_date.date_part(dow_part, date) }} = 7 then 1\\n else {{ dbt_date.date_part(dow_part, date) }} + 1\\n end\\n {%- endif -%}\\n\\n\\n\\n{%- endmacro %}", + "meta": {}, + "name": "snowflake__day_of_week", + "packageName": "dbt_date", + "uniqueId": "macro.dbt_date.snowflake__day_of_week" + }, + { + "dependsOn": [], + "description": "", + "environmentId": 132676, + "macroSql": "\\n\\n{%- macro snowflake__from_unixtimestamp(epochs, format) -%}\\n {%- if format == \"seconds\" -%}\\n {%- set scale = 0 -%}\\n {%- elif format == \"milliseconds\" -%}\\n {%- set scale = 3 -%}\\n {%- elif format == \"microseconds\" -%}\\n {%- set scale = 6 -%}\\n {%- else -%}\\n {{ exceptions.raise_compiler_error(\\n \"value \" ~ format ~ \" for `format` for from_unixtimestamp is not supported.\"\\n )\\n }}\\n {% endif -%}\\n to_timestamp_ntz({{ epochs }}, {{ scale }})\\n\\n{%- endmacro %}", + "meta": {}, + "name": "snowflake__from_unixtimestamp", + "packageName": "dbt_date", + "uniqueId": "macro.dbt_date.snowflake__from_unixtimestamp" + }, + { + "dependsOn": [ + "macro.dbt_date._iso_week_end" + ], + "description": "", + "environmentId": 132676, + "macroSql": "\\n\\n{%- macro snowflake__iso_week_end(date) -%}\\n{{ dbt_date._iso_week_end(date, 'weekiso') }}\\n{%- endmacro %}", + "meta": {}, + "name": "snowflake__iso_week_end", + "packageName": "dbt_date", + "uniqueId": "macro.dbt_date.snowflake__iso_week_end" + }, + { + "dependsOn": [ + "macro.dbt_date._iso_week_of_year" + ], + "description": "", + "environmentId": 132676, + "macroSql": "\\n\\n{%- macro snowflake__iso_week_of_year(date) -%}\\n{{ dbt_date._iso_week_of_year(date, 'weekiso') }}\\n{%- endmacro %}", + "meta": {}, + "name": "snowflake__iso_week_of_year", + "packageName": "dbt_date", + "uniqueId": "macro.dbt_date.snowflake__iso_week_of_year" + }, + { + "dependsOn": [ + "macro.dbt_date._iso_week_start" + ], + "description": "", + "environmentId": 132676, + "macroSql": "\\n\\n{%- macro snowflake__iso_week_start(date) -%}\\n{{ dbt_date._iso_week_start(date, 'week') }}\\n{%- endmacro %}", + "meta": {}, + "name": "snowflake__iso_week_start", + "packageName": "dbt_date", + "uniqueId": "macro.dbt_date.snowflake__iso_week_start" + }, + { + "dependsOn": [], + "description": "", + "environmentId": 132676, + "macroSql": "\\n\\n{%- macro snowflake__month_name(date, short) -%}\\n{%- set f = 'MON' if short else 'MMMM' -%}\\n to_char({{ date }}, '{{ f }}')\\n{%- endmacro %}", + "meta": {}, + "name": "snowflake__month_name", + "packageName": "dbt_date", + "uniqueId": "macro.dbt_date.snowflake__month_name" + }, + { + "dependsOn": [ + "macro.dbt_date.date_part" + ], + "description": "", + "environmentId": 132676, + "macroSql": "\\n\\n{%- macro snowflake__to_unixtimestamp(timestamp) -%}\\n {{ dbt_date.date_part('epoch_seconds', timestamp) }}\\n{%- endmacro %}", + "meta": {}, + "name": "snowflake__to_unixtimestamp", + "packageName": "dbt_date", + 
"uniqueId": "macro.dbt_date.snowflake__to_unixtimestamp" + }, + { + "dependsOn": [ + "macro.dbt_date.week_start", + "macro.dbt_date.n_days_away" + ], + "description": "", + "environmentId": 132676, + "macroSql": "\\n\\n{%- macro snowflake__week_end(date) -%}\\n{%- set dt = dbt_date.week_start(date) -%}\\n{{ dbt_date.n_days_away(6, dt) }}\\n{%- endmacro %}", + "meta": {}, + "name": "snowflake__week_end", + "packageName": "dbt_date", + "uniqueId": "macro.dbt_date.snowflake__week_end" + }, + { + "dependsOn": [ + "macro.dbt_date.day_of_week", + "macro.dbt.dateadd" + ], + "description": "", + "environmentId": 132676, + "macroSql": "\\n\\n{%- macro snowflake__week_start(date) -%}\\n {#\\n Get the day of week offset: e.g. if the date is a Sunday,\\n dbt_date.day_of_week returns 1, so we subtract 1 to get a 0 offset\\n #}\\n {% set off_set = dbt_date.day_of_week(date, isoweek=False) ~ \" - 1\" %}\\n cast({{ dbt.dateadd(\"day\", \"-1 * (\" ~ off_set ~ \")\", date) }} as date)\\n{%- endmacro %}", + "meta": {}, + "name": "snowflake__week_start", + "packageName": "dbt_date", + "uniqueId": "macro.dbt_date.snowflake__week_start" + }, + { + "dependsOn": [], + "description": "", + "environmentId": 132676, + "macroSql": "{%- macro spark__convert_timezone(column, target_tz, source_tz) -%}\\nfrom_utc_timestamp(\\n to_utc_timestamp({{ column }}, '{{ source_tz }}'),\\n '{{ target_tz }}'\\n )\\n{%- endmacro -%}\\n\\n", + "meta": {}, + "name": "spark__convert_timezone", + "packageName": "dbt_date", + "uniqueId": "macro.dbt_date.spark__convert_timezone" + }, + { + "dependsOn": [ + "macro.dbt_date.now" + ], + "description": "", + "environmentId": 132676, + "macroSql": "{%- macro today(tz=None) -%}\\ncast({{ dbt_date.now(tz) }} as date)\\n{%- endmacro -%}", + "meta": {}, + "name": "today", + "packageName": "dbt_date", + "uniqueId": "macro.dbt_date.today" + }, + { + "dependsOn": [ + "macro.dbt_date.n_days_away" + ], + "description": "", + "environmentId": 132676, + "macroSql": "{%- macro tomorrow(date=None, tz=None) -%}\\n{{ dbt_date.n_days_away(1, date, tz) }}\\n{%- endmacro -%}", + "meta": {}, + "name": "tomorrow", + "packageName": "dbt_date", + "uniqueId": "macro.dbt_date.tomorrow" + }, + { + "dependsOn": [ + "macro.dbt_date.snowflake__to_unixtimestamp" + ], + "description": "", + "environmentId": 132676, + "macroSql": "{%- macro to_unixtimestamp(timestamp) -%}\\n {{ adapter.dispatch('to_unixtimestamp', 'dbt_date') (timestamp) }}\\n{%- endmacro %}", + "meta": {}, + "name": "to_unixtimestamp", + "packageName": "dbt_date", + "uniqueId": "macro.dbt_date.to_unixtimestamp" + }, + { + "dependsOn": [ + "macro.dbt_date.today", + "macro.dbt_date.snowflake__week_end" + ], + "description": "", + "environmentId": 132676, + "macroSql": "{%- macro week_end(date=None, tz=None) -%}\\n{%-set dt = date if date else dbt_date.today(tz) -%}\\n{{ adapter.dispatch('week_end', 'dbt_date') (dt) }}\\n{%- endmacro -%}\\n\\n", + "meta": {}, + "name": "week_end", + "packageName": "dbt_date", + "uniqueId": "macro.dbt_date.week_end" + }, + { + "dependsOn": [ + "macro.dbt_date.today", + "macro.dbt_date.default__week_of_year" + ], + "description": "", + "environmentId": 132676, + "macroSql": "{%- macro week_of_year(date=None, tz=None) -%}\\n{%-set dt = date if date else dbt_date.today(tz) -%}\\n{{ adapter.dispatch('week_of_year', 'dbt_date') (dt) }}\\n{%- endmacro -%}\\n\\n", + "meta": {}, + "name": "week_of_year", + "packageName": "dbt_date", + "uniqueId": "macro.dbt_date.week_of_year" + }, + { + "dependsOn": [ + "macro.dbt_date.today", + 
"macro.dbt_date.snowflake__week_start" + ], + "description": "", + "environmentId": 132676, + "macroSql": "{%- macro week_start(date=None, tz=None) -%}\\n{%-set dt = date if date else dbt_date.today(tz) -%}\\n{{ adapter.dispatch('week_start', 'dbt_date') (dt) }}\\n{%- endmacro -%}\\n\\n", + "meta": {}, + "name": "week_start", + "packageName": "dbt_date", + "uniqueId": "macro.dbt_date.week_start" + }, + { + "dependsOn": [ + "macro.dbt_date.n_days_ago" + ], + "description": "", + "environmentId": 132676, + "macroSql": "{%- macro yesterday(date=None, tz=None) -%}\\n{{ dbt_date.n_days_ago(1, date, tz) }}\\n{%- endmacro -%}", + "meta": {}, + "name": "yesterday", + "packageName": "dbt_date", + "uniqueId": "macro.dbt_date.yesterday" + }, + { + "dependsOn": [], + "description": "", + "environmentId": 132676, + "macroSql": "{% macro cents_to_dollars(column_name, precision=2) -%}\\n ({{ column_name }} / 100)::numeric(16, {{ precision }})\\n{%- endmacro %}", + "meta": {}, + "name": "cents_to_dollars", + "packageName": "jaffle_shop", + "uniqueId": "macro.jaffle_shop.cents_to_dollars" + } + ] + } +} diff --git a/tests/dbt/cloud/fake_graphql_server/get_job_run_macros/london_bike_analysis.json b/tests/dbt/cloud/fake_graphql_server/get_job_run_macros/london_bike_analysis.json new file mode 100644 index 00000000..183fa652 --- /dev/null +++ b/tests/dbt/cloud/fake_graphql_server/get_job_run_macros/london_bike_analysis.json @@ -0,0 +1,5 @@ +{ + "job": { + "macros": [] + } +} diff --git a/tests/dbt/cloud/fake_graphql_server/get_job_run_macros/metaphor_subscriptions.json b/tests/dbt/cloud/fake_graphql_server/get_job_run_macros/metaphor_subscriptions.json new file mode 100644 index 00000000..183fa652 --- /dev/null +++ b/tests/dbt/cloud/fake_graphql_server/get_job_run_macros/metaphor_subscriptions.json @@ -0,0 +1,5 @@ +{ + "job": { + "macros": [] + } +} diff --git a/tests/dbt/cloud/fake_graphql_server/get_job_run_metrics/__init__.py b/tests/dbt/cloud/fake_graphql_server/get_job_run_metrics/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/tests/dbt/cloud/fake_graphql_server/get_job_run_metrics/endpoint.py b/tests/dbt/cloud/fake_graphql_server/get_job_run_metrics/endpoint.py new file mode 100644 index 00000000..28c1464b --- /dev/null +++ b/tests/dbt/cloud/fake_graphql_server/get_job_run_metrics/endpoint.py @@ -0,0 +1,12 @@ +import os +from typing import Any, Dict + +from ..targets import job_targets + +dir_path = os.path.dirname(os.path.realpath(__file__)) + + +def fake_GetJobRunMetrics(variables: Dict[str, Any]): + target = job_targets[variables["jobId"]] + with open(f"{dir_path}/{target}.json") as f: + return f.read() diff --git a/tests/dbt/cloud/fake_graphql_server/get_job_run_metrics/jaffle_shop.json b/tests/dbt/cloud/fake_graphql_server/get_job_run_metrics/jaffle_shop.json new file mode 100644 index 00000000..b42474ce --- /dev/null +++ b/tests/dbt/cloud/fake_graphql_server/get_job_run_metrics/jaffle_shop.json @@ -0,0 +1,278 @@ +{ + "job": { + "metrics": [ + { + "packageName": "jaffle_shop", + "label": "Cumulative Revenue (All Time)", + "description": "The cumulative revenue for all orders.", + "dependsOn": [ + "semantic_model.jaffle_shop.order_items" + ], + "uniqueId": "metric.jaffle_shop.cumulative_revenue", + "timeGrains": [], + "timestamp": null, + "dimensions": [], + "filters": [], + "tags": [], + "type": "cumulative", + "sql": null, + "expression": null, + "calculation_method": "cumulative" + }, + { + "packageName": "jaffle_shop", + "label": "Customers w/ Orders", + "description": 
"Distict count of customers placing orders", + "dependsOn": [ + "semantic_model.jaffle_shop.orders" + ], + "uniqueId": "metric.jaffle_shop.customers_with_orders", + "timeGrains": [], + "timestamp": null, + "dimensions": [], + "filters": [], + "tags": [], + "type": "simple", + "sql": null, + "expression": null, + "calculation_method": "simple" + }, + { + "packageName": "jaffle_shop", + "label": "Food Orders", + "description": "Count of orders that contain food order items", + "dependsOn": [ + "semantic_model.jaffle_shop.orders" + ], + "uniqueId": "metric.jaffle_shop.food_orders", + "timeGrains": [], + "timestamp": null, + "dimensions": [], + "filters": [], + "tags": [], + "type": "simple", + "sql": null, + "expression": null, + "calculation_method": "simple" + }, + { + "packageName": "jaffle_shop", + "label": "Food Revenue", + "description": "The revenue from food in each order", + "dependsOn": [ + "semantic_model.jaffle_shop.order_items" + ], + "uniqueId": "metric.jaffle_shop.food_revenue", + "timeGrains": [], + "timestamp": null, + "dimensions": [], + "filters": [], + "tags": [], + "type": "simple", + "sql": null, + "expression": null, + "calculation_method": "simple" + }, + { + "packageName": "jaffle_shop", + "label": "Food Revenue %", + "description": "The % of order revenue from food.", + "dependsOn": [ + "metric.jaffle_shop.food_revenue", + "metric.jaffle_shop.revenue" + ], + "uniqueId": "metric.jaffle_shop.food_revenue_pct", + "timeGrains": [], + "timestamp": null, + "dimensions": [], + "filters": [], + "tags": [], + "type": "ratio", + "sql": null, + "expression": null, + "calculation_method": "ratio" + }, + { + "packageName": "jaffle_shop", + "label": "Large Orders", + "description": "Count of orders with order total over 20.", + "dependsOn": [ + "semantic_model.jaffle_shop.orders" + ], + "uniqueId": "metric.jaffle_shop.large_order", + "timeGrains": [], + "timestamp": null, + "dimensions": [], + "filters": [], + "tags": [], + "type": "simple", + "sql": null, + "expression": null, + "calculation_method": "simple" + }, + { + "packageName": "jaffle_shop", + "label": "Locations", + "description": "Count of locations that placed in order.", + "dependsOn": [ + "semantic_model.jaffle_shop.orders" + ], + "uniqueId": "metric.jaffle_shop.locations", + "timeGrains": [], + "timestamp": null, + "dimensions": [], + "filters": [], + "tags": [], + "type": "simple", + "sql": null, + "expression": null, + "calculation_method": "simple" + }, + { + "packageName": "jaffle_shop", + "label": "Median Revenue", + "description": "The median revenue for each order item. 
Excludes tax.", + "dependsOn": [ + "semantic_model.jaffle_shop.order_items" + ], + "uniqueId": "metric.jaffle_shop.median_revenue", + "timeGrains": [], + "timestamp": null, + "dimensions": [], + "filters": [], + "tags": [], + "type": "simple", + "sql": null, + "expression": null, + "calculation_method": "simple" + }, + { + "packageName": "jaffle_shop", + "label": "New Customers", + "description": "Unique count of new customers.", + "dependsOn": [ + "semantic_model.jaffle_shop.orders" + ], + "uniqueId": "metric.jaffle_shop.new_customer", + "timeGrains": [], + "timestamp": null, + "dimensions": [], + "filters": [], + "tags": [], + "type": "simple", + "sql": null, + "expression": null, + "calculation_method": "simple" + }, + { + "packageName": "jaffle_shop", + "label": "Order Cost", + "description": "Sum of cost for each order item.", + "dependsOn": [ + "semantic_model.jaffle_shop.orders" + ], + "uniqueId": "metric.jaffle_shop.order_cost", + "timeGrains": [], + "timestamp": null, + "dimensions": [], + "filters": [], + "tags": [], + "type": "simple", + "sql": null, + "expression": null, + "calculation_method": "simple" + }, + { + "packageName": "jaffle_shop", + "label": "Order Gross Profit", + "description": "Gross profit from each order.", + "dependsOn": [ + "metric.jaffle_shop.order_cost", + "metric.jaffle_shop.revenue" + ], + "uniqueId": "metric.jaffle_shop.order_gross_profit", + "timeGrains": [], + "timestamp": null, + "dimensions": [], + "filters": [], + "tags": [], + "type": "derived", + "sql": null, + "expression": null, + "calculation_method": "derived" + }, + { + "packageName": "jaffle_shop", + "label": "Orders", + "description": "Count of orders.", + "dependsOn": [ + "semantic_model.jaffle_shop.orders" + ], + "uniqueId": "metric.jaffle_shop.orders", + "timeGrains": [], + "timestamp": null, + "dimensions": [], + "filters": [], + "tags": [], + "type": "simple", + "sql": null, + "expression": null, + "calculation_method": "simple" + }, + { + "packageName": "jaffle_shop", + "label": "Order Total", + "description": "Sum of total order amonunt. Includes tax + revenue.", + "dependsOn": [ + "semantic_model.jaffle_shop.orders" + ], + "uniqueId": "metric.jaffle_shop.order_total", + "timeGrains": [], + "timestamp": null, + "dimensions": [], + "filters": [], + "tags": [], + "type": "simple", + "sql": null, + "expression": null, + "calculation_method": "simple" + }, + { + "packageName": "jaffle_shop", + "label": "Revenue", + "description": "Sum of the product revenue for each order item. Excludes tax.", + "dependsOn": [ + "semantic_model.jaffle_shop.order_items" + ], + "uniqueId": "metric.jaffle_shop.revenue", + "timeGrains": [], + "timestamp": null, + "dimensions": [], + "filters": [], + "tags": [], + "type": "simple", + "sql": null, + "expression": null, + "calculation_method": "simple" + }, + { + "packageName": "jaffle_shop", + "label": "Revenue Growth % M/M", + "description": "Percentage growth of revenue compared to 1 month ago. 
Excluded tax", + "dependsOn": [ + "metric.jaffle_shop.revenue" + ], + "uniqueId": "metric.jaffle_shop.revenue_growth_mom", + "timeGrains": [], + "timestamp": null, + "dimensions": [], + "filters": [], + "tags": [], + "type": "derived", + "sql": null, + "expression": null, + "calculation_method": "derived" + } + ] + } +} diff --git a/tests/dbt/cloud/fake_graphql_server/get_job_run_metrics/london_bike_analysis.json b/tests/dbt/cloud/fake_graphql_server/get_job_run_metrics/london_bike_analysis.json new file mode 100644 index 00000000..7bc09f91 --- /dev/null +++ b/tests/dbt/cloud/fake_graphql_server/get_job_run_metrics/london_bike_analysis.json @@ -0,0 +1,5 @@ +{ + "job": { + "metrics": [] + } +} diff --git a/tests/dbt/cloud/fake_graphql_server/get_job_run_metrics/metaphor_subscriptions.json b/tests/dbt/cloud/fake_graphql_server/get_job_run_metrics/metaphor_subscriptions.json new file mode 100644 index 00000000..7bc09f91 --- /dev/null +++ b/tests/dbt/cloud/fake_graphql_server/get_job_run_metrics/metaphor_subscriptions.json @@ -0,0 +1,5 @@ +{ + "job": { + "metrics": [] + } +} diff --git a/tests/dbt/cloud/fake_graphql_server/get_job_run_models/__init__.py b/tests/dbt/cloud/fake_graphql_server/get_job_run_models/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/tests/dbt/cloud/fake_graphql_server/get_job_run_models/endpoint.py b/tests/dbt/cloud/fake_graphql_server/get_job_run_models/endpoint.py new file mode 100644 index 00000000..640b54f7 --- /dev/null +++ b/tests/dbt/cloud/fake_graphql_server/get_job_run_models/endpoint.py @@ -0,0 +1,12 @@ +import os +from typing import Any, Dict + +from ..targets import job_targets + +dir_path = os.path.dirname(os.path.realpath(__file__)) + + +def fake_GetJobRunModels(variables: Dict[str, Any]): + target = job_targets[variables["jobId"]] + with open(f"{dir_path}/{target}.json") as f: + return f.read() diff --git a/tests/dbt/cloud/fake_graphql_server/get_job_run_models/jaffle_shop.json b/tests/dbt/cloud/fake_graphql_server/get_job_run_models/jaffle_shop.json new file mode 100644 index 00000000..960207fa --- /dev/null +++ b/tests/dbt/cloud/fake_graphql_server/get_job_run_models/jaffle_shop.json @@ -0,0 +1,751 @@ +{ + "job": { + "models": [ + { + "alias": "customers", + "columns": [ + { + "comment": null, + "description": "The unique key of the orders mart.", + "meta": {}, + "name": "CUSTOMER_ID", + "tags": [], + "type": "TEXT" + }, + { + "comment": null, + "description": "Customers' full name.", + "meta": {}, + "name": "CUSTOMER_NAME", + "tags": [], + "type": "TEXT" + }, + { + "comment": null, + "description": "Total number of orders a customer has ever placed.", + "meta": {}, + "name": "COUNT_LIFETIME_ORDERS", + "tags": [], + "type": "NUMBER" + }, + { + "comment": null, + "description": "The timestamp when a customer placed their first order.", + "meta": {}, + "name": "FIRST_ORDERED_AT", + "tags": [], + "type": "TIMESTAMP_NTZ" + }, + { + "comment": null, + "description": "The timestamp of a customer's most recent order.", + "meta": {}, + "name": "LAST_ORDERED_AT", + "tags": [], + "type": "TIMESTAMP_NTZ" + }, + { + "comment": null, + "description": "The sum of all the pre-tax subtotals of every order a customer has placed.", + "meta": {}, + "name": "LIFETIME_SPEND_PRETAX", + "tags": [], + "type": "NUMBER" + }, + { + "comment": null, + "description": "The sum of all the order totals (including tax) that a customer has ever placed.", + "meta": {}, + "name": "LIFETIME_SPEND", + "tags": [], + "type": "NUMBER" + }, + { + "comment": null, + 
"description": "Options are 'new' or 'returning', indicating if a customer has ordered more than once or has only placed their first order to date.", + "meta": {}, + "name": "CUSTOMER_TYPE", + "tags": [], + "type": "TEXT" + } + ], + "compiledCode": "\\n\\nwith\\n\\ncustomers as (\\n\\n select * from acme.jaffle_shop.stg_customers\\n\\n),\\n\\norders_mart as (\\n\\n select * from acme.jaffle_shop.orders\\n\\n),\\n\\norder_items_mart as (\\n\\n select * from acme.jaffle_shop.order_items\\n),\\n\\norder_summary as (\\n\\n select\\n customer_id,\\n\\n count(distinct om.order_id) as count_lifetime_orders,\\n count(distinct om.order_id) > 1 as is_repeat_buyer,\\n min(om.ordered_at) as first_ordered_at,\\n max(om.ordered_at) as last_ordered_at,\\n sum(oi.subtotal) as lifetime_spend_pretax,\\n sum(om.order_total) as lifetime_spend\\n\\n from orders_mart om\\n \\n left join order_items_mart oi on om.order_id = oi.order_id\\n \\n group by 1\\n\\n),\\n\\njoined as (\\n\\n select\\n customers.*,\\n order_summary.count_lifetime_orders,\\n order_summary.first_ordered_at,\\n order_summary.last_ordered_at,\\n order_summary.lifetime_spend_pretax,\\n order_summary.lifetime_spend,\\n\\n case\\n when order_summary.is_repeat_buyer then 'returning'\\n else 'new'\\n end as customer_type\\n\\n from customers\\n\\n left join order_summary\\n on customers.customer_id = order_summary.customer_id\\n\\n)\\n\\nselect * from joined", + "compiledSql": "\\n\\nwith\\n\\ncustomers as (\\n\\n select * from acme.jaffle_shop.stg_customers\\n\\n),\\n\\norders_mart as (\\n\\n select * from acme.jaffle_shop.orders\\n\\n),\\n\\norder_items_mart as (\\n\\n select * from acme.jaffle_shop.order_items\\n),\\n\\norder_summary as (\\n\\n select\\n customer_id,\\n\\n count(distinct om.order_id) as count_lifetime_orders,\\n count(distinct om.order_id) > 1 as is_repeat_buyer,\\n min(om.ordered_at) as first_ordered_at,\\n max(om.ordered_at) as last_ordered_at,\\n sum(oi.subtotal) as lifetime_spend_pretax,\\n sum(om.order_total) as lifetime_spend\\n\\n from orders_mart om\\n \\n left join order_items_mart oi on om.order_id = oi.order_id\\n \\n group by 1\\n\\n),\\n\\njoined as (\\n\\n select\\n customers.*,\\n order_summary.count_lifetime_orders,\\n order_summary.first_ordered_at,\\n order_summary.last_ordered_at,\\n order_summary.lifetime_spend_pretax,\\n order_summary.lifetime_spend,\\n\\n case\\n when order_summary.is_repeat_buyer then 'returning'\\n else 'new'\\n end as customer_type\\n\\n from customers\\n\\n left join order_summary\\n on customers.customer_id = order_summary.customer_id\\n\\n)\\n\\nselect * from joined", + "database": "acme", + "dependsOn": [ + "model.jaffle_shop.order_items", + "model.jaffle_shop.orders", + "model.jaffle_shop.stg_customers" + ], + "description": "Customer overview data mart, offering key details for each unique customer. 
One row per customer.", + "environmentId": 132676, + "materializedType": "table", + "meta": {}, + "name": "customers", + "packageName": "jaffle_shop", + "rawCode": "{{\\n config(\\n materialized='table'\\n )\\n}}\\n\\nwith\\n\\ncustomers as (\\n\\n select * from {{ ref('stg_customers') }}\\n\\n),\\n\\norders_mart as (\\n\\n select * from {{ ref('orders') }}\\n\\n),\\n\\norder_items_mart as (\\n\\n select * from {{ ref('order_items') }}\\n),\\n\\norder_summary as (\\n\\n select\\n customer_id,\\n\\n count(distinct om.order_id) as count_lifetime_orders,\\n count(distinct om.order_id) > 1 as is_repeat_buyer,\\n min(om.ordered_at) as first_ordered_at,\\n max(om.ordered_at) as last_ordered_at,\\n sum(oi.subtotal) as lifetime_spend_pretax,\\n sum(om.order_total) as lifetime_spend\\n\\n from orders_mart om\\n \\n left join order_items_mart oi on om.order_id = oi.order_id\\n \\n group by 1\\n\\n),\\n\\njoined as (\\n\\n select\\n customers.*,\\n order_summary.count_lifetime_orders,\\n order_summary.first_ordered_at,\\n order_summary.last_ordered_at,\\n order_summary.lifetime_spend_pretax,\\n order_summary.lifetime_spend,\\n\\n case\\n when order_summary.is_repeat_buyer then 'returning'\\n else 'new'\\n end as customer_type\\n\\n from customers\\n\\n left join order_summary\\n on customers.customer_id = order_summary.customer_id\\n\\n)\\n\\nselect * from joined", + "rawSql": "{{\\n config(\\n materialized='table'\\n )\\n}}\\n\\nwith\\n\\ncustomers as (\\n\\n select * from {{ ref('stg_customers') }}\\n\\n),\\n\\norders_mart as (\\n\\n select * from {{ ref('orders') }}\\n\\n),\\n\\norder_items_mart as (\\n\\n select * from {{ ref('order_items') }}\\n),\\n\\norder_summary as (\\n\\n select\\n customer_id,\\n\\n count(distinct om.order_id) as count_lifetime_orders,\\n count(distinct om.order_id) > 1 as is_repeat_buyer,\\n min(om.ordered_at) as first_ordered_at,\\n max(om.ordered_at) as last_ordered_at,\\n sum(oi.subtotal) as lifetime_spend_pretax,\\n sum(om.order_total) as lifetime_spend\\n\\n from orders_mart om\\n \\n left join order_items_mart oi on om.order_id = oi.order_id\\n \\n group by 1\\n\\n),\\n\\njoined as (\\n\\n select\\n customers.*,\\n order_summary.count_lifetime_orders,\\n order_summary.first_ordered_at,\\n order_summary.last_ordered_at,\\n order_summary.lifetime_spend_pretax,\\n order_summary.lifetime_spend,\\n\\n case\\n when order_summary.is_repeat_buyer then 'returning'\\n else 'new'\\n end as customer_type\\n\\n from customers\\n\\n left join order_summary\\n on customers.customer_id = order_summary.customer_id\\n\\n)\\n\\nselect * from joined", + "runResults": [ + { + "status": "success", + "executeCompletedAt": "2024-07-24T04:01:07.650Z" + }, + { + "status": "success", + "executeCompletedAt": "2024-07-24T04:01:41.238Z" + } + ], + "schema": "jaffle_shop", + "tags": [], + "uniqueId": "model.jaffle_shop.customers" + }, + { + "alias": "metricflow_time_spine", + "columns": [ + { + "comment": null, + "description": null, + "meta": {}, + "name": "DATE_DAY", + "tags": [], + "type": "DATE" + } + ], + "compiledCode": "-- metricflow_time_spine.sql\\nwith days as (\\n --for BQ adapters use \"DATE('01/01/2000','mm/dd/yyyy')\"\\n\\n with date_spine as\\n(\\n\\n \\n\\n\\n\\n\\n\\nwith rawdata as (\\n\\n \\n\\n \\n\\n with p as (\\n select 0 as generated_number union all select 1\\n ), unioned as (\\n\\n select\\n\\n \\n p0.generated_number * power(2, 0)\\n + \\n \\n p1.generated_number * power(2, 1)\\n + \\n \\n p2.generated_number * power(2, 2)\\n + \\n \\n p3.generated_number * power(2, 
3)\\n + \\n \\n p4.generated_number * power(2, 4)\\n + \\n \\n p5.generated_number * power(2, 5)\\n + \\n \\n p6.generated_number * power(2, 6)\\n + \\n \\n p7.generated_number * power(2, 7)\\n + \\n \\n p8.generated_number * power(2, 8)\\n + \\n \\n p9.generated_number * power(2, 9)\\n + \\n \\n p10.generated_number * power(2, 10)\\n + \\n \\n p11.generated_number * power(2, 11)\\n \\n \\n + 1\\n as generated_number\\n\\n from\\n\\n \\n p as p0\\n cross join \\n \\n p as p1\\n cross join \\n \\n p as p2\\n cross join \\n \\n p as p3\\n cross join \\n \\n p as p4\\n cross join \\n \\n p as p5\\n cross join \\n \\n p as p6\\n cross join \\n \\n p as p7\\n cross join \\n \\n p as p8\\n cross join \\n \\n p as p9\\n cross join \\n \\n p as p10\\n cross join \\n \\n p as p11\\n \\n \\n\\n )\\n\\n select *\\n from unioned\\n where generated_number <= 3651\\n order by generated_number\\n\\n\\n\\n),\\n\\nall_periods as (\\n\\n select (\\n \\n\\n dateadd(\\n day,\\n row_number() over (order by 1) - 1,\\n \\n\\n dateadd(\\n day,\\n -3650,\\n cast(convert_timezone('UTC', 'America/Los_Angeles',\\n cast(convert_timezone('UTC', current_timestamp()) as timestamp)\\n) as date)\\n )\\n\\n\\n )\\n\\n\\n ) as date_day\\n from rawdata\\n\\n),\\n\\nfiltered as (\\n\\n select *\\n from all_periods\\n where date_day <= cast(\\n\\n dateadd(\\n day,\\n 1,\\n cast(convert_timezone('UTC', 'America/Los_Angeles',\\n cast(convert_timezone('UTC', current_timestamp()) as timestamp)\\n) as date)\\n )\\n\\n as date)\\n\\n)\\n\\nselect * from filtered\\n\\n\\n\\n)\\nselect\\n cast(d.date_day as timestamp) as date_day\\nfrom\\n date_spine d\\n\\n\\n),\\n\\nfinal as (\\n select cast(date_day as date) as date_day\\n from days\\n)\\n\\nselect *\\nfrom final", + "compiledSql": "-- metricflow_time_spine.sql\\nwith days as (\\n --for BQ adapters use \"DATE('01/01/2000','mm/dd/yyyy')\"\\n\\n with date_spine as\\n(\\n\\n \\n\\n\\n\\n\\n\\nwith rawdata as (\\n\\n \\n\\n \\n\\n with p as (\\n select 0 as generated_number union all select 1\\n ), unioned as (\\n\\n select\\n\\n \\n p0.generated_number * power(2, 0)\\n + \\n \\n p1.generated_number * power(2, 1)\\n + \\n \\n p2.generated_number * power(2, 2)\\n + \\n \\n p3.generated_number * power(2, 3)\\n + \\n \\n p4.generated_number * power(2, 4)\\n + \\n \\n p5.generated_number * power(2, 5)\\n + \\n \\n p6.generated_number * power(2, 6)\\n + \\n \\n p7.generated_number * power(2, 7)\\n + \\n \\n p8.generated_number * power(2, 8)\\n + \\n \\n p9.generated_number * power(2, 9)\\n + \\n \\n p10.generated_number * power(2, 10)\\n + \\n \\n p11.generated_number * power(2, 11)\\n \\n \\n + 1\\n as generated_number\\n\\n from\\n\\n \\n p as p0\\n cross join \\n \\n p as p1\\n cross join \\n \\n p as p2\\n cross join \\n \\n p as p3\\n cross join \\n \\n p as p4\\n cross join \\n \\n p as p5\\n cross join \\n \\n p as p6\\n cross join \\n \\n p as p7\\n cross join \\n \\n p as p8\\n cross join \\n \\n p as p9\\n cross join \\n \\n p as p10\\n cross join \\n \\n p as p11\\n \\n \\n\\n )\\n\\n select *\\n from unioned\\n where generated_number <= 3651\\n order by generated_number\\n\\n\\n\\n),\\n\\nall_periods as (\\n\\n select (\\n \\n\\n dateadd(\\n day,\\n row_number() over (order by 1) - 1,\\n \\n\\n dateadd(\\n day,\\n -3650,\\n cast(convert_timezone('UTC', 'America/Los_Angeles',\\n cast(convert_timezone('UTC', current_timestamp()) as timestamp)\\n) as date)\\n )\\n\\n\\n )\\n\\n\\n ) as date_day\\n from rawdata\\n\\n),\\n\\nfiltered as (\\n\\n select *\\n from all_periods\\n where 
date_day <= cast(\\n\\n dateadd(\\n day,\\n 1,\\n cast(convert_timezone('UTC', 'America/Los_Angeles',\\n cast(convert_timezone('UTC', current_timestamp()) as timestamp)\\n) as date)\\n )\\n\\n as date)\\n\\n)\\n\\nselect * from filtered\\n\\n\\n\\n)\\nselect\\n cast(d.date_day as timestamp) as date_day\\nfrom\\n date_spine d\\n\\n\\n),\\n\\nfinal as (\\n select cast(date_day as date) as date_day\\n from days\\n)\\n\\nselect *\\nfrom final", + "database": "acme", + "dependsOn": [ + "macro.dbt_date.get_base_dates" + ], + "description": "", + "environmentId": 132676, + "materializedType": "table", + "meta": {}, + "name": "metricflow_time_spine", + "packageName": "jaffle_shop", + "rawCode": "-- metricflow_time_spine.sql\\nwith days as (\\n --for BQ adapters use \"DATE('01/01/2000','mm/dd/yyyy')\"\\n{{ dbt_date.get_base_dates(n_dateparts=365*10, datepart=\"day\") }}\\n),\\n\\nfinal as (\\n select cast(date_day as date) as date_day\\n from days\\n)\\n\\nselect *\\nfrom final", + "rawSql": "-- metricflow_time_spine.sql\\nwith days as (\\n --for BQ adapters use \"DATE('01/01/2000','mm/dd/yyyy')\"\\n{{ dbt_date.get_base_dates(n_dateparts=365*10, datepart=\"day\") }}\\n),\\n\\nfinal as (\\n select cast(date_day as date) as date_day\\n from days\\n)\\n\\nselect *\\nfrom final", + "runResults": [ + { + "status": "success", + "executeCompletedAt": "2024-07-24T04:00:42.495Z" + }, + { + "status": "success", + "executeCompletedAt": "2024-07-24T04:01:36.466Z" + } + ], + "schema": "jaffle_shop", + "tags": [], + "uniqueId": "model.jaffle_shop.metricflow_time_spine" + }, + { + "alias": "order_items", + "columns": [ + { + "comment": null, + "description": null, + "meta": {}, + "name": "ORDER_ITEM_ID", + "tags": [], + "type": "TEXT" + }, + { + "comment": null, + "description": null, + "meta": {}, + "name": "ORDER_ID", + "tags": [], + "type": "TEXT" + }, + { + "comment": null, + "description": null, + "meta": {}, + "name": "PRODUCT_ID", + "tags": [], + "type": "TEXT" + }, + { + "comment": null, + "description": null, + "meta": {}, + "name": "ORDERED_AT", + "tags": [], + "type": "TIMESTAMP_NTZ" + }, + { + "comment": null, + "description": null, + "meta": {}, + "name": "SUBTOTAL", + "tags": [], + "type": "NUMBER" + }, + { + "comment": null, + "description": null, + "meta": {}, + "name": "IS_FOOD_ITEM", + "tags": [], + "type": "NUMBER" + }, + { + "comment": null, + "description": null, + "meta": {}, + "name": "IS_DRINK_ITEM", + "tags": [], + "type": "NUMBER" + } + ], + "compiledCode": "\\n\\nwith order_items as (\\n\\n select * from acme.jaffle_shop.stg_order_items\\n\\n),\\n\\n\\norders as (\\n \\n select * from acme.jaffle_shop.stg_orders\\n),\\n\\nproducts as (\\n\\n select * from acme.jaffle_shop.stg_products\\n\\n),\\n\\n\\nfinal as (\\n select\\n order_items.*,\\n orders.ordered_at,\\n products.product_price as subtotal,\\n products.is_food_item,\\n products.is_drink_item\\n from order_items\\n\\n left join products on order_items.product_id = products.product_id\\n -- left join order_supplies_summary on order_items.order_id = order_supplies_summary.product_id\\n left join orders on order_items.order_id = orders.order_id\\n)\\n\\nselect * from final", + "compiledSql": "\\n\\nwith order_items as (\\n\\n select * from acme.jaffle_shop.stg_order_items\\n\\n),\\n\\n\\norders as (\\n \\n select * from acme.jaffle_shop.stg_orders\\n),\\n\\nproducts as (\\n\\n select * from acme.jaffle_shop.stg_products\\n\\n),\\n\\n\\nfinal as (\\n select\\n order_items.*,\\n orders.ordered_at,\\n products.product_price as 
subtotal,\\n products.is_food_item,\\n products.is_drink_item\\n from order_items\\n\\n left join products on order_items.product_id = products.product_id\\n -- left join order_supplies_summary on order_items.order_id = order_supplies_summary.product_id\\n left join orders on order_items.order_id = orders.order_id\\n)\\n\\nselect * from final", + "database": "acme", + "dependsOn": [ + "model.jaffle_shop.stg_order_items", + "model.jaffle_shop.stg_orders", + "model.jaffle_shop.stg_products" + ], + "description": "", + "environmentId": 132676, + "materializedType": "table", + "meta": {}, + "name": "order_items", + "packageName": "jaffle_shop", + "rawCode": "{{\\n config(\\n materialized = 'table',\\n unique_key = 'order_item_id'\\n )\\n}}\\n\\nwith order_items as (\\n\\n select * from {{ ref('stg_order_items') }}\\n\\n),\\n\\n\\norders as (\\n \\n select * from {{ ref('stg_orders')}}\\n),\\n\\nproducts as (\\n\\n select * from {{ ref('stg_products') }}\\n\\n),\\n\\n\\nfinal as (\\n select\\n order_items.*,\\n orders.ordered_at,\\n products.product_price as subtotal,\\n products.is_food_item,\\n products.is_drink_item\\n from order_items\\n\\n left join products on order_items.product_id = products.product_id\\n -- left join order_supplies_summary on order_items.order_id = order_supplies_summary.product_id\\n left join orders on order_items.order_id = orders.order_id\\n)\\n\\nselect * from final", + "rawSql": "{{\\n config(\\n materialized = 'table',\\n unique_key = 'order_item_id'\\n )\\n}}\\n\\nwith order_items as (\\n\\n select * from {{ ref('stg_order_items') }}\\n\\n),\\n\\n\\norders as (\\n \\n select * from {{ ref('stg_orders')}}\\n),\\n\\nproducts as (\\n\\n select * from {{ ref('stg_products') }}\\n\\n),\\n\\n\\nfinal as (\\n select\\n order_items.*,\\n orders.ordered_at,\\n products.product_price as subtotal,\\n products.is_food_item,\\n products.is_drink_item\\n from order_items\\n\\n left join products on order_items.product_id = products.product_id\\n -- left join order_supplies_summary on order_items.order_id = order_supplies_summary.product_id\\n left join orders on order_items.order_id = orders.order_id\\n)\\n\\nselect * from final", + "runResults": [ + { + "status": "success", + "executeCompletedAt": "2024-07-24T04:01:01.794Z" + }, + { + "status": "success", + "executeCompletedAt": "2024-07-24T04:01:39.273Z" + } + ], + "schema": "jaffle_shop", + "tags": [], + "uniqueId": "model.jaffle_shop.order_items" + }, + { + "alias": "orders", + "columns": [ + { + "comment": null, + "description": "The unique key of the orders mart.", + "meta": {}, + "name": "ORDER_ID", + "tags": [], + "type": "TEXT" + }, + { + "comment": null, + "description": "The foreign key relating to the location the order was placed at.", + "meta": {}, + "name": "LOCATION_ID", + "tags": [], + "type": "TEXT" + }, + { + "comment": null, + "description": "The foreign key relating to the customer who placed the order.", + "meta": {}, + "name": "CUSTOMER_ID", + "tags": [], + "type": "TEXT" + }, + { + "comment": null, + "description": "The total amount of the order in USD including tax.", + "meta": {}, + "name": "ORDER_TOTAL", + "tags": [], + "type": "NUMBER" + }, + { + "comment": null, + "description": null, + "meta": {}, + "name": "TAX_PAID", + "tags": [], + "type": "NUMBER" + }, + { + "comment": null, + "description": "The timestamp the order was placed at.", + "meta": {}, + "name": "ORDERED_AT", + "tags": [], + "type": "TIMESTAMP_NTZ" + }, + { + "comment": null, + "description": "A boolean indicating if this order 
included any food items.", + "meta": {}, + "name": "IS_FOOD_ORDER", + "tags": [], + "type": "BOOLEAN" + }, + { + "comment": null, + "description": "A boolean indicating if this order included any drink items.", + "meta": {}, + "name": "IS_DRINK_ORDER", + "tags": [], + "type": "BOOLEAN" + }, + { + "comment": null, + "description": "The sum of supply expenses to fulfill the order.", + "meta": {}, + "name": "ORDER_COST", + "tags": [], + "type": "NUMBER" + } + ], + "compiledCode": "\\n\\n\\nwith orders as (\\n \\n select * from acme.jaffle_shop.stg_orders\\n\\n),\\n\\norder_items as (\\n \\n select * from acme.jaffle_shop.stg_order_items\\n\\n),\\n\\nproducts as (\\n\\n select * from acme.jaffle_shop.stg_products\\n),\\n\\nsupplies as (\\n\\n select * from acme.jaffle_shop.stg_supplies\\n\\n),\\n\\n\\norder_items_summary as (\\n\\n select\\n\\n order_items.order_id,\\n\\n sum(supplies.supply_cost) as order_cost,\\n sum(is_food_item) as count_food_items,\\n sum(is_drink_item) as count_drink_items\\n\\n\\n from order_items\\n\\n left join supplies on order_items.product_id = supplies.product_id\\n left join products on order_items.product_id = products.product_id\\n\\n group by 1\\n\\n),\\n\\n\\nfinal as (\\n select\\n\\n orders.*,\\n count_food_items > 0 as is_food_order,\\n count_drink_items > 0 as is_drink_order,\\n order_cost\\n\\n from orders\\n \\n left join order_items_summary on orders.order_id = order_items_summary.order_id\\n)\\n\\nselect * from final", + "compiledSql": "\\n\\n\\nwith orders as (\\n \\n select * from acme.jaffle_shop.stg_orders\\n\\n),\\n\\norder_items as (\\n \\n select * from acme.jaffle_shop.stg_order_items\\n\\n),\\n\\nproducts as (\\n\\n select * from acme.jaffle_shop.stg_products\\n),\\n\\nsupplies as (\\n\\n select * from acme.jaffle_shop.stg_supplies\\n\\n),\\n\\n\\norder_items_summary as (\\n\\n select\\n\\n order_items.order_id,\\n\\n sum(supplies.supply_cost) as order_cost,\\n sum(is_food_item) as count_food_items,\\n sum(is_drink_item) as count_drink_items\\n\\n\\n from order_items\\n\\n left join supplies on order_items.product_id = supplies.product_id\\n left join products on order_items.product_id = products.product_id\\n\\n group by 1\\n\\n),\\n\\n\\nfinal as (\\n select\\n\\n orders.*,\\n count_food_items > 0 as is_food_order,\\n count_drink_items > 0 as is_drink_order,\\n order_cost\\n\\n from orders\\n \\n left join order_items_summary on orders.order_id = order_items_summary.order_id\\n)\\n\\nselect * from final", + "database": "acme", + "dependsOn": [ + "model.jaffle_shop.stg_order_items", + "model.jaffle_shop.stg_orders", + "model.jaffle_shop.stg_products", + "model.jaffle_shop.stg_supplies" + ], + "description": "Order overview data mart, offering key details for each order including if it's a customer's first order and a food vs. drink item breakdown. 
One row per order.", + "environmentId": 132676, + "materializedType": "table", + "meta": {}, + "name": "orders", + "packageName": "jaffle_shop", + "rawCode": "{{\\n config(\\n materialized = 'table',\\n unique_key = 'order_id'\\n )\\n}}\\n\\n\\nwith orders as (\\n \\n select * from {{ ref('stg_orders')}}\\n\\n),\\n\\norder_items as (\\n \\n select * from {{ ref('stg_order_items')}}\\n\\n),\\n\\nproducts as (\\n\\n select * from {{ ref('stg_products') }}\\n),\\n\\nsupplies as (\\n\\n select * from {{ ref('stg_supplies') }}\\n\\n),\\n\\n\\norder_items_summary as (\\n\\n select\\n\\n order_items.order_id,\\n\\n sum(supplies.supply_cost) as order_cost,\\n sum(is_food_item) as count_food_items,\\n sum(is_drink_item) as count_drink_items\\n\\n\\n from order_items\\n\\n left join supplies on order_items.product_id = supplies.product_id\\n left join products on order_items.product_id = products.product_id\\n\\n group by 1\\n\\n),\\n\\n\\nfinal as (\\n select\\n\\n orders.*,\\n count_food_items > 0 as is_food_order,\\n count_drink_items > 0 as is_drink_order,\\n order_cost\\n\\n from orders\\n \\n left join order_items_summary on orders.order_id = order_items_summary.order_id\\n)\\n\\nselect * from final", + "rawSql": "{{\\n config(\\n materialized = 'table',\\n unique_key = 'order_id'\\n )\\n}}\\n\\n\\nwith orders as (\\n \\n select * from {{ ref('stg_orders')}}\\n\\n),\\n\\norder_items as (\\n \\n select * from {{ ref('stg_order_items')}}\\n\\n),\\n\\nproducts as (\\n\\n select * from {{ ref('stg_products') }}\\n),\\n\\nsupplies as (\\n\\n select * from {{ ref('stg_supplies') }}\\n\\n),\\n\\n\\norder_items_summary as (\\n\\n select\\n\\n order_items.order_id,\\n\\n sum(supplies.supply_cost) as order_cost,\\n sum(is_food_item) as count_food_items,\\n sum(is_drink_item) as count_drink_items\\n\\n\\n from order_items\\n\\n left join supplies on order_items.product_id = supplies.product_id\\n left join products on order_items.product_id = products.product_id\\n\\n group by 1\\n\\n),\\n\\n\\nfinal as (\\n select\\n\\n orders.*,\\n count_food_items > 0 as is_food_order,\\n count_drink_items > 0 as is_drink_order,\\n order_cost\\n\\n from orders\\n \\n left join order_items_summary on orders.order_id = order_items_summary.order_id\\n)\\n\\nselect * from final", + "runResults": [ + { + "status": "success", + "executeCompletedAt": "2024-07-24T04:01:01.813Z" + }, + { + "status": "success", + "executeCompletedAt": "2024-07-24T04:01:39.404Z" + } + ], + "schema": "jaffle_shop", + "tags": [], + "uniqueId": "model.jaffle_shop.orders" + }, + { + "alias": "stg_customers", + "columns": [ + { + "comment": null, + "description": "The unique key for each customer.", + "meta": {}, + "name": "CUSTOMER_ID", + "tags": [], + "type": "TEXT" + }, + { + "comment": null, + "description": null, + "meta": {}, + "name": "CUSTOMER_NAME", + "tags": [], + "type": "TEXT" + } + ], + "compiledCode": "with\\n\\nsource as (\\n\\n select * from raw_customers\\n\\n),\\n\\nrenamed as (\\n\\n select\\n\\n ---------- ids\\n id as customer_id,\\n\\n ---------- properties\\n name as customer_name\\n\\n from source\\n\\n)\\n\\nselect * from renamed", + "compiledSql": "with\\n\\nsource as (\\n\\n select * from raw_customers\\n\\n),\\n\\nrenamed as (\\n\\n select\\n\\n ---------- ids\\n id as customer_id,\\n\\n ---------- properties\\n name as customer_name\\n\\n from source\\n\\n)\\n\\nselect * from renamed", + "database": "acme", + "dependsOn": [], + "description": "Customer data with basic cleaning and transformation applied, one row per 
customer.", + "environmentId": 132676, + "materializedType": "view", + "meta": {}, + "name": "stg_customers", + "packageName": "jaffle_shop", + "rawCode": "with\\n\\nsource as (\\n\\n select * from raw_customers\\n\\n),\\n\\nrenamed as (\\n\\n select\\n\\n ---------- ids\\n id as customer_id,\\n\\n ---------- properties\\n name as customer_name\\n\\n from source\\n\\n)\\n\\nselect * from renamed", + "rawSql": "with\\n\\nsource as (\\n\\n select * from raw_customers\\n\\n),\\n\\nrenamed as (\\n\\n select\\n\\n ---------- ids\\n id as customer_id,\\n\\n ---------- properties\\n name as customer_name\\n\\n from source\\n\\n)\\n\\nselect * from renamed", + "runResults": [ + { + "status": "success", + "executeCompletedAt": "2024-07-24T04:00:41.339Z" + }, + { + "status": "success", + "executeCompletedAt": "2024-07-24T04:01:35.949Z" + } + ], + "schema": "jaffle_shop", + "tags": [], + "uniqueId": "model.jaffle_shop.stg_customers" + }, + { + "alias": "stg_locations", + "columns": [ + { + "comment": null, + "description": "The unique key for each location.", + "meta": {}, + "name": "LOCATION_ID", + "tags": [], + "type": "TEXT" + }, + { + "comment": null, + "description": null, + "meta": {}, + "name": "LOCATION_NAME", + "tags": [], + "type": "TEXT" + }, + { + "comment": null, + "description": null, + "meta": {}, + "name": "TAX_RATE", + "tags": [], + "type": "FLOAT" + }, + { + "comment": null, + "description": null, + "meta": {}, + "name": "OPENED_AT", + "tags": [], + "type": "TIMESTAMP_NTZ" + } + ], + "compiledCode": "with\\n\\nsource as (\\n\\n select * from raw_stores\\n\\n -- \\n -- where opened_at <= convert_timezone('UTC', current_timestamp())\\n\\n),\\n\\nrenamed as (\\n\\n select\\n\\n ---------- ids\\n id as location_id,\\n\\n ---------- properties\\n name as location_name,\\n tax_rate,\\n\\n ---------- timestamp\\n opened_at\\n\\n from source\\n\\n)\\n\\nselect * from renamed", + "compiledSql": "with\\n\\nsource as (\\n\\n select * from raw_stores\\n\\n -- \\n -- where opened_at <= convert_timezone('UTC', current_timestamp())\\n\\n),\\n\\nrenamed as (\\n\\n select\\n\\n ---------- ids\\n id as location_id,\\n\\n ---------- properties\\n name as location_name,\\n tax_rate,\\n\\n ---------- timestamp\\n opened_at\\n\\n from source\\n\\n)\\n\\nselect * from renamed", + "database": "acme", + "dependsOn": [ + "macro.dbt.current_timestamp" + ], + "description": "List of open locations with basic cleaning and transformation applied, one row per location.", + "environmentId": 132676, + "materializedType": "view", + "meta": {}, + "name": "stg_locations", + "packageName": "jaffle_shop", + "rawCode": "with\\n\\nsource as (\\n\\n select * from raw_stores\\n\\n -- {# data runs to 2026, truncate timespan to desired range, \\n -- current time as default #}\\n -- where opened_at <= {{ var('truncate_timespan_to') }}\\n\\n),\\n\\nrenamed as (\\n\\n select\\n\\n ---------- ids\\n id as location_id,\\n\\n ---------- properties\\n name as location_name,\\n tax_rate,\\n\\n ---------- timestamp\\n opened_at\\n\\n from source\\n\\n)\\n\\nselect * from renamed", + "rawSql": "with\\n\\nsource as (\\n\\n select * from raw_stores\\n\\n -- {# data runs to 2026, truncate timespan to desired range, \\n -- current time as default #}\\n -- where opened_at <= {{ var('truncate_timespan_to') }}\\n\\n),\\n\\nrenamed as (\\n\\n select\\n\\n ---------- ids\\n id as location_id,\\n\\n ---------- properties\\n name as location_name,\\n tax_rate,\\n\\n ---------- timestamp\\n opened_at\\n\\n from source\\n\\n)\\n\\nselect * from 
renamed", + "runResults": [ + { + "status": "success", + "executeCompletedAt": "2024-07-24T04:00:41.217Z" + }, + { + "status": "success", + "executeCompletedAt": "2024-07-24T04:01:35.994Z" + } + ], + "schema": "jaffle_shop", + "tags": [], + "uniqueId": "model.jaffle_shop.stg_locations" + }, + { + "alias": "stg_order_items", + "columns": [ + { + "comment": null, + "description": "The unique key for each order item.", + "meta": {}, + "name": "ORDER_ITEM_ID", + "tags": [], + "type": "TEXT" + }, + { + "comment": null, + "description": null, + "meta": {}, + "name": "ORDER_ID", + "tags": [], + "type": "TEXT" + }, + { + "comment": null, + "description": null, + "meta": {}, + "name": "PRODUCT_ID", + "tags": [], + "type": "TEXT" + } + ], + "compiledCode": "with\\n\\nsource as (\\n\\n select * from raw_items\\n\\n),\\n\\nrenamed as (\\n\\n select\\n\\n ---------- ids\\n id as order_item_id,\\n order_id,\\n\\n ---------- properties\\n sku as product_id\\n\\n from source\\n\\n)\\n\\nselect * from renamed", + "compiledSql": "with\\n\\nsource as (\\n\\n select * from raw_items\\n\\n),\\n\\nrenamed as (\\n\\n select\\n\\n ---------- ids\\n id as order_item_id,\\n order_id,\\n\\n ---------- properties\\n sku as product_id\\n\\n from source\\n\\n)\\n\\nselect * from renamed", + "database": "acme", + "dependsOn": [], + "description": "Individual food and drink items that make up our orders, one row per item.", + "environmentId": 132676, + "materializedType": "view", + "meta": {}, + "name": "stg_order_items", + "packageName": "jaffle_shop", + "rawCode": "with\\n\\nsource as (\\n\\n select * from raw_items\\n\\n),\\n\\nrenamed as (\\n\\n select\\n\\n ---------- ids\\n id as order_item_id,\\n order_id,\\n\\n ---------- properties\\n sku as product_id\\n\\n from source\\n\\n)\\n\\nselect * from renamed", + "rawSql": "with\\n\\nsource as (\\n\\n select * from raw_items\\n\\n),\\n\\nrenamed as (\\n\\n select\\n\\n ---------- ids\\n id as order_item_id,\\n order_id,\\n\\n ---------- properties\\n sku as product_id\\n\\n from source\\n\\n)\\n\\nselect * from renamed", + "runResults": [ + { + "status": "success", + "executeCompletedAt": "2024-07-24T04:00:41.336Z" + }, + { + "status": "success", + "executeCompletedAt": "2024-07-24T04:01:35.953Z" + } + ], + "schema": "jaffle_shop", + "tags": [], + "uniqueId": "model.jaffle_shop.stg_order_items" + }, + { + "alias": "stg_orders", + "columns": [ + { + "comment": null, + "description": "The unique key for each order.", + "meta": {}, + "name": "ORDER_ID", + "tags": [], + "type": "TEXT" + }, + { + "comment": null, + "description": null, + "meta": {}, + "name": "LOCATION_ID", + "tags": [], + "type": "TEXT" + }, + { + "comment": null, + "description": null, + "meta": {}, + "name": "CUSTOMER_ID", + "tags": [], + "type": "TEXT" + }, + { + "comment": null, + "description": null, + "meta": {}, + "name": "ORDER_TOTAL", + "tags": [], + "type": "NUMBER" + }, + { + "comment": null, + "description": null, + "meta": {}, + "name": "TAX_PAID", + "tags": [], + "type": "NUMBER" + }, + { + "comment": null, + "description": null, + "meta": {}, + "name": "ORDERED_AT", + "tags": [], + "type": "TIMESTAMP_NTZ" + } + ], + "compiledCode": "\\n\\nwith\\n\\nsource as (\\n\\n select * from raw_orders\\n\\n -- data runs to 2026, truncate timespan to desired range,\\n -- current time as default\\n -- where ordered_at <= convert_timezone('UTC', current_timestamp())\\n\\n),\\n\\nrenamed as (\\n\\n select\\n\\n ---------- ids\\n id as order_id,\\n store_id as location_id,\\n customer as customer_id,\\n\\n 
---------- properties\\n (order_total / 100.0) as order_total,\\n (tax_paid / 100.0) as tax_paid,\\n\\n ---------- timestamps\\n ordered_at\\n\\n from source\\n\\n)\\n\\nselect * from renamed", + "compiledSql": "\\n\\nwith\\n\\nsource as (\\n\\n select * from raw_orders\\n\\n -- data runs to 2026, truncate timespan to desired range,\\n -- current time as default\\n -- where ordered_at <= convert_timezone('UTC', current_timestamp())\\n\\n),\\n\\nrenamed as (\\n\\n select\\n\\n ---------- ids\\n id as order_id,\\n store_id as location_id,\\n customer as customer_id,\\n\\n ---------- properties\\n (order_total / 100.0) as order_total,\\n (tax_paid / 100.0) as tax_paid,\\n\\n ---------- timestamps\\n ordered_at\\n\\n from source\\n\\n)\\n\\nselect * from renamed", + "database": "acme", + "dependsOn": [ + "macro.dbt.current_timestamp" + ], + "description": "Order data with basic cleaning and transformation applied, one row per order.", + "environmentId": 132676, + "materializedType": "table", + "meta": {}, + "name": "stg_orders", + "packageName": "jaffle_shop", + "rawCode": "{{\\n config(\\n materialized = 'table',\\n unique_key = 'order_id'\\n )\\n}}\\n\\nwith\\n\\nsource as (\\n\\n select * from raw_orders\\n\\n -- data runs to 2026, truncate timespan to desired range,\\n -- current time as default\\n -- where ordered_at <= {{ var('truncate_timespan_to') }}\\n\\n),\\n\\nrenamed as (\\n\\n select\\n\\n ---------- ids\\n id as order_id,\\n store_id as location_id,\\n customer as customer_id,\\n\\n ---------- properties\\n (order_total / 100.0) as order_total,\\n (tax_paid / 100.0) as tax_paid,\\n\\n ---------- timestamps\\n ordered_at\\n\\n from source\\n\\n)\\n\\nselect * from renamed", + "rawSql": "{{\\n config(\\n materialized = 'table',\\n unique_key = 'order_id'\\n )\\n}}\\n\\nwith\\n\\nsource as (\\n\\n select * from raw_orders\\n\\n -- data runs to 2026, truncate timespan to desired range,\\n -- current time as default\\n -- where ordered_at <= {{ var('truncate_timespan_to') }}\\n\\n),\\n\\nrenamed as (\\n\\n select\\n\\n ---------- ids\\n id as order_id,\\n store_id as location_id,\\n customer as customer_id,\\n\\n ---------- properties\\n (order_total / 100.0) as order_total,\\n (tax_paid / 100.0) as tax_paid,\\n\\n ---------- timestamps\\n ordered_at\\n\\n from source\\n\\n)\\n\\nselect * from renamed", + "runResults": [ + { + "status": "success", + "executeCompletedAt": "2024-07-24T04:00:43.674Z" + }, + { + "status": "success", + "executeCompletedAt": "2024-07-24T04:01:37.610Z" + } + ], + "schema": "jaffle_shop", + "tags": [], + "uniqueId": "model.jaffle_shop.stg_orders" + }, + { + "alias": "stg_products", + "columns": [ + { + "comment": null, + "description": "The unique key for each product.", + "meta": {}, + "name": "PRODUCT_ID", + "tags": [], + "type": "TEXT" + }, + { + "comment": null, + "description": null, + "meta": {}, + "name": "PRODUCT_NAME", + "tags": [], + "type": "TEXT" + }, + { + "comment": null, + "description": null, + "meta": {}, + "name": "PRODUCT_TYPE", + "tags": [], + "type": "TEXT" + }, + { + "comment": null, + "description": null, + "meta": {}, + "name": "PRODUCT_DESCRIPTION", + "tags": [], + "type": "TEXT" + }, + { + "comment": null, + "description": null, + "meta": {}, + "name": "PRODUCT_PRICE", + "tags": [], + "type": "NUMBER" + }, + { + "comment": null, + "description": null, + "meta": {}, + "name": "IS_FOOD_ITEM", + "tags": [], + "type": "NUMBER" + }, + { + "comment": null, + "description": null, + "meta": {}, + "name": "IS_DRINK_ITEM", + "tags": [], + 
"type": "NUMBER" + } + ], + "compiledCode": "with\\n\\nsource as (\\n\\n select * from raw_products\\n\\n),\\n\\nrenamed as (\\n\\n select\\n\\n ---------- ids\\n sku as product_id,\\n\\n ---------- properties\\n name as product_name,\\n type as product_type,\\n description as product_description,\\n (price / 100.0) as product_price,\\n\\n\\n ---------- derived\\n case\\n when type = 'jaffle' then 1\\n else 0\\n end as is_food_item,\\n\\n case\\n when type = 'beverage' then 1\\n else 0\\n end as is_drink_item\\n\\n from source\\n\\n)\\n\\nselect * from renamed", + "compiledSql": "with\\n\\nsource as (\\n\\n select * from raw_products\\n\\n),\\n\\nrenamed as (\\n\\n select\\n\\n ---------- ids\\n sku as product_id,\\n\\n ---------- properties\\n name as product_name,\\n type as product_type,\\n description as product_description,\\n (price / 100.0) as product_price,\\n\\n\\n ---------- derived\\n case\\n when type = 'jaffle' then 1\\n else 0\\n end as is_food_item,\\n\\n case\\n when type = 'beverage' then 1\\n else 0\\n end as is_drink_item\\n\\n from source\\n\\n)\\n\\nselect * from renamed", + "database": "acme", + "dependsOn": [], + "description": "Product (food and drink items that can be ordered) data with basic cleaning and transformation applied, one row per product.", + "environmentId": 132676, + "materializedType": "view", + "meta": {}, + "name": "stg_products", + "packageName": "jaffle_shop", + "rawCode": "with\\n\\nsource as (\\n\\n select * from raw_products\\n\\n),\\n\\nrenamed as (\\n\\n select\\n\\n ---------- ids\\n sku as product_id,\\n\\n ---------- properties\\n name as product_name,\\n type as product_type,\\n description as product_description,\\n (price / 100.0) as product_price,\\n\\n\\n ---------- derived\\n case\\n when type = 'jaffle' then 1\\n else 0\\n end as is_food_item,\\n\\n case\\n when type = 'beverage' then 1\\n else 0\\n end as is_drink_item\\n\\n from source\\n\\n)\\n\\nselect * from renamed", + "rawSql": "with\\n\\nsource as (\\n\\n select * from raw_products\\n\\n),\\n\\nrenamed as (\\n\\n select\\n\\n ---------- ids\\n sku as product_id,\\n\\n ---------- properties\\n name as product_name,\\n type as product_type,\\n description as product_description,\\n (price / 100.0) as product_price,\\n\\n\\n ---------- derived\\n case\\n when type = 'jaffle' then 1\\n else 0\\n end as is_food_item,\\n\\n case\\n when type = 'beverage' then 1\\n else 0\\n end as is_drink_item\\n\\n from source\\n\\n)\\n\\nselect * from renamed", + "runResults": [ + { + "status": "success", + "executeCompletedAt": "2024-07-24T04:00:42.359Z" + }, + { + "status": "success", + "executeCompletedAt": "2024-07-24T04:01:36.921Z" + } + ], + "schema": "jaffle_shop", + "tags": [], + "uniqueId": "model.jaffle_shop.stg_products" + }, + { + "alias": "stg_supplies", + "columns": [ + { + "comment": null, + "description": "The unique key of our supplies per cost.", + "meta": {}, + "name": "SUPPLY_UUID", + "tags": [], + "type": "TEXT" + }, + { + "comment": null, + "description": null, + "meta": {}, + "name": "SUPPLY_ID", + "tags": [], + "type": "TEXT" + }, + { + "comment": null, + "description": null, + "meta": {}, + "name": "PRODUCT_ID", + "tags": [], + "type": "TEXT" + }, + { + "comment": null, + "description": null, + "meta": {}, + "name": "SUPPLY_NAME", + "tags": [], + "type": "TEXT" + }, + { + "comment": null, + "description": null, + "meta": {}, + "name": "SUPPLY_COST", + "tags": [], + "type": "NUMBER" + }, + { + "comment": null, + "description": null, + "meta": {}, + "name": 
"IS_PERISHABLE_SUPPLY", + "tags": [], + "type": "BOOLEAN" + } + ], + "compiledCode": "with\\n\\nsource as (\\n\\n select * from raw_supplies\\n\\n),\\n\\nrenamed as (\\n\\n select\\n\\n ---------- ids\\n \\n \\nmd5(cast(coalesce(cast(id as TEXT), '_dbt_utils_surrogate_key_null_') || '-' || coalesce(cast(sku as TEXT), '_dbt_utils_surrogate_key_null_') as TEXT)) as supply_uuid,\\n id as supply_id,\\n sku as product_id,\\n\\n ---------- properties\\n name as supply_name,\\n (cost / 100.0) as supply_cost,\\n perishable as is_perishable_supply\\n\\n from source\\n\\n)\\n\\nselect * from renamed", + "compiledSql": "with\\n\\nsource as (\\n\\n select * from raw_supplies\\n\\n),\\n\\nrenamed as (\\n\\n select\\n\\n ---------- ids\\n \\n \\nmd5(cast(coalesce(cast(id as TEXT), '_dbt_utils_surrogate_key_null_') || '-' || coalesce(cast(sku as TEXT), '_dbt_utils_surrogate_key_null_') as TEXT)) as supply_uuid,\\n id as supply_id,\\n sku as product_id,\\n\\n ---------- properties\\n name as supply_name,\\n (cost / 100.0) as supply_cost,\\n perishable as is_perishable_supply\\n\\n from source\\n\\n)\\n\\nselect * from renamed", + "database": "acme", + "dependsOn": [ + "macro.dbt_utils.generate_surrogate_key" + ], + "description": "List of our supply expenses data with basic cleaning and transformation applied.\\nOne row per supply cost, not per supply. As supply costs fluctuate they receive a new row with a new UUID. Thus there can be multiple rows per supply_id.\\n", + "environmentId": 132676, + "materializedType": "view", + "meta": {}, + "name": "stg_supplies", + "packageName": "jaffle_shop", + "rawCode": "with\\n\\nsource as (\\n\\n select * from raw_supplies\\n\\n),\\n\\nrenamed as (\\n\\n select\\n\\n ---------- ids\\n {{ dbt_utils.generate_surrogate_key(['id', 'sku']) }} as supply_uuid,\\n id as supply_id,\\n sku as product_id,\\n\\n ---------- properties\\n name as supply_name,\\n (cost / 100.0) as supply_cost,\\n perishable as is_perishable_supply\\n\\n from source\\n\\n)\\n\\nselect * from renamed", + "rawSql": "with\\n\\nsource as (\\n\\n select * from raw_supplies\\n\\n),\\n\\nrenamed as (\\n\\n select\\n\\n ---------- ids\\n {{ dbt_utils.generate_surrogate_key(['id', 'sku']) }} as supply_uuid,\\n id as supply_id,\\n sku as product_id,\\n\\n ---------- properties\\n name as supply_name,\\n (cost / 100.0) as supply_cost,\\n perishable as is_perishable_supply\\n\\n from source\\n\\n)\\n\\nselect * from renamed", + "runResults": [ + { + "status": "success", + "executeCompletedAt": "2024-07-24T04:00:42.365Z" + }, + { + "status": "success", + "executeCompletedAt": "2024-07-24T04:01:37.035Z" + } + ], + "schema": "jaffle_shop", + "tags": [], + "uniqueId": "model.jaffle_shop.stg_supplies" + } + ] + } +} diff --git a/tests/dbt/cloud/fake_graphql_server/get_job_run_models/london_bike_analysis.json b/tests/dbt/cloud/fake_graphql_server/get_job_run_models/london_bike_analysis.json new file mode 100644 index 00000000..d0153188 --- /dev/null +++ b/tests/dbt/cloud/fake_graphql_server/get_job_run_models/london_bike_analysis.json @@ -0,0 +1,700 @@ +{ + "job": { + "models": [ + { + "alias": "cleaned_bike_rides", + "columns": [ + { + "comment": "The total minutes of a particular journey in a month and general time of day.", + "description": "The total minutes of a particular journey in a month and general time of day.", + "meta": {}, + "name": "TOTAL_MINUTES", + "tags": [ + "aggregates" + ], + "type": "NUMBER" + }, + { + "comment": "Total number of bike hires of the same journey in a particular month and time of 
day", + "description": "Total number of bike hires of the same journey in a particular month and time of day", + "meta": {}, + "name": "TOTAL_BIKE_HIRES", + "tags": [ + "aggregates" + ], + "type": "NUMBER" + }, + { + "comment": "", + "description": "", + "meta": {}, + "name": "AVERAGE_DURATION", + "tags": [], + "type": "NUMBER" + }, + { + "comment": "Month the bike hire was in", + "description": "Month the bike hire was in", + "meta": {}, + "name": "MONTH", + "tags": [], + "type": "NUMBER" + }, + { + "comment": "", + "description": null, + "meta": {}, + "name": "START_PEAK_TRAVEL", + "tags": [], + "type": "TEXT" + }, + { + "comment": "", + "description": null, + "meta": {}, + "name": "SAME_STATION_FLAG", + "tags": [], + "type": "BOOLEAN" + }, + { + "comment": "", + "description": "", + "meta": {}, + "name": "START_STATION_ID", + "tags": [], + "type": "NUMBER" + }, + { + "comment": "", + "description": null, + "meta": {}, + "name": "START_STATION_NAME", + "tags": [], + "type": "TEXT" + }, + { + "comment": "", + "description": "", + "meta": {}, + "name": "END_STATION_ID", + "tags": [], + "type": "NUMBER" + }, + { + "comment": "", + "description": "", + "meta": {}, + "name": "END_STATION_NAME", + "tags": [], + "type": "TEXT" + } + ], + "compiledCode": "-- Adding extra fields including if the bike was rented during peak time \nSELECT\n SUM(duration_minutes) as total_minutes\n , COUNT(rental_id) as total_bike_hires\n , ROUND(SUM(duration_minutes) / COUNT(rental_id), 2) AS average_duration\n , EXTRACT(month from start_date) as month\n , CASE\n WHEN EXTRACT(HOUR from TO_TIMESTAMP(start_date)) >= 6 AND EXTRACT(HOUR from TO_TIMESTAMP(start_date)) <= 10 THEN 'Morning Peak'\n WHEN EXTRACT(HOUR from TO_TIMESTAMP(start_date)) >= 16 AND EXTRACT(HOUR from TO_TIMESTAMP(start_date)) <= 19 THEN 'Evening Peak'\n ELSE 'Off-Peak'\n END AS start_peak_travel\n , IFF(start_station_id = end_station_id, True, False) as same_station_flag\n , start_station_id\n , start_station_name\n , end_station_id\n , end_station_name\nFROM ACME.ride_share.raw_bike_hires\nGROUP BY 4,5,6,7,8,9,10\nORDER BY total_minutes DESC", + "compiledSql": "-- Adding extra fields including if the bike was rented during peak time \nSELECT\n SUM(duration_minutes) as total_minutes\n , COUNT(rental_id) as total_bike_hires\n , ROUND(SUM(duration_minutes) / COUNT(rental_id), 2) AS average_duration\n , EXTRACT(month from start_date) as month\n , CASE\n WHEN EXTRACT(HOUR from TO_TIMESTAMP(start_date)) >= 6 AND EXTRACT(HOUR from TO_TIMESTAMP(start_date)) <= 10 THEN 'Morning Peak'\n WHEN EXTRACT(HOUR from TO_TIMESTAMP(start_date)) >= 16 AND EXTRACT(HOUR from TO_TIMESTAMP(start_date)) <= 19 THEN 'Evening Peak'\n ELSE 'Off-Peak'\n END AS start_peak_travel\n , IFF(start_station_id = end_station_id, True, False) as same_station_flag\n , start_station_id\n , start_station_name\n , end_station_id\n , end_station_name\nFROM ACME.ride_share.raw_bike_hires\nGROUP BY 4,5,6,7,8,9,10\nORDER BY total_minutes DESC", + "database": "ACME", + "dependsOn": [ + "model.london_bike_analysis.raw_bike_hires" + ], + "description": "This table contains a transformed version of the raw_bike_hires table, which includes additional calculated fields such as creating a duration in minutes field. 
Each ride has been aggregated so any journey that starts and ends at the same station, in the same month and roughly time of day are aggregated together to get the total minutes similar journeys have taken\n", + "environmentId": 114017, + "materializedType": "table", + "meta": { + "dbt_tags": [ + "pii", + "marketplace", + "apps" + ], + "data_product_manager": "kirit" + }, + "name": "cleaned_bike_rides", + "packageName": "london_bike_analysis", + "rawCode": "-- Adding extra fields including if the bike was rented during peak time \r\nSELECT\r\n SUM(duration_minutes) as total_minutes\r\n , COUNT(rental_id) as total_bike_hires\r\n , ROUND(SUM(duration_minutes) / COUNT(rental_id), 2) AS average_duration\r\n , EXTRACT(month from start_date) as month\r\n , CASE\r\n WHEN EXTRACT(HOUR from TO_TIMESTAMP(start_date)) >= 6 AND EXTRACT(HOUR from TO_TIMESTAMP(start_date)) <= 10 THEN 'Morning Peak'\r\n WHEN EXTRACT(HOUR from TO_TIMESTAMP(start_date)) >= 16 AND EXTRACT(HOUR from TO_TIMESTAMP(start_date)) <= 19 THEN 'Evening Peak'\r\n ELSE 'Off-Peak'\r\n END AS start_peak_travel\r\n , IFF(start_station_id = end_station_id, True, False) as same_station_flag\r\n , start_station_id\r\n , start_station_name\r\n , end_station_id\r\n , end_station_name\r\nFROM {{ ref('raw_bike_hires') }}\r\nGROUP BY 4,5,6,7,8,9,10\r\nORDER BY total_minutes DESC", + "rawSql": "-- Adding extra fields including if the bike was rented during peak time \r\nSELECT\r\n SUM(duration_minutes) as total_minutes\r\n , COUNT(rental_id) as total_bike_hires\r\n , ROUND(SUM(duration_minutes) / COUNT(rental_id), 2) AS average_duration\r\n , EXTRACT(month from start_date) as month\r\n , CASE\r\n WHEN EXTRACT(HOUR from TO_TIMESTAMP(start_date)) >= 6 AND EXTRACT(HOUR from TO_TIMESTAMP(start_date)) <= 10 THEN 'Morning Peak'\r\n WHEN EXTRACT(HOUR from TO_TIMESTAMP(start_date)) >= 16 AND EXTRACT(HOUR from TO_TIMESTAMP(start_date)) <= 19 THEN 'Evening Peak'\r\n ELSE 'Off-Peak'\r\n END AS start_peak_travel\r\n , IFF(start_station_id = end_station_id, True, False) as same_station_flag\r\n , start_station_id\r\n , start_station_name\r\n , end_station_id\r\n , end_station_name\r\nFROM {{ ref('raw_bike_hires') }}\r\nGROUP BY 4,5,6,7,8,9,10\r\nORDER BY total_minutes DESC", + "runResults": [ + { + "status": "success", + "executeCompletedAt": "2024-07-25T00:00:55.507Z" + } + ], + "schema": "ride_share", + "tags": [ + "bike_ride_data" + ], + "uniqueId": "model.london_bike_analysis.cleaned_bike_rides" + }, + { + "alias": "cleaned_bike_rides_from_snapshot", + "columns": [ + { + "comment": null, + "description": null, + "meta": {}, + "name": "TOTAL_SECONDS", + "tags": [], + "type": "NUMBER" + }, + { + "comment": null, + "description": null, + "meta": {}, + "name": "TOTAL_BIKE_HIRES", + "tags": [], + "type": "NUMBER" + }, + { + "comment": null, + "description": null, + "meta": {}, + "name": "AVERAGE_DURATION", + "tags": [], + "type": "NUMBER" + }, + { + "comment": null, + "description": null, + "meta": {}, + "name": "MONTH", + "tags": [], + "type": "NUMBER" + }, + { + "comment": null, + "description": null, + "meta": {}, + "name": "START_PEAK_TRAVEL", + "tags": [], + "type": "TEXT" + }, + { + "comment": null, + "description": null, + "meta": {}, + "name": "SAME_STATION_FLAG", + "tags": [], + "type": "BOOLEAN" + }, + { + "comment": null, + "description": null, + "meta": {}, + "name": "START_STATION_ID", + "tags": [], + "type": "NUMBER" + }, + { + "comment": null, + "description": null, + "meta": {}, + "name": "START_STATION_NAME", + "tags": [], + "type": "TEXT" + 
}, + { + "comment": null, + "description": null, + "meta": {}, + "name": "END_STATION_ID", + "tags": [], + "type": "NUMBER" + }, + { + "comment": null, + "description": null, + "meta": {}, + "name": "END_STATION_NAME", + "tags": [], + "type": "TEXT" + } + ], + "compiledCode": "-- Adding extra fields including if the bike was rented during peak time \nSELECT\n SUM(duration) as total_seconds\n , COUNT(rental_id) as total_bike_hires\n , ROUND(SUM(duration) / COUNT(rental_id), 2) AS average_duration\n , EXTRACT(month from start_date) as month\n , CASE\n WHEN EXTRACT(HOUR from TO_TIMESTAMP(start_date)) >= 6 AND EXTRACT(HOUR from TO_TIMESTAMP(start_date)) <= 10 THEN 'Morning Peak'\n WHEN EXTRACT(HOUR from TO_TIMESTAMP(start_date)) >= 16 AND EXTRACT(HOUR from TO_TIMESTAMP(start_date)) <= 19 THEN 'Evening Peak'\n ELSE 'Off-Peak'\n END AS start_peak_travel\n , IFF(start_station_id = end_station_id, True, False) as same_station_flag\n , start_station_id\n , start_station_name\n , end_station_id\n , end_station_name\nFROM ACME.snapshots.cycle_hire_snapshot\nGROUP BY 4,5,6,7,8,9,10\nORDER BY total_seconds DESC", + "compiledSql": "-- Adding extra fields including if the bike was rented during peak time \nSELECT\n SUM(duration) as total_seconds\n , COUNT(rental_id) as total_bike_hires\n , ROUND(SUM(duration) / COUNT(rental_id), 2) AS average_duration\n , EXTRACT(month from start_date) as month\n , CASE\n WHEN EXTRACT(HOUR from TO_TIMESTAMP(start_date)) >= 6 AND EXTRACT(HOUR from TO_TIMESTAMP(start_date)) <= 10 THEN 'Morning Peak'\n WHEN EXTRACT(HOUR from TO_TIMESTAMP(start_date)) >= 16 AND EXTRACT(HOUR from TO_TIMESTAMP(start_date)) <= 19 THEN 'Evening Peak'\n ELSE 'Off-Peak'\n END AS start_peak_travel\n , IFF(start_station_id = end_station_id, True, False) as same_station_flag\n , start_station_id\n , start_station_name\n , end_station_id\n , end_station_name\nFROM ACME.snapshots.cycle_hire_snapshot\nGROUP BY 4,5,6,7,8,9,10\nORDER BY total_seconds DESC", + "database": "ACME", + "dependsOn": [ + "snapshot.london_bike_analysis.cycle_hire_snapshot" + ], + "description": "", + "environmentId": 114017, + "materializedType": "table", + "meta": {}, + "name": "cleaned_bike_rides_from_snapshot", + "packageName": "london_bike_analysis", + "rawCode": "-- Adding extra fields including if the bike was rented during peak time \r\nSELECT\r\n SUM(duration) as total_seconds\r\n , COUNT(rental_id) as total_bike_hires\r\n , ROUND(SUM(duration) / COUNT(rental_id), 2) AS average_duration\r\n , EXTRACT(month from start_date) as month\r\n , CASE\r\n WHEN EXTRACT(HOUR from TO_TIMESTAMP(start_date)) >= 6 AND EXTRACT(HOUR from TO_TIMESTAMP(start_date)) <= 10 THEN 'Morning Peak'\r\n WHEN EXTRACT(HOUR from TO_TIMESTAMP(start_date)) >= 16 AND EXTRACT(HOUR from TO_TIMESTAMP(start_date)) <= 19 THEN 'Evening Peak'\r\n ELSE 'Off-Peak'\r\n END AS start_peak_travel\r\n , IFF(start_station_id = end_station_id, True, False) as same_station_flag\r\n , start_station_id\r\n , start_station_name\r\n , end_station_id\r\n , end_station_name\r\nFROM {{ ref('cycle_hire_snapshot') }}\r\nGROUP BY 4,5,6,7,8,9,10\r\nORDER BY total_seconds DESC", + "rawSql": "-- Adding extra fields including if the bike was rented during peak time \r\nSELECT\r\n SUM(duration) as total_seconds\r\n , COUNT(rental_id) as total_bike_hires\r\n , ROUND(SUM(duration) / COUNT(rental_id), 2) AS average_duration\r\n , EXTRACT(month from start_date) as month\r\n , CASE\r\n WHEN EXTRACT(HOUR from TO_TIMESTAMP(start_date)) >= 6 AND EXTRACT(HOUR from TO_TIMESTAMP(start_date)) <= 10 
THEN 'Morning Peak'\r\n WHEN EXTRACT(HOUR from TO_TIMESTAMP(start_date)) >= 16 AND EXTRACT(HOUR from TO_TIMESTAMP(start_date)) <= 19 THEN 'Evening Peak'\r\n ELSE 'Off-Peak'\r\n END AS start_peak_travel\r\n , IFF(start_station_id = end_station_id, True, False) as same_station_flag\r\n , start_station_id\r\n , start_station_name\r\n , end_station_id\r\n , end_station_name\r\nFROM {{ ref('cycle_hire_snapshot') }}\r\nGROUP BY 4,5,6,7,8,9,10\r\nORDER BY total_seconds DESC", + "runResults": [ + { + "status": "success", + "executeCompletedAt": "2024-07-25T00:00:53.846Z" + } + ], + "schema": "ride_share", + "tags": [], + "uniqueId": "model.london_bike_analysis.cleaned_bike_rides_from_snapshot" + }, + { + "alias": "raw_bike_hires", + "columns": [ + { + "comment": null, + "description": null, + "meta": {}, + "name": "RENTAL_ID", + "tags": [], + "type": "NUMBER" + }, + { + "comment": null, + "description": null, + "meta": {}, + "name": "DURATION_SECONDS", + "tags": [], + "type": "NUMBER" + }, + { + "comment": null, + "description": null, + "meta": {}, + "name": "DURATION_MINUTES", + "tags": [], + "type": "NUMBER" + }, + { + "comment": null, + "description": null, + "meta": {}, + "name": "BIKE_ID", + "tags": [], + "type": "NUMBER" + }, + { + "comment": null, + "description": null, + "meta": {}, + "name": "START_DATE", + "tags": [], + "type": "TIMESTAMP_NTZ" + }, + { + "comment": null, + "description": null, + "meta": {}, + "name": "START_STATION_ID", + "tags": [], + "type": "NUMBER" + }, + { + "comment": null, + "description": null, + "meta": {}, + "name": "START_STATION_NAME", + "tags": [], + "type": "TEXT" + }, + { + "comment": null, + "description": null, + "meta": {}, + "name": "END_DATE", + "tags": [], + "type": "TIMESTAMP_NTZ" + }, + { + "comment": null, + "description": null, + "meta": {}, + "name": "END_STATION_ID", + "tags": [], + "type": "NUMBER" + }, + { + "comment": null, + "description": null, + "meta": {}, + "name": "END_STATION_NAME", + "tags": [], + "type": "TEXT" + } + ], + "compiledCode": "SELECT \n rental_id\n , duration as duration_seconds\n , duration / 60 as duration_minutes\n , bike_id\n , start_date\n , start_station_id\n , start_station_name\n , end_date\n , end_station_id\n , end_station_name\nFROM ACME.berlin_bicycles.cycle_hire\nWHERE EXTRACT(year from start_date) = 2017", + "compiledSql": "SELECT \n rental_id\n , duration as duration_seconds\n , duration / 60 as duration_minutes\n , bike_id\n , start_date\n , start_station_id\n , start_station_name\n , end_date\n , end_station_id\n , end_station_name\nFROM ACME.berlin_bicycles.cycle_hire\nWHERE EXTRACT(year from start_date) = 2017", + "database": "ACME", + "dependsOn": [ + "source.london_bike_analysis.berlin_bicycles.cycle_hire" + ], + "description": "This table contains all bike hires in London in 2017. 
This is the raw dataset so no cleaning or transformation.", + "environmentId": 114017, + "materializedType": "table", + "meta": {}, + "name": "raw_bike_hires", + "packageName": "london_bike_analysis", + "rawCode": "SELECT \r\n rental_id\r\n , duration as duration_seconds\r\n , duration / 60 as duration_minutes\r\n , bike_id\r\n , start_date\r\n , start_station_id\r\n , start_station_name\r\n , end_date\r\n , end_station_id\r\n , end_station_name\r\nFROM {{ source('berlin_bicycles', 'cycle_hire') }}\r\nWHERE EXTRACT(year from start_date) = 2017", + "rawSql": "SELECT \r\n rental_id\r\n , duration as duration_seconds\r\n , duration / 60 as duration_minutes\r\n , bike_id\r\n , start_date\r\n , start_station_id\r\n , start_station_name\r\n , end_date\r\n , end_station_id\r\n , end_station_name\r\nFROM {{ source('berlin_bicycles', 'cycle_hire') }}\r\nWHERE EXTRACT(year from start_date) = 2017", + "runResults": [ + { + "status": "success", + "executeCompletedAt": "2024-07-25T00:00:53.520Z" + } + ], + "schema": "ride_share", + "tags": [], + "uniqueId": "model.london_bike_analysis.raw_bike_hires" + }, + { + "alias": "raw_bike_stations", + "columns": [ + { + "comment": "Primary Key", + "description": "Primary Key", + "meta": {}, + "name": "ID", + "tags": [], + "type": "NUMBER" + }, + { + "comment": "", + "description": "", + "meta": {}, + "name": "STATION_NAME", + "tags": [], + "type": "TEXT" + }, + { + "comment": "", + "description": "", + "meta": {}, + "name": "BIKES_COUNT", + "tags": [], + "type": "NUMBER" + }, + { + "comment": "", + "description": "", + "meta": {}, + "name": "DOCKS_COUNT", + "tags": [], + "type": "NUMBER" + }, + { + "comment": "", + "description": "", + "meta": {}, + "name": "INSTALL_DATE", + "tags": [], + "type": "DATE" + }, + { + "comment": "", + "description": "", + "meta": {}, + "name": "REMOVAL_DATE", + "tags": [], + "type": "DATE" + } + ], + "compiledCode": "SELECT \n id\n , name as station_name\n , bikes_count\n , docks_count\n , install_date\n , removal_date\nFROM ACME.berlin_bicycles.cycle_stations\nWHERE install_date < '2017-01-01' and (removal_date < '2018-01-01' or removal_date is null)", + "compiledSql": "SELECT \n id\n , name as station_name\n , bikes_count\n , docks_count\n , install_date\n , removal_date\nFROM ACME.berlin_bicycles.cycle_stations\nWHERE install_date < '2017-01-01' and (removal_date < '2018-01-01' or removal_date is null)", + "database": "ACME", + "dependsOn": [ + "source.london_bike_analysis.berlin_bicycles.cycle_stations" + ], + "description": "This table contains all bike stations in the London area. This only includes stations installed before January 1, 2017 and doesn't include stations that were removed in 2017 (before Jan 1 2018). 
This is the raw data so no cleaning or transformation.", + "environmentId": 114017, + "materializedType": "table", + "meta": {}, + "name": "raw_bike_stations", + "packageName": "london_bike_analysis", + "rawCode": "SELECT \r\n id\r\n , name as station_name\r\n , bikes_count\r\n , docks_count\r\n , install_date\r\n , removal_date\r\nFROM {{ source('berlin_bicycles', 'cycle_stations') }}\r\nWHERE install_date < '2017-01-01' and (removal_date < '2018-01-01' or removal_date is null)", + "rawSql": "SELECT \r\n id\r\n , name as station_name\r\n , bikes_count\r\n , docks_count\r\n , install_date\r\n , removal_date\r\nFROM {{ source('berlin_bicycles', 'cycle_stations') }}\r\nWHERE install_date < '2017-01-01' and (removal_date < '2018-01-01' or removal_date is null)", + "runResults": [ + { + "status": "success", + "executeCompletedAt": "2024-07-25T00:00:53.449Z" + } + ], + "schema": "ride_share", + "tags": [], + "uniqueId": "model.london_bike_analysis.raw_bike_stations" + }, + { + "alias": "rides_by_month_2017", + "columns": [ + { + "comment": null, + "description": null, + "meta": {}, + "name": "TOTAL_MINUTES", + "tags": [], + "type": "NUMBER" + }, + { + "comment": null, + "description": null, + "meta": {}, + "name": "TOTAL_BIKE_HIRES", + "tags": [], + "type": "NUMBER" + }, + { + "comment": null, + "description": null, + "meta": {}, + "name": "AVERAGE_DURATION", + "tags": [], + "type": "NUMBER" + }, + { + "comment": null, + "description": null, + "meta": {}, + "name": "MONTH", + "tags": [], + "type": "NUMBER" + }, + { + "comment": null, + "description": null, + "meta": {}, + "name": "START_PEAK_TRAVEL", + "tags": [], + "type": "TEXT" + }, + { + "comment": null, + "description": null, + "meta": {}, + "name": "SAME_STATION_FLAG", + "tags": [], + "type": "BOOLEAN" + }, + { + "comment": null, + "description": null, + "meta": {}, + "name": "START_STATION_ID", + "tags": [], + "type": "NUMBER" + }, + { + "comment": null, + "description": null, + "meta": {}, + "name": "START_STATION_NAME", + "tags": [], + "type": "TEXT" + }, + { + "comment": null, + "description": null, + "meta": {}, + "name": "START_STATION_BIKES_COUNT", + "tags": [], + "type": "NUMBER" + }, + { + "comment": null, + "description": null, + "meta": {}, + "name": "START_STATION_DOCKS_COUNT", + "tags": [], + "type": "NUMBER" + }, + { + "comment": null, + "description": null, + "meta": {}, + "name": "START_STATION_INSTALL_DATE", + "tags": [], + "type": "DATE" + }, + { + "comment": null, + "description": null, + "meta": {}, + "name": "END_STATION_ID", + "tags": [], + "type": "NUMBER" + }, + { + "comment": null, + "description": null, + "meta": {}, + "name": "END_STATION_NAME", + "tags": [], + "type": "TEXT" + }, + { + "comment": null, + "description": null, + "meta": {}, + "name": "END_STATION_BIKES_COUNT", + "tags": [], + "type": "NUMBER" + }, + { + "comment": null, + "description": null, + "meta": {}, + "name": "END_STATION_DOCKS_COUNT", + "tags": [], + "type": "NUMBER" + }, + { + "comment": null, + "description": null, + "meta": {}, + "name": "END_STATION_INSTALL_DATE", + "tags": [], + "type": "DATE" + } + ], + "compiledCode": "WITH stations AS (\n\n SELECT *\n FROM ACME.ride_share.raw_bike_stations\n\n),\n\nrides AS (\n\n SELECT *\n FROM ACME.ride_share.cleaned_bike_rides\n\n),\n\nstart_stat_join AS (\n\n SELECT rides.*\n , stations.bikes_count as start_station_bikes_count\n , stations.docks_count as start_station_docks_count\n , stations.install_date as start_station_install_date\n FROM rides\n LEFT JOIN stations\n ON 
rides.start_station_id = stations.id\n)\n\nSELECT \n total_minutes \n , total_bike_hires \n , average_duration \n , month \n , start_peak_travel\n , same_station_flag\n , start_station_id\n , start_station_name\n , start_station_bikes_count \n , start_station_docks_count \n , start_station_install_date \n , end_station_id\n , end_station_name\n , stations.bikes_count as end_station_bikes_count\n , stations.docks_count as end_station_docks_count\n , stations.install_date as end_station_install_date\nFROM start_stat_join\nLEFT JOIN stations\nON start_stat_join.end_station_id = stations.id", + "compiledSql": "WITH stations AS (\n\n SELECT *\n FROM ACME.ride_share.raw_bike_stations\n\n),\n\nrides AS (\n\n SELECT *\n FROM ACME.ride_share.cleaned_bike_rides\n\n),\n\nstart_stat_join AS (\n\n SELECT rides.*\n , stations.bikes_count as start_station_bikes_count\n , stations.docks_count as start_station_docks_count\n , stations.install_date as start_station_install_date\n FROM rides\n LEFT JOIN stations\n ON rides.start_station_id = stations.id\n)\n\nSELECT \n total_minutes \n , total_bike_hires \n , average_duration \n , month \n , start_peak_travel\n , same_station_flag\n , start_station_id\n , start_station_name\n , start_station_bikes_count \n , start_station_docks_count \n , start_station_install_date \n , end_station_id\n , end_station_name\n , stations.bikes_count as end_station_bikes_count\n , stations.docks_count as end_station_docks_count\n , stations.install_date as end_station_install_date\nFROM start_stat_join\nLEFT JOIN stations\nON start_stat_join.end_station_id = stations.id", + "database": "ACME", + "dependsOn": [ + "model.london_bike_analysis.cleaned_bike_rides", + "model.london_bike_analysis.raw_bike_stations" + ], + "description": "", + "environmentId": 114017, + "materializedType": "table", + "meta": {}, + "name": "rides_by_month_2017", + "packageName": "london_bike_analysis", + "rawCode": "WITH stations AS (\r\n\r\n SELECT *\r\n FROM {{ ref('raw_bike_stations') }}\r\n\r\n),\r\n\r\nrides AS (\r\n\r\n SELECT *\r\n FROM {{ ref('cleaned_bike_rides') }}\r\n\r\n),\r\n\r\nstart_stat_join AS (\r\n\r\n SELECT rides.*\r\n , stations.bikes_count as start_station_bikes_count\r\n , stations.docks_count as start_station_docks_count\r\n , stations.install_date as start_station_install_date\r\n FROM rides\r\n LEFT JOIN stations\r\n ON rides.start_station_id = stations.id\r\n)\r\n\r\nSELECT \r\n total_minutes \r\n , total_bike_hires \r\n , average_duration \r\n , month \r\n , start_peak_travel\r\n , same_station_flag\r\n , start_station_id\r\n , start_station_name\r\n , start_station_bikes_count \r\n , start_station_docks_count \r\n , start_station_install_date \r\n , end_station_id\r\n , end_station_name\r\n , stations.bikes_count as end_station_bikes_count\r\n , stations.docks_count as end_station_docks_count\r\n , stations.install_date as end_station_install_date\r\nFROM start_stat_join\r\nLEFT JOIN stations\r\nON start_stat_join.end_station_id = stations.id", + "rawSql": "WITH stations AS (\r\n\r\n SELECT *\r\n FROM {{ ref('raw_bike_stations') }}\r\n\r\n),\r\n\r\nrides AS (\r\n\r\n SELECT *\r\n FROM {{ ref('cleaned_bike_rides') }}\r\n\r\n),\r\n\r\nstart_stat_join AS (\r\n\r\n SELECT rides.*\r\n , stations.bikes_count as start_station_bikes_count\r\n , stations.docks_count as start_station_docks_count\r\n , stations.install_date as start_station_install_date\r\n FROM rides\r\n LEFT JOIN stations\r\n ON rides.start_station_id = stations.id\r\n)\r\n\r\nSELECT \r\n total_minutes \r\n , 
total_bike_hires \r\n , average_duration \r\n , month \r\n , start_peak_travel\r\n , same_station_flag\r\n , start_station_id\r\n , start_station_name\r\n , start_station_bikes_count \r\n , start_station_docks_count \r\n , start_station_install_date \r\n , end_station_id\r\n , end_station_name\r\n , stations.bikes_count as end_station_bikes_count\r\n , stations.docks_count as end_station_docks_count\r\n , stations.install_date as end_station_install_date\r\nFROM start_stat_join\r\nLEFT JOIN stations\r\nON start_stat_join.end_station_id = stations.id", + "runResults": [ + { + "status": "success", + "executeCompletedAt": "2024-07-25T00:00:57.229Z" + } + ], + "schema": "ride_share", + "tags": [], + "uniqueId": "model.london_bike_analysis.rides_by_month_2017" + }, + { + "alias": "rides_by_month_start_station_2017", + "columns": [ + { + "comment": null, + "description": null, + "meta": {}, + "name": "TOTAL_MINUTES", + "tags": [], + "type": "NUMBER" + }, + { + "comment": null, + "description": null, + "meta": {}, + "name": "TOTAL_HOURS", + "tags": [], + "type": "NUMBER" + }, + { + "comment": null, + "description": null, + "meta": {}, + "name": "TOTAL_BIKE_HIRES", + "tags": [], + "type": "NUMBER" + }, + { + "comment": null, + "description": null, + "meta": {}, + "name": "AVERAGE_DURATION_IN_MINUTES", + "tags": [], + "type": "NUMBER" + }, + { + "comment": null, + "description": null, + "meta": {}, + "name": "MONTH", + "tags": [], + "type": "NUMBER" + }, + { + "comment": null, + "description": null, + "meta": {}, + "name": "START_PEAK_TRAVEL", + "tags": [], + "type": "TEXT" + }, + { + "comment": null, + "description": null, + "meta": {}, + "name": "SAME_STATION_FLAG", + "tags": [], + "type": "BOOLEAN" + }, + { + "comment": null, + "description": null, + "meta": {}, + "name": "START_STATION_ID", + "tags": [], + "type": "NUMBER" + }, + { + "comment": null, + "description": null, + "meta": {}, + "name": "START_STATION_NAME", + "tags": [], + "type": "TEXT" + }, + { + "comment": null, + "description": null, + "meta": {}, + "name": "START_STATION_BIKES_COUNT", + "tags": [], + "type": "NUMBER" + }, + { + "comment": null, + "description": null, + "meta": {}, + "name": "START_STATION_DOCKS_COUNT", + "tags": [], + "type": "NUMBER" + }, + { + "comment": null, + "description": null, + "meta": {}, + "name": "START_STATION_INSTALL_DATE", + "tags": [], + "type": "DATE" + } + ], + "compiledCode": "SELECT \n SUM(total_minutes) AS total_minutes\n , ROUND(SUM(total_minutes) / 60 ,2) AS total_hours\n , SUM(total_bike_hires) AS total_bike_hires\n , ROUND(SUM(total_minutes) / SUM(total_bike_hires), 2) AS average_duration_in_minutes\n , month\n , start_peak_travel\n , same_station_flag\n , start_station_id\n , start_station_name\n , start_station_bikes_count\n , start_station_docks_count\n , start_station_install_date\nFROM ACME.ride_share.rides_by_month_2017\nGROUP BY 5,6,7,8,9,10,11,12\nORDER BY total_bike_hires DESC", + "compiledSql": "SELECT \n SUM(total_minutes) AS total_minutes\n , ROUND(SUM(total_minutes) / 60 ,2) AS total_hours\n , SUM(total_bike_hires) AS total_bike_hires\n , ROUND(SUM(total_minutes) / SUM(total_bike_hires), 2) AS average_duration_in_minutes\n , month\n , start_peak_travel\n , same_station_flag\n , start_station_id\n , start_station_name\n , start_station_bikes_count\n , start_station_docks_count\n , start_station_install_date\nFROM ACME.ride_share.rides_by_month_2017\nGROUP BY 5,6,7,8,9,10,11,12\nORDER BY total_bike_hires DESC", + "database": "ACME", + "dependsOn": [ + 
"model.london_bike_analysis.rides_by_month_2017" + ], + "description": "", + "environmentId": 114017, + "materializedType": "table", + "meta": {}, + "name": "rides_by_month_start_station_2017", + "packageName": "london_bike_analysis", + "rawCode": "SELECT \r\n SUM(total_minutes) AS total_minutes\r\n , ROUND(SUM(total_minutes) / 60 ,2) AS total_hours\r\n , SUM(total_bike_hires) AS total_bike_hires\r\n , ROUND(SUM(total_minutes) / SUM(total_bike_hires), 2) AS average_duration_in_minutes\r\n , month\r\n , start_peak_travel\r\n , same_station_flag\r\n , start_station_id\r\n , start_station_name\r\n , start_station_bikes_count\r\n , start_station_docks_count\r\n , start_station_install_date\r\nFROM {{ ref('rides_by_month_2017') }}\r\nGROUP BY 5,6,7,8,9,10,11,12\r\nORDER BY total_bike_hires DESC", + "rawSql": "SELECT \r\n SUM(total_minutes) AS total_minutes\r\n , ROUND(SUM(total_minutes) / 60 ,2) AS total_hours\r\n , SUM(total_bike_hires) AS total_bike_hires\r\n , ROUND(SUM(total_minutes) / SUM(total_bike_hires), 2) AS average_duration_in_minutes\r\n , month\r\n , start_peak_travel\r\n , same_station_flag\r\n , start_station_id\r\n , start_station_name\r\n , start_station_bikes_count\r\n , start_station_docks_count\r\n , start_station_install_date\r\nFROM {{ ref('rides_by_month_2017') }}\r\nGROUP BY 5,6,7,8,9,10,11,12\r\nORDER BY total_bike_hires DESC", + "runResults": [ + { + "status": "success", + "executeCompletedAt": "2024-07-25T00:00:58.643Z" + } + ], + "schema": "ride_share", + "tags": [], + "uniqueId": "model.london_bike_analysis.rides_by_month_start_station_2017" + } + ] + } +} diff --git a/tests/dbt/cloud/fake_graphql_server/get_job_run_models/metaphor_subscriptions.json b/tests/dbt/cloud/fake_graphql_server/get_job_run_models/metaphor_subscriptions.json new file mode 100644 index 00000000..6c15025b --- /dev/null +++ b/tests/dbt/cloud/fake_graphql_server/get_job_run_models/metaphor_subscriptions.json @@ -0,0 +1,438 @@ +{ + "job": { + "models": [ + { + "alias": "churn_region_agg", + "columns": [ + { + "comment": "Auto-generated ID", + "description": "Auto-generated ID", + "meta": {}, + "name": "ID", + "tags": [], + "type": "NUMBER" + }, + { + "comment": "Customer ID", + "description": "Customer ID", + "meta": {}, + "name": "CUS_ID", + "tags": [], + "type": "NUMBER" + }, + { + "comment": "The reason for churn", + "description": "The reason for churn", + "meta": {}, + "name": "REASON", + "tags": [], + "type": "TEXT" + }, + { + "comment": "Timestamp at which the subscription is created.", + "description": "Timestamp at which the subscription is created.", + "meta": {}, + "name": "SUB_DATE", + "tags": [], + "type": "TIMESTAMP_NTZ" + }, + { + "comment": "Timestamp at which the subscription is cancelled.", + "description": "Timestamp at which the subscription is cancelled.", + "meta": {}, + "name": "CANCEL_DATE", + "tags": [], + "type": "TEXT" + } + ], + "compiledCode": "select \n 1 as id,\n subscriptions_growth.customer_id as cus_id,\n 'no' as reason,\n subscriptions_growth.created_at as sub_date,\n '2014-01-01 16:00:00' as cancel_date\nfrom DEMO_DB.METAPHOR.subscriptions_growth as subscriptions_growth", + "compiledSql": "select \n 1 as id,\n subscriptions_growth.customer_id as cus_id,\n 'no' as reason,\n subscriptions_growth.created_at as sub_date,\n '2014-01-01 16:00:00' as cancel_date\nfrom DEMO_DB.METAPHOR.subscriptions_growth as subscriptions_growth", + "database": "DEMO_DB", + "dependsOn": [ + "model.metaphor_subscriptions.subscriptions_growth" + ], + "description": "This dataset 
contains info about churn, all the accounts that have cancelled the subscriptions in the past.", + "environmentId": 151761, + "materializedType": "table", + "meta": { + "owner": [ + "mars@metaphor.io", + "pardhu@metaphor.io" + ] + }, + "name": "churn_region_agg", + "packageName": "metaphor_subscriptions", + "rawCode": "select \n 1 as id,\n subscriptions_growth.customer_id as cus_id,\n 'no' as reason,\n subscriptions_growth.created_at as sub_date,\n '2014-01-01 16:00:00' as cancel_date\nfrom {{ ref('subscriptions_growth') }} as subscriptions_growth", + "rawSql": "select \n 1 as id,\n subscriptions_growth.customer_id as cus_id,\n 'no' as reason,\n subscriptions_growth.created_at as sub_date,\n '2014-01-01 16:00:00' as cancel_date\nfrom {{ ref('subscriptions_growth') }} as subscriptions_growth", + "runResults": [ + { + "status": "success", + "executeCompletedAt": "2024-06-05T05:01:13.013Z" + } + ], + "schema": "METAPHOR", + "tags": [], + "uniqueId": "model.metaphor_subscriptions.churn_region_agg" + }, + { + "alias": "modular_campaigns", + "columns": [ + { + "comment": "Auto-generated ID", + "description": "Auto-generated ID", + "meta": {}, + "name": "ID", + "tags": [], + "type": "NUMBER" + }, + { + "comment": "Campaign name", + "description": "Campaign name", + "meta": {}, + "name": "NAME", + "tags": [ + "pii", + "name" + ], + "type": "TEXT" + }, + { + "comment": "Point of contact", + "description": "Point of contact", + "meta": {}, + "name": "POC", + "tags": [], + "type": "TEXT" + }, + { + "comment": "Budget for the campaign in dollars", + "description": "Budget for the campaign in dollars", + "meta": {}, + "name": "BUDGET", + "tags": [], + "type": "NUMBER" + }, + { + "comment": "Creation timestamp", + "description": "Creation timestamp", + "meta": {}, + "name": "DATE", + "tags": [], + "type": "TIMESTAMP_NTZ" + } + ], + "compiledCode": "select \n 1 as id,\n 'name' as name,\n 'poc' as poc,\n 100 as budget,\n churn_region_agg.sub_date as date\nfrom DEMO_DB.METAPHOR.churn_region_agg as churn_region_agg", + "compiledSql": "select \n 1 as id,\n 'name' as name,\n 'poc' as poc,\n 100 as budget,\n churn_region_agg.sub_date as date\nfrom DEMO_DB.METAPHOR.churn_region_agg as churn_region_agg", + "database": "DEMO_DB", + "dependsOn": [ + "model.metaphor_subscriptions.churn_region_agg" + ], + "description": "This dataset contains info about all modular campaigns. 
The id represents campaign id.", + "environmentId": 151761, + "materializedType": "table", + "meta": { + "owner": [ + "mars@metaphor.io", + "pardhu@metaphor.io" + ] + }, + "name": "modular_campaigns", + "packageName": "metaphor_subscriptions", + "rawCode": "select \n 1 as id,\n 'name' as name,\n 'poc' as poc,\n 100 as budget,\n churn_region_agg.sub_date as date\nfrom {{ ref('churn_region_agg') }} as churn_region_agg", + "rawSql": "select \n 1 as id,\n 'name' as name,\n 'poc' as poc,\n 100 as budget,\n churn_region_agg.sub_date as date\nfrom {{ ref('churn_region_agg') }} as churn_region_agg", + "runResults": [ + { + "status": "success", + "executeCompletedAt": "2024-06-05T05:01:14.419Z" + } + ], + "schema": "METAPHOR", + "tags": [], + "uniqueId": "model.metaphor_subscriptions.modular_campaigns" + }, + { + "alias": "subscriptions_core", + "columns": [ + { + "comment": "Subscription ID", + "description": "Subscription ID", + "meta": {}, + "name": "SUB_ID", + "tags": [], + "type": "NUMBER" + }, + { + "comment": "URL for the subscription", + "description": "URL for the subscription", + "meta": {}, + "name": "URL", + "tags": [], + "type": "TEXT" + }, + { + "comment": "Creation timestamp", + "description": "Creation timestamp", + "meta": {}, + "name": "CREATED_AT", + "tags": [], + "type": "TIMESTAMP_NTZ" + } + ], + "compiledCode": "select \n 1 as sub_id,\n 'url' as url,\n customer_profile.date as created_at\nfrom DEMO_DB.METAPHOR.CUSTOMER_PROFILE as customer_profile", + "compiledSql": "select \n 1 as sub_id,\n 'url' as url,\n customer_profile.date as created_at\nfrom DEMO_DB.METAPHOR.CUSTOMER_PROFILE as customer_profile", + "database": "DEMO_DB", + "dependsOn": [ + "source.metaphor_subscriptions.METAPHOR.CUSTOMER_PROFILE" + ], + "description": "This dataset contains all subscriptions core info.", + "environmentId": 151761, + "materializedType": "table", + "meta": {}, + "name": "subscriptions_core", + "packageName": "metaphor_subscriptions", + "rawCode": "select \n 1 as sub_id,\n 'url' as url,\n customer_profile.date as created_at\nfrom {{ source('METAPHOR', 'CUSTOMER_PROFILE') }} as customer_profile", + "rawSql": "select \n 1 as sub_id,\n 'url' as url,\n customer_profile.date as created_at\nfrom {{ source('METAPHOR', 'CUSTOMER_PROFILE') }} as customer_profile", + "runResults": [ + { + "status": "success", + "executeCompletedAt": "2024-06-05T05:01:09.507Z" + } + ], + "schema": "METAPHOR", + "tags": [ + "subscription" + ], + "uniqueId": "model.metaphor_subscriptions.subscriptions_core" + }, + { + "alias": "subscriptions_growth", + "columns": [ + { + "comment": "Unique ID of the Subscription. This is generated at the time of creation of a new subscription.", + "description": "Unique ID of the Subscription. This is generated at the time of creation of a new subscription.", + "meta": {}, + "name": "SUB_ID", + "tags": [], + "type": "NUMBER" + }, + { + "comment": "Unique ID of the Customer. This is generated end of registration flow for a user.", + "description": "Unique ID of the Customer. This is generated end of registration flow for a user.", + "meta": {}, + "name": "CUSTOMER_ID", + "tags": [], + "type": "NUMBER" + }, + { + "comment": "Timestamp at which the subscription is created. Always represented in Pacific Time Zone irrespective of the location of the user.", + "description": "Timestamp at which the subscription is created. 
Always represented in Pacific Time Zone irrespective of the location of the user.", + "meta": {}, + "name": "CREATED_AT", + "tags": [], + "type": "TIMESTAMP_NTZ" + } + ], + "compiledCode": "select \n change_raw.sub_id as sub_id,\n 1 as customer_id, \n base.created_at as created_at\nfrom DEMO_DB.METAPHOR.SUBSCRIPTIONS_CHANGE_RAW as change_raw\ninner join DEMO_DB.METAPHOR.SUBSCRIPTIONS_BASE as base on base.sub_id = change_raw.sub_id\ninner join DEMO_DB.METAPHOR.subscriptions_v2 as v2 on v2.sub_id = change_raw.sub_id", + "compiledSql": "select \n change_raw.sub_id as sub_id,\n 1 as customer_id, \n base.created_at as created_at\nfrom DEMO_DB.METAPHOR.SUBSCRIPTIONS_CHANGE_RAW as change_raw\ninner join DEMO_DB.METAPHOR.SUBSCRIPTIONS_BASE as base on base.sub_id = change_raw.sub_id\ninner join DEMO_DB.METAPHOR.subscriptions_v2 as v2 on v2.sub_id = change_raw.sub_id", + "database": "DEMO_DB", + "dependsOn": [ + "model.metaphor_subscriptions.subscriptions_v2", + "source.metaphor_subscriptions.METAPHOR.SUBSCRIPTIONS_BASE", + "source.metaphor_subscriptions.METAPHOR.SUBSCRIPTIONS_CHANGE_RAW" + ], + "description": "This dataset represents all the subscription info of our product. Each subscription is represented by unique subs_id.", + "environmentId": 151761, + "materializedType": "table", + "meta": { + "owner": [ + "mars@metaphor.io", + "pardhu@metaphor.io" + ] + }, + "name": "subscriptions_growth", + "packageName": "metaphor_subscriptions", + "rawCode": "select \n change_raw.sub_id as sub_id,\n 1 as customer_id, \n base.created_at as created_at\nfrom {{ source('METAPHOR', 'SUBSCRIPTIONS_CHANGE_RAW') }} as change_raw\ninner join {{ source('METAPHOR', 'SUBSCRIPTIONS_BASE') }} as base on base.sub_id = change_raw.sub_id\ninner join {{ ref('subscriptions_v2') }} as v2 on v2.sub_id = change_raw.sub_id", + "rawSql": "select \n change_raw.sub_id as sub_id,\n 1 as customer_id, \n base.created_at as created_at\nfrom {{ source('METAPHOR', 'SUBSCRIPTIONS_CHANGE_RAW') }} as change_raw\ninner join {{ source('METAPHOR', 'SUBSCRIPTIONS_BASE') }} as base on base.sub_id = change_raw.sub_id\ninner join {{ ref('subscriptions_v2') }} as v2 on v2.sub_id = change_raw.sub_id", + "runResults": [ + { + "status": "success", + "executeCompletedAt": "2024-06-05T05:01:11.401Z" + } + ], + "schema": "METAPHOR", + "tags": [ + "subscription", + "growth" + ], + "uniqueId": "model.metaphor_subscriptions.subscriptions_growth" + }, + { + "alias": "subscriptions_sales", + "columns": [ + { + "comment": "Auto-generated ID", + "description": "Auto-generated ID", + "meta": {}, + "name": "SALES_ID", + "tags": [], + "type": "NUMBER" + }, + { + "comment": "Subscription ID", + "description": "Subscription ID", + "meta": {}, + "name": "SUB_ID", + "tags": [], + "type": "NUMBER" + }, + { + "comment": "Sales representative ID", + "description": "Sales representative ID", + "meta": {}, + "name": "SALE_REP_ID", + "tags": [], + "type": "NUMBER" + }, + { + "comment": "Size of the deals in dollars", + "description": "Size of the deals in dollars", + "meta": {}, + "name": "DEAL_SIZE", + "tags": [], + "type": "NUMBER" + }, + { + "comment": "Creation timestamp", + "description": "Creation timestamp", + "meta": {}, + "name": "CREATED_AT", + "tags": [], + "type": "TEXT" + } + ], + "compiledCode": "select \n 1 as sales_id,\n subscriptions_growth.sub_id as sub_id,\n 1 as sale_rep_id,\n 1 as deal_size,\n '2014-01-01 16:00:00' as created_at\nfrom DEMO_DB.METAPHOR.subscriptions_growth as subscriptions_growth", + "compiledSql": "select \n 1 as sales_id,\n 
subscriptions_growth.sub_id as sub_id,\n 1 as sale_rep_id,\n 1 as deal_size,\n '2014-01-01 16:00:00' as created_at\nfrom DEMO_DB.METAPHOR.subscriptions_growth as subscriptions_growth", + "database": "DEMO_DB", + "dependsOn": [ + "model.metaphor_subscriptions.subscriptions_growth" + ], + "description": "This dataset represents the sales info for each subscription.", + "environmentId": 151761, + "materializedType": "table", + "meta": {}, + "name": "subscriptions_sales", + "packageName": "metaphor_subscriptions", + "rawCode": "select \n 1 as sales_id,\n subscriptions_growth.sub_id as sub_id,\n 1 as sale_rep_id,\n 1 as deal_size,\n '2014-01-01 16:00:00' as created_at\nfrom {{ ref('subscriptions_growth') }} as subscriptions_growth", + "rawSql": "select \n 1 as sales_id,\n subscriptions_growth.sub_id as sub_id,\n 1 as sale_rep_id,\n 1 as deal_size,\n '2014-01-01 16:00:00' as created_at\nfrom {{ ref('subscriptions_growth') }} as subscriptions_growth", + "runResults": [ + { + "status": "success", + "executeCompletedAt": "2024-06-05T05:01:13.048Z" + } + ], + "schema": "METAPHOR", + "tags": [ + "subscription" + ], + "uniqueId": "model.metaphor_subscriptions.subscriptions_sales" + }, + { + "alias": "subscriptions_v2", + "columns": [ + { + "comment": "Primary Key", + "description": "Primary Key", + "meta": {}, + "name": "SUB_ID", + "tags": [], + "type": "NUMBER" + }, + { + "comment": "Subscription short name", + "description": "Subscription short name", + "meta": {}, + "name": "SHORT_NAME", + "tags": [ + "pii", + "name" + ], + "type": "TEXT" + }, + { + "comment": "Subscription full display name", + "description": "Subscription full display name", + "meta": {}, + "name": "LONG_NAME", + "tags": [ + "pii", + "name" + ], + "type": "TEXT" + }, + { + "comment": "Available regions, comma separated", + "description": "Available regions, comma separated", + "meta": {}, + "name": "REGION", + "tags": [], + "type": "TEXT" + }, + { + "comment": "Subscription price in cents", + "description": "Subscription price in cents", + "meta": {}, + "name": "PRICE", + "tags": [], + "type": "NUMBER" + }, + { + "comment": "Type of subscription renewal", + "description": "Type of subscription renewal", + "meta": {}, + "name": "RENEW_TYPE", + "tags": [], + "type": "TEXT" + }, + { + "comment": "Creation timestamp", + "description": "Creation timestamp", + "meta": {}, + "name": "CREATED_AT", + "tags": [], + "type": "TIMESTAMP_NTZ" + } + ], + "compiledCode": "select \n 1 as sub_id,\n 'short name' as short_name,\n 'long name' as long_name,\n 'region' as region,\n 1 as price,\n 'type' as renew_type,\n customer_profile.date as created_at\nfrom DEMO_DB.METAPHOR.CUSTOMER_PROFILE as customer_profile", + "compiledSql": "select \n 1 as sub_id,\n 'short name' as short_name,\n 'long name' as long_name,\n 'region' as region,\n 1 as price,\n 'type' as renew_type,\n customer_profile.date as created_at\nfrom DEMO_DB.METAPHOR.CUSTOMER_PROFILE as customer_profile", + "database": "DEMO_DB", + "dependsOn": [ + "source.metaphor_subscriptions.METAPHOR.CUSTOMER_PROFILE" + ], + "description": "This dataset contains all newer subscriptions info.", + "environmentId": 151761, + "materializedType": "table", + "meta": { + "owner": [ + "mars@metaphor.io", + "pardhu@metaphor.io" + ] + }, + "name": "subscriptions_v2", + "packageName": "metaphor_subscriptions", + "rawCode": "select \n 1 as sub_id,\n 'short name' as short_name,\n 'long name' as long_name,\n 'region' as region,\n 1 as price,\n 'type' as renew_type,\n customer_profile.date as created_at\nfrom {{ 
source('METAPHOR', 'CUSTOMER_PROFILE') }} as customer_profile", + "rawSql": "select \n 1 as sub_id,\n 'short name' as short_name,\n 'long name' as long_name,\n 'region' as region,\n 1 as price,\n 'type' as renew_type,\n customer_profile.date as created_at\nfrom {{ source('METAPHOR', 'CUSTOMER_PROFILE') }} as customer_profile", + "runResults": [ + { + "status": "success", + "executeCompletedAt": "2024-06-05T05:01:09.524Z" + } + ], + "schema": "METAPHOR", + "tags": [ + "subscription" + ], + "uniqueId": "model.metaphor_subscriptions.subscriptions_v2" + } + ] + } +} diff --git a/tests/dbt/cloud/fake_graphql_server/get_job_run_snapshots/__init__.py b/tests/dbt/cloud/fake_graphql_server/get_job_run_snapshots/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/tests/dbt/cloud/fake_graphql_server/get_job_run_snapshots/endpoint.py b/tests/dbt/cloud/fake_graphql_server/get_job_run_snapshots/endpoint.py new file mode 100644 index 00000000..3de2102f --- /dev/null +++ b/tests/dbt/cloud/fake_graphql_server/get_job_run_snapshots/endpoint.py @@ -0,0 +1,12 @@ +import os +from typing import Any, Dict + +from ..targets import job_targets + +dir_path = os.path.dirname(os.path.realpath(__file__)) + + +def fake_GetJobRunSnapshots(variables: Dict[str, Any]): + target = job_targets[variables["jobId"]] + with open(f"{dir_path}/{target}.json") as f: + return f.read() diff --git a/tests/dbt/cloud/fake_graphql_server/get_job_run_snapshots/jaffle_shop.json b/tests/dbt/cloud/fake_graphql_server/get_job_run_snapshots/jaffle_shop.json new file mode 100644 index 00000000..b0af2966 --- /dev/null +++ b/tests/dbt/cloud/fake_graphql_server/get_job_run_snapshots/jaffle_shop.json @@ -0,0 +1,5 @@ +{ + "job": { + "snapshots": [] + } +} diff --git a/tests/dbt/cloud/fake_graphql_server/get_job_run_snapshots/london_bike_analysis.json b/tests/dbt/cloud/fake_graphql_server/get_job_run_snapshots/london_bike_analysis.json new file mode 100644 index 00000000..eb7b7776 --- /dev/null +++ b/tests/dbt/cloud/fake_graphql_server/get_job_run_snapshots/london_bike_analysis.json @@ -0,0 +1,179 @@ +{ + "job": { + "snapshots": [ + { + "alias": "cycle_hire_snapshot", + "columns": [ + { + "comment": null, + "description": null, + "index": 1, + "meta": {}, + "name": "RENTAL_ID", + "tags": [], + "type": "NUMBER" + }, + { + "comment": null, + "description": null, + "index": 2, + "meta": {}, + "name": "DURATION", + "tags": [], + "type": "NUMBER" + }, + { + "comment": null, + "description": null, + "index": 3, + "meta": {}, + "name": "BIKE_ID", + "tags": [], + "type": "NUMBER" + }, + { + "comment": null, + "description": null, + "index": 4, + "meta": {}, + "name": "END_DATE", + "tags": [], + "type": "TIMESTAMP_NTZ" + }, + { + "comment": null, + "description": null, + "index": 5, + "meta": {}, + "name": "END_STATION_ID", + "tags": [], + "type": "NUMBER" + }, + { + "comment": null, + "description": null, + "index": 6, + "meta": {}, + "name": "END_STATION_NAME", + "tags": [], + "type": "TEXT" + }, + { + "comment": null, + "description": null, + "index": 7, + "meta": {}, + "name": "START_DATE", + "tags": [], + "type": "TIMESTAMP_NTZ" + }, + { + "comment": null, + "description": null, + "index": 8, + "meta": {}, + "name": "START_STATION_ID", + "tags": [], + "type": "NUMBER" + }, + { + "comment": null, + "description": null, + "index": 9, + "meta": {}, + "name": "START_STATION_NAME", + "tags": [], + "type": "TEXT" + }, + { + "comment": null, + "description": null, + "index": 10, + "meta": {}, + "name": "END_STATION_LOGICAL_TERMINAL", + 
"tags": [], + "type": "NUMBER" + }, + { + "comment": null, + "description": null, + "index": 11, + "meta": {}, + "name": "START_STATION_LOGICAL_TERMINAL", + "tags": [], + "type": "NUMBER" + }, + { + "comment": null, + "description": null, + "index": 12, + "meta": {}, + "name": "END_STATION_PRIORITY_ID", + "tags": [], + "type": "NUMBER" + }, + { + "comment": null, + "description": null, + "index": 13, + "meta": {}, + "name": "PRICING_TIER", + "tags": [], + "type": "TEXT" + }, + { + "comment": null, + "description": null, + "index": 14, + "meta": {}, + "name": "DBT_SCD_ID", + "tags": [], + "type": "TEXT" + }, + { + "comment": null, + "description": null, + "index": 15, + "meta": {}, + "name": "DBT_UPDATED_AT", + "tags": [], + "type": "TIMESTAMP_NTZ" + }, + { + "comment": null, + "description": null, + "index": 16, + "meta": {}, + "name": "DBT_VALID_FROM", + "tags": [], + "type": "TIMESTAMP_NTZ" + }, + { + "comment": null, + "description": null, + "index": 17, + "meta": {}, + "name": "DBT_VALID_TO", + "tags": [], + "type": "TIMESTAMP_NTZ" + } + ], + "comment": "", + "compiledCode": "\n\n\nselect * from ACME.berlin_bicycles.cycle_hire", + "compiledSql": "\n\n\nselect * from ACME.berlin_bicycles.cycle_hire", + "database": "ACME", + "description": "", + "environmentId": 114017, + "meta": {}, + "name": "cycle_hire_snapshot", + "owner": "METAPHOR_ROLE", + "packageName": "london_bike_analysis", + "rawCode": "\n{{\n config(\n target_schema='snapshots',\n strategy='check',\n unique_key='bike_id',\n check_cols=['start_date', 'end_date']\n )\n}}\n\nselect * from {{ source('berlin_bicycles', 'cycle_hire') }}\n", + "rawSql": "\n{{\n config(\n target_schema='snapshots',\n strategy='check',\n unique_key='bike_id',\n check_cols=['start_date', 'end_date']\n )\n}}\n\nselect * from {{ source('berlin_bicycles', 'cycle_hire') }}\n", + "schema": "snapshots", + "tags": [], + "uniqueId": "snapshot.london_bike_analysis.cycle_hire_snapshot" + } + ] + } +} diff --git a/tests/dbt/cloud/fake_graphql_server/get_job_run_snapshots/metaphor_subscriptions.json b/tests/dbt/cloud/fake_graphql_server/get_job_run_snapshots/metaphor_subscriptions.json new file mode 100644 index 00000000..b0af2966 --- /dev/null +++ b/tests/dbt/cloud/fake_graphql_server/get_job_run_snapshots/metaphor_subscriptions.json @@ -0,0 +1,5 @@ +{ + "job": { + "snapshots": [] + } +} diff --git a/tests/dbt/cloud/fake_graphql_server/get_job_run_sources/__init__.py b/tests/dbt/cloud/fake_graphql_server/get_job_run_sources/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/tests/dbt/cloud/fake_graphql_server/get_job_run_sources/endpoint.py b/tests/dbt/cloud/fake_graphql_server/get_job_run_sources/endpoint.py new file mode 100644 index 00000000..fcca1702 --- /dev/null +++ b/tests/dbt/cloud/fake_graphql_server/get_job_run_sources/endpoint.py @@ -0,0 +1,12 @@ +import os +from typing import Any, Dict + +from ..targets import job_targets + +dir_path = os.path.dirname(os.path.realpath(__file__)) + + +def fake_GetJobRunSources(variables: Dict[str, Any]): + target = job_targets[variables["jobId"]] + with open(f"{dir_path}/{target}.json") as f: + return f.read() diff --git a/tests/dbt/cloud/fake_graphql_server/get_job_run_sources/jaffle_shop.json b/tests/dbt/cloud/fake_graphql_server/get_job_run_sources/jaffle_shop.json new file mode 100644 index 00000000..1ebbf0a0 --- /dev/null +++ b/tests/dbt/cloud/fake_graphql_server/get_job_run_sources/jaffle_shop.json @@ -0,0 +1,5 @@ +{ + "job": { + "sources": [] + } +} diff --git 
a/tests/dbt/cloud/fake_graphql_server/get_job_run_sources/london_bike_analysis.json b/tests/dbt/cloud/fake_graphql_server/get_job_run_sources/london_bike_analysis.json new file mode 100644 index 00000000..2a551669 --- /dev/null +++ b/tests/dbt/cloud/fake_graphql_server/get_job_run_sources/london_bike_analysis.json @@ -0,0 +1,128 @@ +{ + "job": { + "sources": [ + { + "columns": [ + { + "description": null, + "name": "RENTAL_ID" + }, + { + "description": "Duration of the bike trip in seconds.", + "name": "DURATION" + }, + { + "description": null, + "name": "BIKE_ID" + }, + { + "description": null, + "name": "END_DATE" + }, + { + "description": null, + "name": "END_STATION_ID" + }, + { + "description": null, + "name": "END_STATION_NAME" + }, + { + "description": null, + "name": "START_DATE" + }, + { + "description": null, + "name": "START_STATION_ID" + }, + { + "description": null, + "name": "START_STATION_NAME" + }, + { + "description": null, + "name": "END_STATION_LOGICAL_TERMINAL" + }, + { + "description": null, + "name": "START_STATION_LOGICAL_TERMINAL" + }, + { + "description": null, + "name": "END_STATION_PRIORITY_ID" + }, + { + "description": null, + "name": "PRICING_TIER" + } + ], + "database": "ACME", + "description": "", + "identifier": "cycle_hire", + "schema": "berlin_bicycles", + "uniqueId": "source.london_bike_analysis.berlin_bicycles.cycle_hire" + }, + { + "columns": [ + { + "description": null, + "name": "ID" + }, + { + "description": null, + "name": "INSTALLED" + }, + { + "description": null, + "name": "LATITUDE" + }, + { + "description": null, + "name": "LOCKED" + }, + { + "description": null, + "name": "LONGITUDE" + }, + { + "description": null, + "name": "NAME" + }, + { + "description": null, + "name": "BIKES_COUNT" + }, + { + "description": null, + "name": "DOCKS_COUNT" + }, + { + "description": null, + "name": "NBEMPTYDOCKS" + }, + { + "description": null, + "name": "TEMPORARY" + }, + { + "description": null, + "name": "TERMINAL_NAME" + }, + { + "description": null, + "name": "INSTALL_DATE" + }, + { + "description": null, + "name": "REMOVAL_DATE" + } + ], + "database": "ACME", + "description": "", + "identifier": "cycle_stations", + "schema": "berlin_bicycles", + "uniqueId": "source.london_bike_analysis.berlin_bicycles.cycle_stations" + } + ] + } +} diff --git a/tests/dbt/cloud/fake_graphql_server/get_job_run_sources/metaphor_subscriptions.json b/tests/dbt/cloud/fake_graphql_server/get_job_run_sources/metaphor_subscriptions.json new file mode 100644 index 00000000..944151c3 --- /dev/null +++ b/tests/dbt/cloud/fake_graphql_server/get_job_run_sources/metaphor_subscriptions.json @@ -0,0 +1,97 @@ +{ + "job": { + "sources": [ + { + "columns": [ + { + "description": "Auto-generated ID", + "name": "ID" + }, + { + "description": "Customer's first name", + "name": "FIRST_NAME" + }, + { + "description": "Customer's last name", + "name": "LAST_NAME" + }, + { + "description": "Customer's email address", + "name": "EMAIL" + }, + { + "description": "Customer's company", + "name": "COMPANY" + }, + { + "description": "Creation timestamp", + "name": "DATE" + } + ], + "database": "DEMO_DB", + "description": "This dataset contains profile info of each customer. 
E.g first name, last name, email, company name etc.", + "identifier": "CUSTOMER_PROFILE", + "schema": "METAPHOR", + "uniqueId": "source.metaphor_subscriptions.METAPHOR.CUSTOMER_PROFILE" + }, + { + "columns": [ + { + "description": "Creation timestamp", + "name": "CREATED_AT" + }, + { + "description": "Subscription full display name", + "name": "LONG_NAME" + }, + { + "description": "Subscription price in cents", + "name": "PRICE" + }, + { + "description": "Type of subscription renewal", + "name": "RENEW_TYPE" + }, + { + "description": "Subscription short name", + "name": "SHORT_NAME" + }, + { + "description": "Primary Key", + "name": "SUB_ID" + } + ], + "database": "DEMO_DB", + "description": "This dataset contains all subscriptions info.", + "identifier": "SUBSCRIPTIONS_BASE", + "schema": "METAPHOR", + "uniqueId": "source.metaphor_subscriptions.METAPHOR.SUBSCRIPTIONS_BASE" + }, + { + "columns": [ + { + "description": "Change type", + "name": "CHANGE_TYPE" + }, + { + "description": "Primary Key", + "name": "CHG_ID" + }, + { + "description": "Creation timestamp", + "name": "CREATED_AT" + }, + { + "description": "Subscription ID", + "name": "SUB_ID" + } + ], + "database": "DEMO_DB", + "description": "This dataset represents all the raw subscription changes info of our product. Each subscription is represented by unique subs_id and each change has a unique chng_id.", + "identifier": "SUBSCRIPTIONS_CHANGE_RAW", + "schema": "METAPHOR", + "uniqueId": "source.metaphor_subscriptions.METAPHOR.SUBSCRIPTIONS_CHANGE_RAW" + } + ] + } +} diff --git a/tests/dbt/cloud/fake_graphql_server/get_job_run_tests/__init__.py b/tests/dbt/cloud/fake_graphql_server/get_job_run_tests/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/tests/dbt/cloud/fake_graphql_server/get_job_run_tests/endpoint.py b/tests/dbt/cloud/fake_graphql_server/get_job_run_tests/endpoint.py new file mode 100644 index 00000000..b7be7b82 --- /dev/null +++ b/tests/dbt/cloud/fake_graphql_server/get_job_run_tests/endpoint.py @@ -0,0 +1,12 @@ +import os +from typing import Any, Dict + +from ..targets import job_targets + +dir_path = os.path.dirname(os.path.realpath(__file__)) + + +def fake_GetJobRunTests(variables: Dict[str, Any]): + target = job_targets[variables["jobId"]] + with open(f"{dir_path}/{target}.json") as f: + return f.read() diff --git a/tests/dbt/cloud/fake_graphql_server/get_job_run_tests/jaffle_shop.json b/tests/dbt/cloud/fake_graphql_server/get_job_run_tests/jaffle_shop.json new file mode 100644 index 00000000..08b0711f --- /dev/null +++ b/tests/dbt/cloud/fake_graphql_server/get_job_run_tests/jaffle_shop.json @@ -0,0 +1,223 @@ +{ + "job": { + "tests": [ + { + "columnName": "customer_type", + "compiledSql": "\\n \\n \\n\\nwith all_values as (\\n\\n select\\n customer_type as value_field,\\n count(*) as n_records\\n\\n from acme.jaffle_shop.customers\\n group by customer_type\\n\\n)\\n\\nselect *\\nfrom all_values\\nwhere value_field not in (\\n 'new','returning'\\n)\\n\\n\\n", + "compiledCode": "\\n \\n \\n\\nwith all_values as (\\n\\n select\\n customer_type as value_field,\\n count(*) as n_records\\n\\n from acme.jaffle_shop.customers\\n group by customer_type\\n\\n)\\n\\nselect *\\nfrom all_values\\nwhere value_field not in (\\n 'new','returning'\\n)\\n\\n\\n", + "dependsOn": [ + "macro.dbt.get_where_subquery", + "macro.dbt.test_accepted_values", + "model.jaffle_shop.customers" + ], + "name": "accepted_values_customers_customer_type__new__returning", + "uniqueId": 
"test.jaffle_shop.accepted_values_customers_customer_type__new__returning.d12f0947c8" + }, + { + "columnName": "customer_id", + "compiledSql": "\\n \\n \\n\\n\\n\\nselect customer_id\\nfrom acme.jaffle_shop.customers\\nwhere customer_id is null\\n\\n\\n", + "compiledCode": "\\n \\n \\n\\n\\n\\nselect customer_id\\nfrom acme.jaffle_shop.customers\\nwhere customer_id is null\\n\\n\\n", + "dependsOn": [ + "macro.dbt.get_where_subquery", + "macro.dbt.test_not_null", + "model.jaffle_shop.customers" + ], + "name": "not_null_customers_customer_id", + "uniqueId": "test.jaffle_shop.not_null_customers_customer_id.5c9bf9911d" + }, + { + "columnName": "order_id", + "compiledSql": "\\n \\n \\n\\n\\n\\nselect order_id\\nfrom acme.jaffle_shop.orders\\nwhere order_id is null\\n\\n\\n", + "compiledCode": "\\n \\n \\n\\n\\n\\nselect order_id\\nfrom acme.jaffle_shop.orders\\nwhere order_id is null\\n\\n\\n", + "dependsOn": [ + "macro.dbt.get_where_subquery", + "macro.dbt.test_not_null", + "model.jaffle_shop.orders" + ], + "name": "not_null_orders_order_id", + "uniqueId": "test.jaffle_shop.not_null_orders_order_id.cf6c17daed" + }, + { + "columnName": "customer_id", + "compiledSql": "\\n \\n \\n\\n\\n\\nselect customer_id\\nfrom acme.jaffle_shop.stg_customers\\nwhere customer_id is null\\n\\n\\n", + "compiledCode": "\\n \\n \\n\\n\\n\\nselect customer_id\\nfrom acme.jaffle_shop.stg_customers\\nwhere customer_id is null\\n\\n\\n", + "dependsOn": [ + "macro.dbt.get_where_subquery", + "macro.dbt.test_not_null", + "model.jaffle_shop.stg_customers" + ], + "name": "not_null_stg_customers_customer_id", + "uniqueId": "test.jaffle_shop.not_null_stg_customers_customer_id.e2cfb1f9aa" + }, + { + "columnName": "location_id", + "compiledSql": "\\n \\n \\n\\n\\n\\nselect location_id\\nfrom acme.jaffle_shop.stg_locations\\nwhere location_id is null\\n\\n\\n", + "compiledCode": "\\n \\n \\n\\n\\n\\nselect location_id\\nfrom acme.jaffle_shop.stg_locations\\nwhere location_id is null\\n\\n\\n", + "dependsOn": [ + "macro.dbt.get_where_subquery", + "macro.dbt.test_not_null", + "model.jaffle_shop.stg_locations" + ], + "name": "not_null_stg_locations_location_id", + "uniqueId": "test.jaffle_shop.not_null_stg_locations_location_id.3d237927d2" + }, + { + "columnName": "order_item_id", + "compiledSql": "\\n \\n \\n\\n\\n\\nselect order_item_id\\nfrom acme.jaffle_shop.stg_order_items\\nwhere order_item_id is null\\n\\n\\n", + "compiledCode": "\\n \\n \\n\\n\\n\\nselect order_item_id\\nfrom acme.jaffle_shop.stg_order_items\\nwhere order_item_id is null\\n\\n\\n", + "dependsOn": [ + "macro.dbt.get_where_subquery", + "macro.dbt.test_not_null", + "model.jaffle_shop.stg_order_items" + ], + "name": "not_null_stg_order_items_order_item_id", + "uniqueId": "test.jaffle_shop.not_null_stg_order_items_order_item_id.26a7e2bc35" + }, + { + "columnName": "order_id", + "compiledSql": "\\n \\n \\n\\n\\n\\nselect order_id\\nfrom acme.jaffle_shop.stg_orders\\nwhere order_id is null\\n\\n\\n", + "compiledCode": "\\n \\n \\n\\n\\n\\nselect order_id\\nfrom acme.jaffle_shop.stg_orders\\nwhere order_id is null\\n\\n\\n", + "dependsOn": [ + "macro.dbt.get_where_subquery", + "macro.dbt.test_not_null", + "model.jaffle_shop.stg_orders" + ], + "name": "not_null_stg_orders_order_id", + "uniqueId": "test.jaffle_shop.not_null_stg_orders_order_id.81cfe2fe64" + }, + { + "columnName": "product_id", + "compiledSql": "\\n \\n \\n\\n\\n\\nselect product_id\\nfrom acme.jaffle_shop.stg_products\\nwhere product_id is null\\n\\n\\n", + "compiledCode": "\\n \\n 
\\n\\n\\n\\nselect product_id\\nfrom acme.jaffle_shop.stg_products\\nwhere product_id is null\\n\\n\\n", + "dependsOn": [ + "macro.dbt.get_where_subquery", + "macro.dbt.test_not_null", + "model.jaffle_shop.stg_products" + ], + "name": "not_null_stg_products_product_id", + "uniqueId": "test.jaffle_shop.not_null_stg_products_product_id.6373b0acf3" + }, + { + "columnName": "supply_uuid", + "compiledSql": "\\n \\n \\n\\n\\n\\nselect supply_uuid\\nfrom acme.jaffle_shop.stg_supplies\\nwhere supply_uuid is null\\n\\n\\n", + "compiledCode": "\\n \\n \\n\\n\\n\\nselect supply_uuid\\nfrom acme.jaffle_shop.stg_supplies\\nwhere supply_uuid is null\\n\\n\\n", + "dependsOn": [ + "macro.dbt.get_where_subquery", + "macro.dbt.test_not_null", + "model.jaffle_shop.stg_supplies" + ], + "name": "not_null_stg_supplies_supply_uuid", + "uniqueId": "test.jaffle_shop.not_null_stg_supplies_supply_uuid.515c6eda6d" + }, + { + "columnName": "customer_id", + "compiledSql": "\\n \\n \\n\\nwith child as (\\n select customer_id as from_field\\n from acme.jaffle_shop.orders\\n where customer_id is not null\\n),\\n\\nparent as (\\n select customer_id as to_field\\n from acme.jaffle_shop.stg_customers\\n)\\n\\nselect\\n from_field\\n\\nfrom child\\nleft join parent\\n on child.from_field = parent.to_field\\n\\nwhere parent.to_field is null\\n\\n\\n", + "compiledCode": "\\n \\n \\n\\nwith child as (\\n select customer_id as from_field\\n from acme.jaffle_shop.orders\\n where customer_id is not null\\n),\\n\\nparent as (\\n select customer_id as to_field\\n from acme.jaffle_shop.stg_customers\\n)\\n\\nselect\\n from_field\\n\\nfrom child\\nleft join parent\\n on child.from_field = parent.to_field\\n\\nwhere parent.to_field is null\\n\\n\\n", + "dependsOn": [ + "macro.dbt.get_where_subquery", + "macro.dbt.test_relationships", + "model.jaffle_shop.orders", + "model.jaffle_shop.stg_customers" + ], + "name": "relationships_orders_customer_id__customer_id__ref_stg_customers_", + "uniqueId": "test.jaffle_shop.relationships_orders_customer_id__customer_id__ref_stg_customers_.918495ce16" + }, + { + "columnName": "customer_id", + "compiledSql": "\\n \\n \\n\\nselect\\n customer_id as unique_field,\\n count(*) as n_records\\n\\nfrom acme.jaffle_shop.customers\\nwhere customer_id is not null\\ngroup by customer_id\\nhaving count(*) > 1\\n\\n\\n", + "compiledCode": "\\n \\n \\n\\nselect\\n customer_id as unique_field,\\n count(*) as n_records\\n\\nfrom acme.jaffle_shop.customers\\nwhere customer_id is not null\\ngroup by customer_id\\nhaving count(*) > 1\\n\\n\\n", + "dependsOn": [ + "macro.dbt.get_where_subquery", + "macro.dbt.test_unique", + "model.jaffle_shop.customers" + ], + "name": "unique_customers_customer_id", + "uniqueId": "test.jaffle_shop.unique_customers_customer_id.c5af1ff4b1" + }, + { + "columnName": "order_id", + "compiledSql": "\\n \\n \\n\\nselect\\n order_id as unique_field,\\n count(*) as n_records\\n\\nfrom acme.jaffle_shop.orders\\nwhere order_id is not null\\ngroup by order_id\\nhaving count(*) > 1\\n\\n\\n", + "compiledCode": "\\n \\n \\n\\nselect\\n order_id as unique_field,\\n count(*) as n_records\\n\\nfrom acme.jaffle_shop.orders\\nwhere order_id is not null\\ngroup by order_id\\nhaving count(*) > 1\\n\\n\\n", + "dependsOn": [ + "macro.dbt.get_where_subquery", + "macro.dbt.test_unique", + "model.jaffle_shop.orders" + ], + "name": "unique_orders_order_id", + "uniqueId": "test.jaffle_shop.unique_orders_order_id.fed79b3a6e" + }, + { + "columnName": "customer_id", + "compiledSql": "\\n \\n \\n\\nselect\\n 
customer_id as unique_field,\\n count(*) as n_records\\n\\nfrom acme.jaffle_shop.stg_customers\\nwhere customer_id is not null\\ngroup by customer_id\\nhaving count(*) > 1\\n\\n\\n", + "compiledCode": "\\n \\n \\n\\nselect\\n customer_id as unique_field,\\n count(*) as n_records\\n\\nfrom acme.jaffle_shop.stg_customers\\nwhere customer_id is not null\\ngroup by customer_id\\nhaving count(*) > 1\\n\\n\\n", + "dependsOn": [ + "macro.dbt.get_where_subquery", + "macro.dbt.test_unique", + "model.jaffle_shop.stg_customers" + ], + "name": "unique_stg_customers_customer_id", + "uniqueId": "test.jaffle_shop.unique_stg_customers_customer_id.c7614daada" + }, + { + "columnName": "location_id", + "compiledSql": "\\n \\n \\n\\nselect\\n location_id as unique_field,\\n count(*) as n_records\\n\\nfrom acme.jaffle_shop.stg_locations\\nwhere location_id is not null\\ngroup by location_id\\nhaving count(*) > 1\\n\\n\\n", + "compiledCode": "\\n \\n \\n\\nselect\\n location_id as unique_field,\\n count(*) as n_records\\n\\nfrom acme.jaffle_shop.stg_locations\\nwhere location_id is not null\\ngroup by location_id\\nhaving count(*) > 1\\n\\n\\n", + "dependsOn": [ + "macro.dbt.get_where_subquery", + "macro.dbt.test_unique", + "model.jaffle_shop.stg_locations" + ], + "name": "unique_stg_locations_location_id", + "uniqueId": "test.jaffle_shop.unique_stg_locations_location_id.2e2fc58ecc" + }, + { + "columnName": "order_item_id", + "compiledSql": "\\n \\n \\n\\nselect\\n order_item_id as unique_field,\\n count(*) as n_records\\n\\nfrom acme.jaffle_shop.stg_order_items\\nwhere order_item_id is not null\\ngroup by order_item_id\\nhaving count(*) > 1\\n\\n\\n", + "compiledCode": "\\n \\n \\n\\nselect\\n order_item_id as unique_field,\\n count(*) as n_records\\n\\nfrom acme.jaffle_shop.stg_order_items\\nwhere order_item_id is not null\\ngroup by order_item_id\\nhaving count(*) > 1\\n\\n\\n", + "dependsOn": [ + "macro.dbt.get_where_subquery", + "macro.dbt.test_unique", + "model.jaffle_shop.stg_order_items" + ], + "name": "unique_stg_order_items_order_item_id", + "uniqueId": "test.jaffle_shop.unique_stg_order_items_order_item_id.90e333a108" + }, + { + "columnName": "order_id", + "compiledSql": "\\n \\n \\n\\nselect\\n order_id as unique_field,\\n count(*) as n_records\\n\\nfrom acme.jaffle_shop.stg_orders\\nwhere order_id is not null\\ngroup by order_id\\nhaving count(*) > 1\\n\\n\\n", + "compiledCode": "\\n \\n \\n\\nselect\\n order_id as unique_field,\\n count(*) as n_records\\n\\nfrom acme.jaffle_shop.stg_orders\\nwhere order_id is not null\\ngroup by order_id\\nhaving count(*) > 1\\n\\n\\n", + "dependsOn": [ + "macro.dbt.get_where_subquery", + "macro.dbt.test_unique", + "model.jaffle_shop.stg_orders" + ], + "name": "unique_stg_orders_order_id", + "uniqueId": "test.jaffle_shop.unique_stg_orders_order_id.e3b841c71a" + }, + { + "columnName": "product_id", + "compiledSql": "\\n \\n \\n\\nselect\\n product_id as unique_field,\\n count(*) as n_records\\n\\nfrom acme.jaffle_shop.stg_products\\nwhere product_id is not null\\ngroup by product_id\\nhaving count(*) > 1\\n\\n\\n", + "compiledCode": "\\n \\n \\n\\nselect\\n product_id as unique_field,\\n count(*) as n_records\\n\\nfrom acme.jaffle_shop.stg_products\\nwhere product_id is not null\\ngroup by product_id\\nhaving count(*) > 1\\n\\n\\n", + "dependsOn": [ + "macro.dbt.get_where_subquery", + "macro.dbt.test_unique", + "model.jaffle_shop.stg_products" + ], + "name": "unique_stg_products_product_id", + "uniqueId": "test.jaffle_shop.unique_stg_products_product_id.7d950a1467" 
+ }, + { + "columnName": "supply_uuid", + "compiledSql": "\\n \\n \\n\\nselect\\n supply_uuid as unique_field,\\n count(*) as n_records\\n\\nfrom acme.jaffle_shop.stg_supplies\\nwhere supply_uuid is not null\\ngroup by supply_uuid\\nhaving count(*) > 1\\n\\n\\n", + "compiledCode": "\\n \\n \\n\\nselect\\n supply_uuid as unique_field,\\n count(*) as n_records\\n\\nfrom acme.jaffle_shop.stg_supplies\\nwhere supply_uuid is not null\\ngroup by supply_uuid\\nhaving count(*) > 1\\n\\n\\n", + "dependsOn": [ + "macro.dbt.get_where_subquery", + "macro.dbt.test_unique", + "model.jaffle_shop.stg_supplies" + ], + "name": "unique_stg_supplies_supply_uuid", + "uniqueId": "test.jaffle_shop.unique_stg_supplies_supply_uuid.c9e3edcfed" + } + ] + } +} diff --git a/tests/dbt/cloud/fake_graphql_server/get_job_run_tests/london_bike_analysis.json b/tests/dbt/cloud/fake_graphql_server/get_job_run_tests/london_bike_analysis.json new file mode 100644 index 00000000..4826b0d7 --- /dev/null +++ b/tests/dbt/cloud/fake_graphql_server/get_job_run_tests/london_bike_analysis.json @@ -0,0 +1,5 @@ +{ + "job": { + "tests": [] + } +} diff --git a/tests/dbt/cloud/fake_graphql_server/get_job_run_tests/metaphor_subscriptions.json b/tests/dbt/cloud/fake_graphql_server/get_job_run_tests/metaphor_subscriptions.json new file mode 100644 index 00000000..4826b0d7 --- /dev/null +++ b/tests/dbt/cloud/fake_graphql_server/get_job_run_tests/metaphor_subscriptions.json @@ -0,0 +1,5 @@ +{ + "job": { + "tests": [] + } +} diff --git a/tests/dbt/cloud/fake_graphql_server/get_macro_arguments/__init__.py b/tests/dbt/cloud/fake_graphql_server/get_macro_arguments/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/tests/dbt/cloud/fake_graphql_server/get_macro_arguments/endpoint.py b/tests/dbt/cloud/fake_graphql_server/get_macro_arguments/endpoint.py new file mode 100644 index 00000000..7e292ebd --- /dev/null +++ b/tests/dbt/cloud/fake_graphql_server/get_macro_arguments/endpoint.py @@ -0,0 +1,12 @@ +import os +from typing import Any, Dict + +from ..targets import environment_targets + +dir_path = os.path.dirname(os.path.realpath(__file__)) + + +def fake_GetMacroArguments(variables: Dict[str, Any]): + target = environment_targets[variables["environmentId"]] + with open(f"{dir_path}/{target}.json") as f: + return f.read() diff --git a/tests/dbt/cloud/fake_graphql_server/get_macro_arguments/jaffle_shop.json b/tests/dbt/cloud/fake_graphql_server/get_macro_arguments/jaffle_shop.json new file mode 100644 index 00000000..e3b10119 --- /dev/null +++ b/tests/dbt/cloud/fake_graphql_server/get_macro_arguments/jaffle_shop.json @@ -0,0 +1,632 @@ +{ + "environment": { + "definition": { + "macros": { + "edges": [ + { + "node": { + "arguments": [], + "uniqueId": "macro.dbt_date.bigquery__convert_timezone" + } + }, + { + "node": { + "arguments": [], + "uniqueId": "macro.dbt_date.bigquery__date_part" + } + }, + { + "node": { + "arguments": [], + "uniqueId": "macro.dbt_date.bigquery__day_name" + } + }, + { + "node": { + "arguments": [], + "uniqueId": "macro.dbt_date.bigquery__day_of_week" + } + }, + { + "node": { + "arguments": [], + "uniqueId": "macro.dbt_date.bigquery__from_unixtimestamp" + } + }, + { + "node": { + "arguments": [], + "uniqueId": "macro.dbt_date.bigquery__get_base_dates" + } + }, + { + "node": { + "arguments": [], + "uniqueId": "macro.dbt_date.bigquery__month_name" + } + }, + { + "node": { + "arguments": [], + "uniqueId": "macro.dbt_date.bigquery__to_unixtimestamp" + } + }, + { + "node": { + "arguments": [], + "uniqueId": 
"macro.dbt_date.convert_timezone" + } + }, + { + "node": { + "arguments": [], + "uniqueId": "macro.dbt_date.date_part" + } + }, + { + "node": { + "arguments": [], + "uniqueId": "macro.dbt_date.date_spine" + } + }, + { + "node": { + "arguments": [], + "uniqueId": "macro.dbt_date.day_name" + } + }, + { + "node": { + "arguments": [], + "uniqueId": "macro.dbt_date.day_of_month" + } + }, + { + "node": { + "arguments": [], + "uniqueId": "macro.dbt_date.day_of_week" + } + }, + { + "node": { + "arguments": [], + "uniqueId": "macro.dbt_date.day_of_year" + } + }, + { + "node": { + "arguments": [], + "uniqueId": "macro.dbt_date.default__convert_timezone" + } + }, + { + "node": { + "arguments": [], + "uniqueId": "macro.dbt_date.default__date_part" + } + }, + { + "node": { + "arguments": [], + "uniqueId": "macro.dbt_date.default__date_spine" + } + }, + { + "node": { + "arguments": [], + "uniqueId": "macro.dbt_date.default__day_name" + } + }, + { + "node": { + "arguments": [], + "uniqueId": "macro.dbt_date.default__day_of_week" + } + }, + { + "node": { + "arguments": [], + "uniqueId": "macro.dbt_date.default__day_of_year" + } + }, + { + "node": { + "arguments": [], + "uniqueId": "macro.dbt_date.default__from_unixtimestamp" + } + }, + { + "node": { + "arguments": [], + "uniqueId": "macro.dbt_date.default__generate_series" + } + }, + { + "node": { + "arguments": [], + "uniqueId": "macro.dbt_date.default__get_base_dates" + } + }, + { + "node": { + "arguments": [], + "uniqueId": "macro.dbt_date.default__get_date_dimension" + } + }, + { + "node": { + "arguments": [], + "uniqueId": "macro.dbt_date.default__get_fiscal_year_dates" + } + }, + { + "node": { + "arguments": [], + "uniqueId": "macro.dbt_date.default__get_intervals_between" + } + }, + { + "node": { + "arguments": [], + "uniqueId": "macro.dbt_date.default__get_powers_of_two" + } + }, + { + "node": { + "arguments": [], + "uniqueId": "macro.dbt_date.default__iso_week_end" + } + }, + { + "node": { + "arguments": [], + "uniqueId": "macro.dbt_date.default__iso_week_of_year" + } + }, + { + "node": { + "arguments": [], + "uniqueId": "macro.dbt_date.default__iso_week_start" + } + }, + { + "node": { + "arguments": [], + "uniqueId": "macro.dbt_date.default__month_name" + } + }, + { + "node": { + "arguments": [], + "uniqueId": "macro.dbt_date.default__to_unixtimestamp" + } + }, + { + "node": { + "arguments": [], + "uniqueId": "macro.dbt_date.default__week_end" + } + }, + { + "node": { + "arguments": [], + "uniqueId": "macro.dbt_date.default__week_of_year" + } + }, + { + "node": { + "arguments": [], + "uniqueId": "macro.dbt_date.default__week_start" + } + }, + { + "node": { + "arguments": [], + "uniqueId": "macro.dbt_date.from_unixtimestamp" + } + }, + { + "node": { + "arguments": [], + "uniqueId": "macro.dbt_date.generate_series" + } + }, + { + "node": { + "arguments": [], + "uniqueId": "macro.dbt_date.get_base_dates" + } + }, + { + "node": { + "arguments": [], + "uniqueId": "macro.dbt_date.get_date_dimension" + } + }, + { + "node": { + "arguments": [], + "uniqueId": "macro.dbt_date.get_fiscal_periods" + } + }, + { + "node": { + "arguments": [], + "uniqueId": "macro.dbt_date.get_fiscal_year_dates" + } + }, + { + "node": { + "arguments": [], + "uniqueId": "macro.dbt_date.get_intervals_between" + } + }, + { + "node": { + "arguments": [], + "uniqueId": "macro.dbt_date.get_powers_of_two" + } + }, + { + "node": { + "arguments": [], + "uniqueId": "macro.dbt_date._iso_week_end" + } + }, + { + "node": { + "arguments": [], + "uniqueId": "macro.dbt_date.iso_week_end" + } 
+ }, + { + "node": { + "arguments": [], + "uniqueId": "macro.dbt_date._iso_week_of_year" + } + }, + { + "node": { + "arguments": [], + "uniqueId": "macro.dbt_date.iso_week_of_year" + } + }, + { + "node": { + "arguments": [], + "uniqueId": "macro.dbt_date._iso_week_start" + } + }, + { + "node": { + "arguments": [], + "uniqueId": "macro.dbt_date.iso_week_start" + } + }, + { + "node": { + "arguments": [], + "uniqueId": "macro.dbt_date.last_month" + } + }, + { + "node": { + "arguments": [], + "uniqueId": "macro.dbt_date.last_month_name" + } + }, + { + "node": { + "arguments": [], + "uniqueId": "macro.dbt_date.last_month_number" + } + }, + { + "node": { + "arguments": [], + "uniqueId": "macro.dbt_date.last_week" + } + }, + { + "node": { + "arguments": [], + "uniqueId": "macro.dbt_date.month_name" + } + }, + { + "node": { + "arguments": [], + "uniqueId": "macro.dbt_date.n_days_ago" + } + }, + { + "node": { + "arguments": [], + "uniqueId": "macro.dbt_date.n_days_away" + } + }, + { + "node": { + "arguments": [], + "uniqueId": "macro.dbt_date.next_month" + } + }, + { + "node": { + "arguments": [], + "uniqueId": "macro.dbt_date.next_month_name" + } + }, + { + "node": { + "arguments": [], + "uniqueId": "macro.dbt_date.next_month_number" + } + }, + { + "node": { + "arguments": [], + "uniqueId": "macro.dbt_date.next_week" + } + }, + { + "node": { + "arguments": [], + "uniqueId": "macro.dbt_date.n_months_ago" + } + }, + { + "node": { + "arguments": [], + "uniqueId": "macro.dbt_date.n_months_away" + } + }, + { + "node": { + "arguments": [], + "uniqueId": "macro.dbt_date.now" + } + }, + { + "node": { + "arguments": [], + "uniqueId": "macro.dbt_date.n_weeks_ago" + } + }, + { + "node": { + "arguments": [], + "uniqueId": "macro.dbt_date.n_weeks_away" + } + }, + { + "node": { + "arguments": [], + "uniqueId": "macro.dbt_date.periods_since" + } + }, + { + "node": { + "arguments": [], + "uniqueId": "macro.dbt_date.postgres__convert_timezone" + } + }, + { + "node": { + "arguments": [], + "uniqueId": "macro.dbt_date.postgres__day_name" + } + }, + { + "node": { + "arguments": [], + "uniqueId": "macro.dbt_date.postgres__day_of_week" + } + }, + { + "node": { + "arguments": [], + "uniqueId": "macro.dbt_date.postgres__day_of_year" + } + }, + { + "node": { + "arguments": [], + "uniqueId": "macro.dbt_date.postgres__from_unixtimestamp" + } + }, + { + "node": { + "arguments": [], + "uniqueId": "macro.dbt_date.postgres__get_date_dimension" + } + }, + { + "node": { + "arguments": [], + "uniqueId": "macro.dbt_date.postgres__iso_week_of_year" + } + }, + { + "node": { + "arguments": [], + "uniqueId": "macro.dbt_date.postgres__iso_week_start" + } + }, + { + "node": { + "arguments": [], + "uniqueId": "macro.dbt_date.postgres__month_name" + } + }, + { + "node": { + "arguments": [], + "uniqueId": "macro.dbt_date.postgres__week_end" + } + }, + { + "node": { + "arguments": [], + "uniqueId": "macro.dbt_date.postgres__week_of_year" + } + }, + { + "node": { + "arguments": [], + "uniqueId": "macro.dbt_date.postgres__week_start" + } + }, + { + "node": { + "arguments": [], + "uniqueId": "macro.dbt_date.redshift__convert_timezone" + } + }, + { + "node": { + "arguments": [], + "uniqueId": "macro.dbt_date.redshift__day_of_month" + } + }, + { + "node": { + "arguments": [], + "uniqueId": "macro.dbt_date.redshift__day_of_week" + } + }, + { + "node": { + "arguments": [], + "uniqueId": "macro.dbt_date.redshift__day_of_year" + } + }, + { + "node": { + "arguments": [], + "uniqueId": "macro.dbt_date.round_timestamp" + } + }, + { + "node": { + 
"arguments": [], + "uniqueId": "macro.dbt_date.snowflake__day_name" + } + }, + { + "node": { + "arguments": [], + "uniqueId": "macro.dbt_date.snowflake__day_of_week" + } + }, + { + "node": { + "arguments": [], + "uniqueId": "macro.dbt_date.snowflake__from_unixtimestamp" + } + }, + { + "node": { + "arguments": [], + "uniqueId": "macro.dbt_date.snowflake__iso_week_end" + } + }, + { + "node": { + "arguments": [], + "uniqueId": "macro.dbt_date.snowflake__iso_week_of_year" + } + }, + { + "node": { + "arguments": [], + "uniqueId": "macro.dbt_date.snowflake__iso_week_start" + } + }, + { + "node": { + "arguments": [], + "uniqueId": "macro.dbt_date.snowflake__month_name" + } + }, + { + "node": { + "arguments": [], + "uniqueId": "macro.dbt_date.snowflake__to_unixtimestamp" + } + }, + { + "node": { + "arguments": [], + "uniqueId": "macro.dbt_date.snowflake__week_end" + } + }, + { + "node": { + "arguments": [], + "uniqueId": "macro.dbt_date.snowflake__week_start" + } + }, + { + "node": { + "arguments": [], + "uniqueId": "macro.dbt_date.spark__convert_timezone" + } + }, + { + "node": { + "arguments": [], + "uniqueId": "macro.dbt_date.today" + } + }, + { + "node": { + "arguments": [], + "uniqueId": "macro.dbt_date.tomorrow" + } + }, + { + "node": { + "arguments": [], + "uniqueId": "macro.dbt_date.to_unixtimestamp" + } + }, + { + "node": { + "arguments": [], + "uniqueId": "macro.dbt_date.week_end" + } + }, + { + "node": { + "arguments": [], + "uniqueId": "macro.dbt_date.week_of_year" + } + }, + { + "node": { + "arguments": [], + "uniqueId": "macro.dbt_date.week_start" + } + }, + { + "node": { + "arguments": [], + "uniqueId": "macro.dbt_date.yesterday" + } + }, + { + "node": { + "arguments": [], + "uniqueId": "macro.jaffle_shop.cents_to_dollars" + } + } + ], + "pageInfo": { + "hasNextPage": false, + "endCursor": "Y3Vyc29yOm1hY3JvLmphZmZsZV9zaG9wLmNlbnRzX3RvX2RvbGxhcnM=" + } + } + } + } +} diff --git a/tests/dbt/cloud/fake_graphql_server/targets.py b/tests/dbt/cloud/fake_graphql_server/targets.py new file mode 100644 index 00000000..8c595cc4 --- /dev/null +++ b/tests/dbt/cloud/fake_graphql_server/targets.py @@ -0,0 +1,11 @@ +environment_targets = { + 132676: "jaffle_shop", + 12: "metaphor_subscriptions", + 13: "london_bike_analysis", +} + +job_targets = { + 21: "jaffle_shop", + 22: "metaphor_subscriptions", + 23: "london_bike_analysis", +} diff --git a/tests/dbt/cloud/test_client.py b/tests/dbt/cloud/test_client.py index 8ecf91a6..c7fc8c4f 100644 --- a/tests/dbt/cloud/test_client.py +++ b/tests/dbt/cloud/test_client.py @@ -1,7 +1,7 @@ from typing import Dict from unittest.mock import patch -from metaphor.dbt.cloud.client import DbtAdminAPIClient, DbtRun +from metaphor.dbt.cloud.client import DbtAdminAPIClient class Response: @@ -198,31 +198,6 @@ def test_get_project_jobs(mock_requests): assert jobs == [3333, 2222] -@patch("metaphor.dbt.cloud.client.requests") -def test_get_run_artifact(mock_requests): - client = DbtAdminAPIClient( - base_url="http://base.url", - account_id=1111, - service_token="service_token", - ) - - mock_requests.get.return_value = Response(200, {"artifact": "json"}) - - run = DbtRun(run_id=2222, project_id=3333, job_id=4444) - path = client.get_run_artifact(run, "manifest.json") - assert path.endswith("/3333-4444-manifest.json") - - mock_requests.get.assert_called_once_with( - "http://base.url/api/v2/accounts/1111/runs/2222/artifacts/manifest.json", - params=None, - headers={ - "Content-Type": "application/json", - "Authorization": "Token service_token", - }, - timeout=600, - ) - - 
@patch("metaphor.dbt.cloud.client.requests") def test_job_is_included(mock_requests): client = DbtAdminAPIClient( diff --git a/tests/dbt/cloud/test_dbt_test_parser.py b/tests/dbt/cloud/test_dbt_test_parser.py new file mode 100644 index 00000000..acb90abf --- /dev/null +++ b/tests/dbt/cloud/test_dbt_test_parser.py @@ -0,0 +1,108 @@ +import datetime +from typing import Dict + +from metaphor.dbt.cloud.discovery_api.generated.get_job_run_models import ( + GetJobRunModelsJobModels as Model, +) +from metaphor.dbt.cloud.discovery_api.generated.get_job_run_models import ( + GetJobRunModelsJobModelsColumns, +) +from metaphor.dbt.cloud.discovery_api.generated.get_job_run_models import ( + GetJobRunModelsJobModelsRunResults as RunResult, +) +from metaphor.dbt.cloud.discovery_api.generated.get_job_run_tests import ( + GetJobRunTestsJobTests as Test, +) +from metaphor.dbt.cloud.parser.dbt_test_parser import TestParser +from metaphor.models.metadata_change_event import ( + DataPlatform, + Dataset, + DbtModel, + VirtualView, +) + + +def test_dbt_test_parser(): + virtual_views: Dict[str, VirtualView] = {} + datasets: Dict[str, Dataset] = {} + test_parser = TestParser( + platform=DataPlatform.SNOWFLAKE, + account="john.doe@metaphor.io", + virtual_views=virtual_views, + datasets=datasets, + ) + models = {} + test = Test( + dependsOn=[], + name="test", + uniqueId="test.unique.id", + compiledCode="compiledCode", + compiledSql="compiledSql", + columnName="column", + ) + + # No depends_on - nothing + test_parser.parse(test, models) + assert not virtual_views + + # No model in depends_on - nothing + test.depends_on = ["foo"] + test_parser.parse(test, models) + assert not virtual_views + + # depends_on model not in virtual_views - nothing + test.depends_on = ["model.foo"] + test_parser.parse(test, models) + assert not virtual_views + + # depends_on model not in models - nothing + virtual_views["model.foo"] = VirtualView(dbt_model=DbtModel()) + test_parser.parse(test, models) + dbt_model = virtual_views["model.foo"].dbt_model + assert dbt_model and not dbt_model.tests + + # model is not qualified - no data quality monitor + model = Model( + runResults=[ + RunResult( + status="pass", + executeCompletedAt=datetime.datetime.now(), + ), + ], + alias=None, + columns=[ + GetJobRunModelsJobModelsColumns( + comment=None, + description=None, + meta=None, + name="col", + tags=[], + type="TEXT", + ), + ], + compiledCode="compiledCode", + compiledSql="compiledSql", + database=None, + dependsOn=[], + description="description", + environmentId=1234, + materializedType="MATERIALIZED_VIEW", + meta=None, + name="foo", + packageName="package", + rawCode="rawCode", + rawSql="rawSql", + schema=None, + tags=None, + uniqueId="model.foo", + ) + models["model.foo"] = model + virtual_views["model.foo"] = VirtualView(dbt_model=DbtModel()) + test_parser.parse(test, models) + assert not datasets + + # Test does not have name - no data quality monitor + test.name = None + virtual_views["model.foo"] = VirtualView(dbt_model=DbtModel()) + test_parser.parse(test, models) + assert not datasets diff --git a/tests/dbt/cloud/test_discovery_api.py b/tests/dbt/cloud/test_discovery_api.py deleted file mode 100644 index 85ca9f21..00000000 --- a/tests/dbt/cloud/test_discovery_api.py +++ /dev/null @@ -1,83 +0,0 @@ -from dataclasses import dataclass -from datetime import datetime -from typing import Any, Dict -from unittest.mock import MagicMock, patch - -from metaphor.dbt.cloud.discovery_api import DiscoveryAPI - - -@patch("requests.post") -def 
test(mock_requests_post: MagicMock): - def fake_post( - url: str, headers: Dict[str, Any], json: Dict[str, Any], timeout: int - ): - @dataclass - class Response: - response: Dict[str, Any] - - def json(self): - return { - "data": self.response, - } - - if json["query"].strip().startswith("query Models"): - return Response( - response={ - "job": { - "models": [ - { - "alias": None, - "database": "db", - "schema": "sch", - "name": "tab", - "uniqueId": "foo", - }, - ] - } - } - ) - elif json["query"].strip().startswith("query Tests"): - job_id = json["variables"].get("jobId", 0) - if job_id == 0: - return Response(response={"job": {"tests": []}}) - elif job_id == 1: - return Response( - response={ - "job": { - "tests": [ - { - "uniqueId": "1", - "name": None, - "columnName": "col", - "status": "pass", - "executeCompletedAt": datetime.now(), - "dependsOn": [ - "model.foo.bar", - ], - }, - { - "uniqueId": "2", - "name": "not pass", - "columnName": "col2", - "status": "error", - "executeCompletedAt": datetime.now(), - "dependsOn": [ - "model.foo.bar", - ], - }, - ] - } - } - ) - assert False - - mock_requests_post.side_effect = fake_post - discovery_api = DiscoveryAPI("url", "token") - assert discovery_api.get_all_job_model_names(123)["foo"] == "db.sch.tab" - assert not discovery_api.get_all_job_tests(0) - test_statuses = discovery_api.get_all_job_tests(1) - assert len(test_statuses) == 2 - assert test_statuses[0].name is None - assert test_statuses[0].status == "pass" - assert test_statuses[1].columnName == "col2" - assert test_statuses[1].status == "error" diff --git a/tests/dbt/cloud/test_extractor.py b/tests/dbt/cloud/test_extractor.py index c50ffd83..4fee5112 100644 --- a/tests/dbt/cloud/test_extractor.py +++ b/tests/dbt/cloud/test_extractor.py @@ -1,198 +1,80 @@ -from datetime import datetime +import json +from typing import List, Set from unittest.mock import MagicMock, patch import pytest +from httpx import Response from metaphor.common.base_config import OutputConfig +from metaphor.common.event_util import EventUtil from metaphor.dbt.cloud.client import DbtRun from metaphor.dbt.cloud.config import DbtCloudConfig -from metaphor.dbt.cloud.discovery_api import DiscoveryTestNode from metaphor.dbt.cloud.extractor import DbtCloudExtractor -from metaphor.models.metadata_change_event import ( - DataMonitorStatus, - DataMonitorTarget, - DataPlatform, - Dataset, - DatasetLogicalID, - VirtualView, - VirtualViewLogicalID, - VirtualViewType, -) - - -@patch("metaphor.dbt.cloud.extractor.DiscoveryAPI") -@patch("metaphor.dbt.cloud.extractor.get_data_platform_from_manifest") -@patch("metaphor.dbt.cloud.extractor.DbtAdminAPIClient") -@patch("metaphor.dbt.cloud.extractor.DbtExtractor") -@pytest.mark.asyncio -async def test_extractor( - mock_dbt_extractor_class: MagicMock, - mock_client_class: MagicMock, - mock_get_data_platform_from_manifest: MagicMock, - mock_discovery_api_class: MagicMock, -): - mock_client = MagicMock() - mock_client.get_last_successful_run = MagicMock( - side_effect=( - DbtRun(run_id=3333, project_id=4444, job_id=2222), - DbtRun(run_id=7777, project_id=6666, job_id=8888), - DbtRun(run_id=3333, project_id=4444, job_id=2222), - ) - ) - mock_client.get_project_jobs = MagicMock(side_effect=[[8888], [2222]]) - - def mock_is_job_included(job_id: int) -> bool: - return job_id != 3333 - - mock_client.is_job_included = mock_is_job_included - mock_client.get_snowflake_account = MagicMock(return_value="snowflake_account") - mock_client.get_run_artifact = MagicMock(return_value="tempfile") - - 
mock_get_data_platform_from_manifest.return_value = DataPlatform.UNKNOWN - - mock_dbt_extractor = MagicMock() - - async def fake_extract(): - return [] +from tests.dbt.cloud.fake_graphql_server import endpoints, targets +from tests.test_utils import load_json + + +def mock_post(url: str, content: str, **kwargs): + content_json = json.loads(content) + operation_name = content_json["operationName"] + variables = content_json["variables"] + results = endpoints[operation_name](variables) + payload = {"data": json.loads(results)} + return Response(200, content=json.dumps(payload)) + + +class MockAdminClient: + def __init__( + self, + base_url: str, + account_id: int, + service_token: str, + included_env_ids: Set[int] = set(), + ): + self.job_env = { + j: e + for j, e in zip( + targets.job_targets.keys(), targets.environment_targets.keys() + ) + } - mock_dbt_extractor.extract.side_effect = fake_extract + def get_project_jobs(self, project_id: int) -> List[int]: + return list(self.job_env.keys()) - mock_client_class.return_value = mock_client - mock_dbt_extractor_class.return_value = mock_dbt_extractor + def is_job_included(self, job_id: int): + return True - mock_discovery_api = MagicMock() - mock_discovery_api.get_all_job_tests.return_value = [] + def get_last_successful_run(self, job_id: int, page_size=50): + return DbtRun( + project_id=123, + job_id=job_id, + run_id=job_id * 10 + 1, + environment_id=self.job_env[job_id], + ) - config = DbtCloudConfig( - output=OutputConfig(), - account_id=1111, - job_ids={2222, 3333}, - project_ids={6666, 4444}, - environment_ids={1}, - base_url="https://cloud.metaphor.getdbt.com", - service_token="service_token", - ) - extractor = DbtCloudExtractor(config) - await extractor.extract() - assert sorted(extractor._entities.keys()) == [3333, 7777] + def get_snowflake_account(self, project_id: int): + return "john.doe@metaphor.io" -@patch("metaphor.dbt.cloud.extractor.get_data_platform_from_manifest") +@pytest.mark.asyncio() @patch("metaphor.dbt.cloud.extractor.DbtAdminAPIClient") -@patch("metaphor.dbt.cloud.extractor.DbtExtractor") -@pytest.mark.asyncio -async def test_extractor_bad_source( - mock_dbt_extractor_class: MagicMock, - mock_client_class: MagicMock, - mock_get_data_platform_from_manifest: MagicMock, +@patch("httpx.Client.post") +async def test_extractor( + mock_httpx_client_post: MagicMock, + mock_admin_client: MagicMock, + test_root_dir: str, ): - mock_client = MagicMock() - mock_client.get_last_successful_run = MagicMock( - side_effect=( - DbtRun(run_id=3333, project_id=4444, job_id=2222), - DbtRun(run_id=7777, project_id=6666, job_id=8888), - DbtRun(run_id=3333, project_id=4444, job_id=2222), + mock_httpx_client_post.side_effect = mock_post + mock_admin_client.side_effect = MockAdminClient + + extractor = DbtCloudExtractor( + DbtCloudConfig( + output=OutputConfig(), + account_id=1, + service_token="tok", + project_ids={123}, ) ) - mock_client.get_project_jobs = MagicMock(side_effect=[[8888], [2222]]) - mock_client.get_snowflake_account = MagicMock(return_value="snowflake_account") - mock_client.get_run_artifact = MagicMock(return_value="tempfile") - - mock_get_data_platform_from_manifest.return_value = DataPlatform.UNKNOWN - - mock_dbt_extractor = MagicMock() - - async def fake_extract(): - raise ValueError() - - mock_dbt_extractor.extract.side_effect = fake_extract - - mock_client_class.return_value = mock_client - mock_dbt_extractor_class.return_value = mock_dbt_extractor - - config = DbtCloudConfig( - output=OutputConfig(), - account_id=1111, - 
job_ids={2222}, - project_ids={6666, 4444}, - base_url="https://cloud.metaphor.getdbt.com", - service_token="service_token", - ) - extractor = DbtCloudExtractor(config) - await extractor.extract() - assert not extractor._entities - - -@patch("metaphor.dbt.cloud.extractor.DiscoveryAPI") -def test_extend_test_run_results_entities(mock_discovery_api_class: MagicMock): - config = DbtCloudConfig( - output=OutputConfig(), - account_id=1111, - job_ids={2222}, - project_ids={6666, 4444}, - base_url="https://cloud.metaphor.getdbt.com", - service_token="service_token", - ) - extractor = DbtCloudExtractor(config) - mock_discovery_api = MagicMock() - mock_discovery_api.get_all_job_model_names.return_value = { - "model.foo.bar": "db.sch.tab" - } - - def fake_get_all_job_tests(job_id: int): - return [ - DiscoveryTestNode( - uniqueId="1", - name="test1", - columnName="col1", - status="pass", - executeCompletedAt=datetime.now(), - dependsOn=["model.foo.bar"], - ), - DiscoveryTestNode( - uniqueId="2", - name="test2", - columnName="col2", - status="error", - executeCompletedAt=datetime.now(), - dependsOn=["model.foo.bar"], - ), - ] - - mock_discovery_api.get_all_job_tests.side_effect = fake_get_all_job_tests - mock_discovery_api_class.return_value = mock_discovery_api - entities = [ - VirtualView( - logical_id=VirtualViewLogicalID( - name="foo.bar", - type=VirtualViewType.DBT_MODEL, - ), - ), - Dataset( - logical_id=DatasetLogicalID( - name="a.b.c", - platform=DataPlatform.UNKNOWN, - ) - ), - ] - - res = extractor._extend_test_run_results_entities( - DataPlatform.UNKNOWN, None, 2222, entities - ) - assert len(res) == 3 - dataset = next( - x for x in res if isinstance(x, Dataset) and x.data_quality is not None - ) - assert dataset.data_quality and dataset.data_quality.monitors - assert dataset.data_quality.monitors[0].status == DataMonitorStatus.PASSED - assert dataset.data_quality.monitors[0].targets == [ - DataMonitorTarget( - column="col1", dataset="DATASET~083603875008F6D0B4981A524F67A549" - ) - ] - assert dataset.data_quality.monitors[1].status == DataMonitorStatus.ERROR - assert dataset.data_quality.monitors[1].targets == [ - DataMonitorTarget( - column="col2", dataset="DATASET~083603875008F6D0B4981A524F67A549" - ) - ] + events = [EventUtil.trim_event(e) for e in await extractor.extract()] + expected = f"{test_root_dir}/dbt/cloud/expected.json" + assert events == load_json(expected)
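
---

For reference, below is a minimal, self-contained sketch of the fixture-dispatch pattern the fake GraphQL server above relies on. `handle_post` stands in for `mock_post` in test_extractor.py (the real function additionally wraps the payload in an `httpx.Response`), and the `endpoints` registry is assumed to be assembled in tests/dbt/cloud/fake_graphql_server/__init__.py, which is outside this diff, so its exact shape here is an assumption. The inline fixture dict replaces the on-disk jaffle_shop.json so the sketch runs standalone.

    import json
    from typing import Any, Callable, Dict

    # Map dbt Cloud job IDs to fixture names, mirroring targets.py.
    job_targets: Dict[int, str] = {21: "jaffle_shop"}


    def fake_GetJobRunTests(variables: Dict[str, Any]) -> str:
        # The real endpoint.py reads <target>.json from disk; the fixture is
        # inlined here so this sketch needs no files.
        target = job_targets[variables["jobId"]]
        fixtures = {"jaffle_shop": {"job": {"tests": []}}}
        return json.dumps(fixtures[target])


    # Per-operation registry: GraphQL operationName -> fake resolver.
    endpoints: Dict[str, Callable[[Dict[str, Any]], str]] = {
        "GetJobRunTests": fake_GetJobRunTests,
    }


    def handle_post(content: str) -> Dict[str, Any]:
        # Same routing as mock_post: pick the resolver by operationName and
        # wrap its JSON under the GraphQL "data" key.
        body = json.loads(content)
        results = endpoints[body["operationName"]](body["variables"])
        return {"data": json.loads(results)}


    request = json.dumps(
        {"operationName": "GetJobRunTests", "variables": {"jobId": 21}}
    )
    assert handle_post(request) == {"data": {"job": {"tests": []}}}

Routing on `operationName` gives each generated-client query its own fixture directory, so the tests exercise the generated pydantic models' real response parsing end to end rather than asserting against hand-built MagicMock return values.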