add agent_check template for publishing platform integrations
james-eichelbaum committed Sep 16, 2024
1 parent ef41af5 commit cafad04
Showing 20 changed files with 442 additions and 11 deletions.
32 changes: 23 additions & 9 deletions datadog_checks_dev/datadog_checks/dev/tooling/commands/create.py
@@ -9,7 +9,12 @@

from ...fs import resolve_path
from ..constants import get_root
from ..create import construct_template_fields, create_template_files, get_valid_templates
from ..create import (
construct_template_fields,
create_template_files,
get_valid_templates,
prefill_template_fields_for_check_only,
)
from ..utils import kebab_case_name, normalize_package_name
from .console import CONTEXT_SETTINGS, abort, echo_info, echo_success, echo_warning

@@ -171,26 +176,35 @@ def create(ctx, name, integration_type, location, non_interactive, quiet, dry_ru
if integration_type == 'snmp_tile':
integration_dir_name = 'snmp_' + integration_dir_name
integration_dir = os.path.join(root, integration_dir_name)
if os.path.exists(integration_dir):
abort(f'Path `{integration_dir}` already exists!')
# check-only is designed to already have content in it
if integration_type == 'check_only':
if not os.path.exists(os.path.join(integration_dir, "manifest.json")):
abort(f"Expected {integration_dir}/manifest.json to exist")
else:
if os.path.exists(integration_dir):
abort(f'Path `{integration_dir}` already exists!')

template_fields = {'manifest_version': '1.0.0', "today": date.today()}
if integration_type == 'check_only':
template_fields.update(prefill_template_fields_for_check_only(integration_dir_name))
if non_interactive and repo_choice != 'core':
abort(f'Cannot use non-interactive mode with repo_choice: {repo_choice}')

if not non_interactive and not dry_run:
if repo_choice not in ['core', 'integrations-internal-core']:
support_email = click.prompt('Email used for support requests')
template_fields['email'] = support_email
prompt_and_update_if_missing(template_fields, 'email', 'Email used for support requests')
support_email = template_fields['email']
template_fields['email_packages'] = template_fields['email']
if repo_choice == 'extras':
template_fields['author'] = click.prompt('Your name')

if repo_choice == 'marketplace':
author_name = click.prompt('Your Company Name')
homepage = click.prompt('The product or company homepage')
sales_email = click.prompt('Email used for subscription notifications')

prompt_and_update_if_missing(template_fields, 'author_name', 'Your Company Name')
prompt_and_update_if_missing(template_fields, 'homepage', 'The product or company homepage')
prompt_and_update_if_missing(template_fields, 'sales_email', 'Email used for subscription notifications')
author_name = template_fields['author_name']
sales_email = template_fields['sales_email']
homepage = template_fields['homepage']
template_fields['author'] = author_name

eula = 'assets/eula.pdf'
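For context, the check_only path assumes the integration directory already exists and contains a manifest.json; the prefill step further down reads its author block. A rough sketch of the fields involved (values are invented for illustration):

# Hypothetical author block of the pre-existing manifest.json that check_only expects;
# only the fields consumed by prefill_template_fields_for_check_only are shown.
manifest = {
    "author": {
        "name": "Acme",
        "support_email": "support@acme.example",
        "homepage": "https://acme.example",
        "sales_email": "sales@acme.example",
    }
}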
11 changes: 11 additions & 0 deletions datadog_checks_dev/datadog_checks/dev/tooling/constants.py
@@ -97,6 +97,16 @@
[9]: https://docs.datadoghq.com/help/
"""

CHECK_ONLY_LINKS = """\
[1]: **LINK_TO_INTEGRATION_SITE**
[2]: https://app.datadoghq.com/account/settings/agent/latest
[3]: https://docs.datadoghq.com/agent/kubernetes/integrations/
[4]: https://github.com/DataDog/{repository}/blob/master/{name}/datadog_checks/{name}/data/conf.yaml.example
[5]: https://docs.datadoghq.com/agent/guide/agent-commands/#start-stop-and-restart-the-agent
[6]: https://docs.datadoghq.com/agent/guide/agent-commands/#agent-status-and-information
[9]: https://docs.datadoghq.com/help/
"""

LOGS_LINKS = """\
[1]: https://docs.datadoghq.com/help/
[2]: https://app.datadoghq.com/account/settings/agent/latest
@@ -132,6 +142,7 @@

integration_type_links = {
'check': CHECK_LINKS,
'check_only': CHECK_ONLY_LINKS,
'logs': LOGS_LINKS,
'jmx': JMX_LINKS,
'snmp_tile': SNMP_TILE_LINKS,
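The new CHECK_ONLY_LINKS entry is keyed into integration_type_links alongside the existing templates. A minimal sketch of how the block could be rendered with str.format (the repository and integration names are placeholders; the exact call site is not shown in this diff):

from datadog_checks.dev.tooling.constants import integration_type_links

# Substitutes the {repository} and {name} placeholders in CHECK_ONLY_LINKS.
readme_links = integration_type_links['check_only'].format(
    repository='integrations-extras',  # hypothetical repository
    name='my_check',                   # hypothetical integration directory
)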
30 changes: 29 additions & 1 deletion datadog_checks_dev/datadog_checks/dev/tooling/create.py
@@ -1,6 +1,7 @@
# (C) Datadog, Inc. 2018-present
# All rights reserved
# Licensed under a 3-clause BSD style license (see LICENSE)
import json
import os
from datetime import datetime
from operator import attrgetter
@@ -52,6 +53,23 @@ def get_valid_templates():
return sorted(templates, key=attrgetter('name'))


def prefill_template_fields_for_check_only(normalized_integration_name: str) -> dict:
manifest_dict = {}
manifest_path = os.path.join(normalized_integration_name, 'manifest.json')
if not os.path.exists(manifest_path):
raise ValueError(f"Expected manifest to exist at {manifest_path}")
with open(manifest_path, 'r') as manifest:
manifest_dict = json.loads(manifest.read())
author = manifest_dict.get("author", {}).get("name")
return {
'author_name': author,
'check_name': normalize_package_name(f"{author}_{normalized_integration_name}"),
'email': manifest_dict.get("author", {}).get("support_email"),
'homepage': manifest_dict.get("author", {}).get("homepage"),
'sales_email': manifest_dict.get("author", {}).get("sales_email"),
}


def construct_template_fields(integration_name, repo_choice, integration_type, **kwargs):
normalized_integration_name = normalize_package_name(integration_name)
check_name_kebab = kebab_case_name(integration_name)
@@ -71,7 +89,17 @@ def construct_template_fields(integration_name, repo_choice, integration_type, **kwargs):
4. Upload the build artifact to any host with an Agent and
run `datadog-agent integration install -w
path/to/{normalized_integration_name}/dist/<ARTIFACT_NAME>.whl`."""

if integration_type == 'check_only':
# check_name, author, email come from kwargs due to prefill
check_name = ''
author = ''
email = ''
email_packages = ''
install_info = third_party_install_info
# Static fields
license_header = ''
support_type = 'partner'
integration_links = ''
if repo_choice == 'core':
check_name = normalized_integration_name
author = 'Datadog'
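As an illustration of the prefill step, assuming the hypothetical manifest.json sketched earlier sits at ./acme_product/manifest.json (the function resolves the path relative to the current working directory):

from datadog_checks.dev.tooling.create import prefill_template_fields_for_check_only

fields = prefill_template_fields_for_check_only('acme_product')
# With the example manifest above, this would return:
# {
#     'author_name': 'Acme',
#     'check_name': 'acme_acme_product',
#     'email': 'support@acme.example',
#     'homepage': 'https://acme.example',
#     'sales_email': 'sales@acme.example',
# }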
@@ -0,0 +1 @@
Initial Release
@@ -0,0 +1,2 @@
{license_header}
__path__ = __import__('pkgutil').extend_path(__path__, __name__) # type: ignore
@@ -0,0 +1,2 @@
{license_header}
__version__ = '{starting_version}'
@@ -0,0 +1,5 @@
{license_header}
from .__about__ import __version__
from .check import {check_class}

__all__ = ['__version__', '{check_class}']
@@ -0,0 +1,96 @@
{license_header}
from typing import Any # noqa: F401

from datadog_checks.base import AgentCheck # noqa: F401

# from datadog_checks.base.utils.db import QueryManager
# from requests.exceptions import ConnectionError, HTTPError, InvalidURL, Timeout
# from json import JSONDecodeError


class {check_class}(AgentCheck):

# This will be the prefix of every metric and service check the integration sends
__NAMESPACE__ = '{check_name}'

def __init__(self, name, init_config, instances):
super({check_class}, self).__init__(name, init_config, instances)

# Use self.instance to read the check configuration
# self.url = self.instance.get("url")

# If the check is going to perform SQL queries you should define a query manager here.
# More info at
# https://datadoghq.dev/integrations-core/base/databases/#datadog_checks.base.utils.db.core.QueryManager
# sample_query = {{
# "name": "sample",
# "query": "SELECT * FROM sample_table",
# "columns": [
# {{"name": "metric", "type": "gauge"}}
# ],
# }}
# self._query_manager = QueryManager(self, self.execute_query, queries=[sample_query])
# self.check_initializations.append(self._query_manager.compile_queries)

def check(self, _):
# type: (Any) -> None
# The following are useful bits of code to help new users get started.

# Perform HTTP Requests with our HTTP wrapper.
# More info at https://datadoghq.dev/integrations-core/base/http/
# try:
# response = self.http.get(self.url)
# response.raise_for_status()
# response_json = response.json()

# except Timeout as e:
# self.service_check(
# "can_connect",
# AgentCheck.CRITICAL,
# message="Request timeout: {{}}, {{}}".format(self.url, e),
# )
# raise

# except (HTTPError, InvalidURL, ConnectionError) as e:
# self.service_check(
# "can_connect",
# AgentCheck.CRITICAL,
# message="Request failed: {{}}, {{}}".format(self.url, e),
# )
# raise

# except JSONDecodeError as e:
# self.service_check(
# "can_connect",
# AgentCheck.CRITICAL,
# message="JSON Parse failed: {{}}, {{}}".format(self.url, e),
# )
# raise

# except ValueError as e:
# self.service_check(
# "can_connect", AgentCheck.CRITICAL, message=str(e)
# )
# raise

# This is how you submit metrics
# There are different types of metrics that you can submit (gauge, event).
# More info at https://datadoghq.dev/integrations-core/base/api/#datadog_checks.base.checks.base.AgentCheck
# self.gauge("test", 1.23, tags=['foo:bar'])

# Perform database queries using the Query Manager
# self._query_manager.execute()

# This is how you use the persistent cache. This cache is file based and persists across agent restarts.
# If you need an in-memory cache that persists across runs,
# you can define a dictionary in the __init__ method.
# self.write_persistent_cache("key", "value")
# value = self.read_persistent_cache("key")

# If your check ran successfully, you can send the status.
# More info at
# https://datadoghq.dev/integrations-core/base/api/#datadog_checks.base.checks.base.AgentCheck.service_check
# self.service_check("can_connect", AgentCheck.OK)

# If it didn't, then it should send a critical service check
self.service_check("can_connect", AgentCheck.CRITICAL)
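A generated check can be exercised with the standard datadog_checks_dev pytest fixtures. The snippet below is a hypothetical smoke test: my_check and MyCheck stand in for the rendered {check_name} and {check_class}, and it only asserts the placeholder CRITICAL service check that the template submits until real logic is added:

from datadog_checks.my_check import MyCheck


def test_can_connect_critical(aggregator, dd_run_check):
    # Instantiate the check with an empty init_config and a single empty instance.
    check = MyCheck('my_check', {}, [{}])
    dd_run_check(check)
    # __NAMESPACE__ prefixes the service check name with the check name.
    aggregator.assert_service_check('my_check.can_connect', MyCheck.CRITICAL)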
@@ -0,0 +1,19 @@
{license_header}

{documentation}

from .instance import InstanceConfig
from .shared import SharedConfig


class ConfigMixin:
_config_model_instance: InstanceConfig
_config_model_shared: SharedConfig

@property
def config(self) -> InstanceConfig:
return self._config_model_instance

@property
def shared_config(self) -> SharedConfig:
return self._config_model_shared
@@ -0,0 +1,10 @@
{license_header}

{documentation}

def instance_empty_default_hostname():
return False


def instance_min_collection_interval():
return 15
@@ -0,0 +1,45 @@
{license_header}

{documentation}

from __future__ import annotations

from typing import Optional

from pydantic import BaseModel, ConfigDict, field_validator, model_validator

from datadog_checks.base.utils.functions import identity
from datadog_checks.base.utils.models import validation

from . import defaults, validators


class InstanceConfig(BaseModel):
model_config = ConfigDict(
validate_default=True,
arbitrary_types_allowed=True,
frozen=True,
)
empty_default_hostname: Optional[bool] = None
min_collection_interval: Optional[float] = None
service: Optional[str] = None
tags: Optional[tuple[str, ...]] = None

@model_validator(mode='before')
def _initial_validation(cls, values):
return validation.core.initialize_config(getattr(validators, 'initialize_instance', identity)(values))

@field_validator('*', mode='before')
def _validate(cls, value, info):
field = cls.model_fields[info.field_name]
field_name = field.alias or info.field_name
if field_name in info.context['configured_fields']:
value = getattr(validators, f'instance_{{info.field_name}}', identity)(value, field=field)
else:
value = getattr(defaults, f'instance_{{info.field_name}}', lambda: value)()

return validation.utils.make_immutable(value)

@model_validator(mode='after')
def _final_validation(cls, model):
return validation.core.check_model(getattr(validators, 'check_instance', identity)(model))
@@ -0,0 +1,42 @@
{license_header}

{documentation}

from __future__ import annotations

from typing import Optional

from pydantic import BaseModel, ConfigDict, field_validator, model_validator

from datadog_checks.base.utils.functions import identity
from datadog_checks.base.utils.models import validation

from . import defaults, validators


class SharedConfig(BaseModel):
model_config = ConfigDict(
validate_default=True,
arbitrary_types_allowed=True,
frozen=True,
)
service: Optional[str] = None

@model_validator(mode='before')
def _initial_validation(cls, values):
return validation.core.initialize_config(getattr(validators, 'initialize_shared', identity)(values))

@field_validator('*', mode='before')
def _validate(cls, value, info):
field = cls.model_fields[info.field_name]
field_name = field.alias or info.field_name
if field_name in info.context['configured_fields']:
value = getattr(validators, f'shared_{{info.field_name}}', identity)(value, field=field)
else:
value = getattr(defaults, f'shared_{{info.field_name}}', lambda: value)()

return validation.utils.make_immutable(value)

@model_validator(mode='after')
def _final_validation(cls, model):
return validation.core.check_model(getattr(validators, 'check_shared', identity)(model))
@@ -0,0 +1,11 @@
{license_header}

# Here you can include additional config validators or transformers
#
# def initialize_instance(values, **kwargs):
# if 'my_option' not in values and 'my_legacy_option' in values:
# values['my_option'] = values['my_legacy_option']
# if values.get('my_number') > 10:
# raise ValueError('my_number max value is 10, got %s' % str(values.get('my_number')))
#
# return values