add agent_check template for publishing platform integrations
james-eichelbaum committed Sep 16, 2024
1 parent ef41af5 commit 45e2dd0
Showing 18 changed files with 390 additions and 1 deletion.
11 changes: 11 additions & 0 deletions datadog_checks_dev/datadog_checks/dev/tooling/constants.py
@@ -97,6 +97,16 @@
[9]: https://docs.datadoghq.com/help/
"""

CHECK_ONLY_LINKS = """\
[1]: **LINK_TO_INTEGRATION_SITE**
[2]: https://app.datadoghq.com/account/settings/agent/latest
[3]: https://docs.datadoghq.com/agent/kubernetes/integrations/
[4]: https://github.com/DataDog/{repository}/blob/master/{name}/datadog_checks/{name}/data/conf.yaml.example
[5]: https://docs.datadoghq.com/agent/guide/agent-commands/#start-stop-and-restart-the-agent
[6]: https://docs.datadoghq.com/agent/guide/agent-commands/#agent-status-and-information
[9]: https://docs.datadoghq.com/help/
"""

LOGS_LINKS = """\
[1]: https://docs.datadoghq.com/help/
[2]: https://app.datadoghq.com/account/settings/agent/latest
@@ -132,6 +142,7 @@

integration_type_links = {
'check': CHECK_LINKS,
'check_only': CHECK_ONLY_LINKS,
'logs': LOGS_LINKS,
'jmx': JMX_LINKS,
'snmp_tile': SNMP_TILE_LINKS,
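These link blocks are plain format strings that the tooling fills in when it renders a new integration's README, keyed by integration type. A minimal sketch of how the new entry could be rendered; the repository and integration names are hypothetical, not part of this commit:

from datadog_checks.dev.tooling.constants import integration_type_links

# 'integrations-extras' and 'my_integration' are assumed example values.
readme_links = integration_type_links['check_only'].format(
    repository='integrations-extras',
    name='my_integration',
)
print(readme_links)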
@@ -0,0 +1 @@
Initial Release
@@ -0,0 +1,2 @@
{license_header}
__path__ = __import__('pkgutil').extend_path(__path__, __name__) # type: ignore
@@ -0,0 +1,2 @@
{license_header}
__version__ = '{starting_version}'
@@ -0,0 +1,5 @@
{license_header}
from .__about__ import __version__
from .check import {check_class}

__all__ = ['__version__', '{check_class}']
@@ -0,0 +1,96 @@
{license_header}
from typing import Any # noqa: F401

from datadog_checks.base import AgentCheck # noqa: F401

# from datadog_checks.base.utils.db import QueryManager
# from requests.exceptions import ConnectionError, HTTPError, InvalidURL, Timeout
# from json import JSONDecodeError


class {check_class}(AgentCheck):

# This will be the prefix of every metric and service check the integration sends
__NAMESPACE__ = '{check_name}'

def __init__(self, name, init_config, instances):
super({check_class}, self).__init__(name, init_config, instances)

# Use self.instance to read the check configuration
# self.url = self.instance.get("url")

# If the check is going to perform SQL queries, you should define a query manager here.
# More info at
# https://datadoghq.dev/integrations-core/base/databases/#datadog_checks.base.utils.db.core.QueryManager
# sample_query = {{
# "name": "sample",
# "query": "SELECT * FROM sample_table",
# "columns": [
# {{"name": "metric", "type": "gauge"}}
# ],
# }}
# self._query_manager = QueryManager(self, self.execute_query, queries=[sample_query])
# self.check_initializations.append(self._query_manager.compile_queries)

def check(self, _):
# type: (Any) -> None
# The following are useful bits of code to help new users get started.

# Perform HTTP Requests with our HTTP wrapper.
# More info at https://datadoghq.dev/integrations-core/base/http/
# try:
# response = self.http.get(self.url)
# response.raise_for_status()
# response_json = response.json()

# except Timeout as e:
# self.service_check(
# "can_connect",
# AgentCheck.CRITICAL,
# message="Request timeout: {{}}, {{}}".format(self.url, e),
# )
# raise

# except (HTTPError, InvalidURL, ConnectionError) as e:
# self.service_check(
# "can_connect",
# AgentCheck.CRITICAL,
# message="Request failed: {{}}, {{}}".format(self.url, e),
# )
# raise

# except JSONDecodeError as e:
# self.service_check(
# "can_connect",
# AgentCheck.CRITICAL,
# message="JSON Parse failed: {{}}, {{}}".format(self.url, e),
# )
# raise

# except ValueError as e:
# self.service_check(
# "can_connect", AgentCheck.CRITICAL, message=str(e)
# )
# raise

# This is how you submit metrics
# There are different types of metrics that you can submit (gauge, count, rate, and so on).
# More info at https://datadoghq.dev/integrations-core/base/api/#datadog_checks.base.checks.base.AgentCheck
# self.gauge("test", 1.23, tags=['foo:bar'])

# Perform database queries using the Query Manager
# self._query_manager.execute()

# This is how you use the persistent cache. This cache is file based and persists across Agent restarts.
# If you need an in-memory cache that persists across check runs,
# you can define a dictionary in the __init__ method.
# self.write_persistent_cache("key", "value")
# value = self.read_persistent_cache("key")

# If your check ran successfully, you can send the status.
# More info at
# https://datadoghq.dev/integrations-core/base/api/#datadog_checks.base.checks.base.AgentCheck.service_check
# self.service_check("can_connect", AgentCheck.OK)

# If it didn't, it should send a critical service check.
self.service_check("can_connect", AgentCheck.CRITICAL)
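For reference, once the commented-out pieces above are filled in, a check built from this template might look roughly like the sketch below. The class name, the url option, and the metric name are assumptions for illustration, not part of the template itself:

from datadog_checks.base import AgentCheck


class MyIntegrationCheck(AgentCheck):
    __NAMESPACE__ = 'my_integration'  # assumed namespace

    def __init__(self, name, init_config, instances):
        super(MyIntegrationCheck, self).__init__(name, init_config, instances)
        self.url = self.instance.get('url')  # assumed instance option

    def check(self, _):
        try:
            response = self.http.get(self.url)
            response.raise_for_status()
        except Exception as e:
            self.service_check('can_connect', AgentCheck.CRITICAL, message=str(e))
            raise
        # Submit a simple gauge and report a healthy connection.
        self.gauge('response_time', response.elapsed.total_seconds())
        self.service_check('can_connect', AgentCheck.OK)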
@@ -0,0 +1,19 @@
{license_header}

{documentation}

from .instance import InstanceConfig
from .shared import SharedConfig


class ConfigMixin:
_config_model_instance: InstanceConfig
_config_model_shared: SharedConfig

@property
def config(self) -> InstanceConfig:
return self._config_model_instance

@property
def shared_config(self) -> SharedConfig:
return self._config_model_shared
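The mixin is meant to be combined with the check class so that validated configuration is exposed as typed attributes. A minimal usage sketch under assumed names (the check class is hypothetical; the options shown are the base ones defined in this template):

from datadog_checks.base import AgentCheck

from .config_models import ConfigMixin


class MyIntegrationCheck(AgentCheck, ConfigMixin):
    def check(self, _):
        # self.config is the validated InstanceConfig; self.shared_config is the SharedConfig.
        interval = self.config.min_collection_interval
        tags = list(self.config.tags or ())
        self.gauge('collection_interval', interval, tags=tags)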
@@ -0,0 +1,10 @@
{license_header}

{documentation}

def instance_empty_default_hostname():
return False


def instance_min_collection_interval():
return 15
@@ -0,0 +1,45 @@
{license_header}

{documentation}

from __future__ import annotations

from typing import Optional

from pydantic import BaseModel, ConfigDict, field_validator, model_validator

from datadog_checks.base.utils.functions import identity
from datadog_checks.base.utils.models import validation

from . import defaults, validators


class InstanceConfig(BaseModel):
model_config = ConfigDict(
validate_default=True,
arbitrary_types_allowed=True,
frozen=True,
)
empty_default_hostname: Optional[bool] = None
min_collection_interval: Optional[float] = None
service: Optional[str] = None
tags: Optional[tuple[str, ...]] = None

@model_validator(mode='before')
def _initial_validation(cls, values):
return validation.core.initialize_config(getattr(validators, 'initialize_instance', identity)(values))

@field_validator('*', mode='before')
def _validate(cls, value, info):
field = cls.model_fields[info.field_name]
field_name = field.alias or info.field_name
if field_name in info.context['configured_fields']:
value = getattr(validators, f'instance_{{info.field_name}}', identity)(value, field=field)
else:
value = getattr(defaults, f'instance_{{info.field_name}}', lambda: value)()

return validation.utils.make_immutable(value)

@model_validator(mode='after')
def _final_validation(cls, model):
return validation.core.check_model(getattr(validators, 'check_instance', identity)(model))
@@ -0,0 +1,42 @@
{license_header}

{documentation}

from __future__ import annotations

from typing import Optional

from pydantic import BaseModel, ConfigDict, field_validator, model_validator

from datadog_checks.base.utils.functions import identity
from datadog_checks.base.utils.models import validation

from . import defaults, validators


class SharedConfig(BaseModel):
model_config = ConfigDict(
validate_default=True,
arbitrary_types_allowed=True,
frozen=True,
)
service: Optional[str] = None

@model_validator(mode='before')
def _initial_validation(cls, values):
return validation.core.initialize_config(getattr(validators, 'initialize_shared', identity)(values))

@field_validator('*', mode='before')
def _validate(cls, value, info):
field = cls.model_fields[info.field_name]
field_name = field.alias or info.field_name
if field_name in info.context['configured_fields']:
value = getattr(validators, f'shared_{{info.field_name}}', identity)(value, field=field)
else:
value = getattr(defaults, f'shared_{{info.field_name}}', lambda: value)()

return validation.utils.make_immutable(value)

@model_validator(mode='after')
def _final_validation(cls, model):
return validation.core.check_model(getattr(validators, 'check_shared', identity)(model))
@@ -0,0 +1,11 @@
{license_header}

# Here you can include additional config validators or transformers
#
# def initialize_instance(values, **kwargs):
# if 'my_option' not in values and 'my_legacy_option' in values:
# values['my_option'] = values['my_legacy_option']
# if values.get('my_number') > 10:
# raise ValueError('my_number max value is 10, got %s' % str(values.get('my_number')))
#
# return values
@@ -0,0 +1,44 @@
## All options defined here are available to all instances.
#
init_config:

## @param service - string - optional
## Attach the tag `service:<SERVICE>` to every metric, event, and service check emitted by this integration.
##
## Additionally, this sets the default `service` for every log source.
#
# service: <SERVICE>

## Every instance is scheduled independently of the others.
#
instances:

-
## @param tags - list of strings - optional
## A list of tags to attach to every metric and service check emitted by this instance.
##
## Learn more about tagging at https://docs.datadoghq.com/tagging
#
# tags:
# - <KEY_1>:<VALUE_1>
# - <KEY_2>:<VALUE_2>

## @param service - string - optional
## Attach the tag `service:<SERVICE>` to every metric, event, and service check emitted by this integration.
##
## Overrides any `service` defined in the `init_config` section.
#
# service: <SERVICE>

## @param min_collection_interval - number - optional - default: 15
## This changes the collection interval of the check. For more information, see:
## https://docs.datadoghq.com/developers/write_agent_check/#collection-interval
#
# min_collection_interval: 15

## @param empty_default_hostname - boolean - optional - default: false
## This forces the check to send metrics with no hostname.
##
## This is useful for cluster-level checks.
#
# empty_default_hostname: false
@@ -0,0 +1,4 @@
[env.collectors.datadog-checks]

[[envs.default.matrix]]
python = ["3.11"]
@@ -0,0 +1,60 @@
[build-system]
requires = [
"hatchling>=0.13.0",
]
build-backend = "hatchling.build"

[project]
name = "datadog-{project_name}"
description = "The {integration_name} check"
readme = "README.md"
license = "BSD-3-Clause"
requires-python = ">=3.11"
keywords = [
"datadog",
"datadog agent",
"datadog check",
"{check_name}",
]
authors = [
{{ name = "{author}", email = "{email_packages}" }},
]
classifiers = [
"Development Status :: 5 - Production/Stable",
"Intended Audience :: Developers",
"Intended Audience :: System Administrators",
"License :: OSI Approved :: BSD License",
"Private :: Do Not Upload",
"Programming Language :: Python :: 3.11",
"Topic :: System :: Monitoring",
]
dependencies = [
"datadog-checks-base>=32.6.0",
]
dynamic = [
"version",
]

[project.optional-dependencies]
deps = []

[project.urls]
Source = "https://github.com/DataDog/{repo_name}"

[tool.hatch.version]
path = "datadog_checks/{check_name}/__about__.py"

[tool.hatch.build.targets.sdist]
include = [
"/datadog_checks",
"/tests",
"/manifest.json",
]

[tool.hatch.build.targets.wheel]
include = [
"/datadog_checks/{check_name}",
]
dev-mode-dirs = [
".",
]
@@ -0,0 +1 @@
{license_header}
@@ -0,0 +1,12 @@
{license_header}
import pytest


@pytest.fixture(scope='session')
def dd_environment():
yield


@pytest.fixture
def instance():
return {{}}
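With these fixtures in place, a first unit test typically instantiates the check with the instance fixture and runs it through dd_run_check, then asserts on what the aggregator received; both fixtures come from the datadog_checks.dev pytest plugin. A sketch, reusing the hypothetical MyIntegrationCheck class and assumed package name from earlier:

from datadog_checks.base import AgentCheck

from datadog_checks.my_integration import MyIntegrationCheck  # assumed package and class names


def test_check(dd_run_check, aggregator, instance):
    check = MyIntegrationCheck('my_integration', {}, [instance])
    dd_run_check(check)
    # The unmodified template always reports a CRITICAL can_connect service check.
    aggregator.assert_service_check('my_integration.can_connect', AgentCheck.CRITICAL)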