Mark integration tests explicitly as such. (#19105)
* Mark integration tests explicitly as such.

* also migrate kafka consumer test
iliakur authored Nov 21, 2024
1 parent 3161120 commit 2ce92e0
Showing 13 changed files with 127 additions and 126 deletions.
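The pattern this commit applies throughout is plain pytest marking. A minimal sketch of the idea, using only standard pytest (the exact runner invocations are an assumption, not taken from this commit):

    import pytest

    @pytest.mark.integration
    def test_talks_to_a_real_service():
        # Selected by `pytest -m integration`; excluded by `pytest -m "not integration"`.
        ...

Marking integration tests explicitly lets a unit-only run stay fast and dependency-free, while the integration suite runs against a live environment.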
1 change: 1 addition & 0 deletions aerospike/tests/test_aerospike.py
@@ -36,6 +36,7 @@ def test_check(aggregator, instance, dd_run_check):
_test_check(aggregator)


@pytest.mark.integration
def test_version_metadata(aggregator, instance, datadog_agent, dd_run_check):

check = AerospikeCheck('aerospike', {}, [instance])
17 changes: 17 additions & 0 deletions boundary/tests/test_integration.py
@@ -33,3 +33,20 @@ def test(aggregator, dd_run_check, get_check, instance):

aggregator.assert_metrics_using_metadata(metadata_metrics, check_submission_type=True)
aggregator.assert_all_metrics_covered()


def test_health_wrong_endpoint(aggregator, dd_run_check, get_check, instance):
instance = instance.copy()
health_endpoint = 'http://localhost:1234'
instance['health_endpoint'] = health_endpoint
instance['timeout'] = 1

check = get_check(instance)
dd_run_check(check)

aggregator.assert_service_check(
'boundary.controller.health', ServiceCheck.CRITICAL, tags=[f'endpoint:{health_endpoint}', *instance['tags']]
)
aggregator.assert_service_check(
'boundary.openmetrics.health', ServiceCheck.OK, tags=[f'endpoint:{METRIC_ENDPOINT}', *instance['tags']]
)
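Note on the migration above: once a test lives in test_integration.py, pytest can also mark it at module level rather than per function. Whether this repository's test_integration.py modules declare such a marker is an assumption; the mechanism itself is standard pytest:

    import pytest

    # Applies to every test in this module, equivalent to decorating each function.
    pytestmark = [pytest.mark.integration]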
17 changes: 0 additions & 17 deletions boundary/tests/test_unit.py
@@ -24,23 +24,6 @@ def test_without_extra_tags(aggregator, dd_run_check, get_check, instance, mock_
)


def test_health_wrong_endpoint(aggregator, dd_run_check, get_check, instance):
instance = instance.copy()
health_endpoint = 'http://localhost:1234'
instance['health_endpoint'] = health_endpoint
instance['timeout'] = 1

check = get_check(instance)
dd_run_check(check)

aggregator.assert_service_check(
'boundary.controller.health', ServiceCheck.CRITICAL, tags=[f'endpoint:{health_endpoint}', *instance['tags']]
)
aggregator.assert_service_check(
'boundary.openmetrics.health', ServiceCheck.OK, tags=[f'endpoint:{METRIC_ENDPOINT}', *instance['tags']]
)


def test_health_error(aggregator, dd_run_check, get_check, instance, mock_http_response):
mock_http_response(status_code=404)

41 changes: 41 additions & 0 deletions fluentd/tests/test_integration.py
@@ -79,3 +79,44 @@ def test_fluentd_with_custom_tags(aggregator, dd_run_check):
aggregator.assert_service_check(check.SERVICE_CHECK_NAME, status=Fluentd.OK, tags=sc_tags, count=1)

aggregator.assert_all_metrics_covered()


def test_default_timeout(instance):
# test default timeout
check = Fluentd(CHECK_NAME, {}, [instance])
check.check(None)

assert check.http.options['timeout'] == (5, 5)


def test_init_config_old_timeout(instance):
# test init_config timeout
check = Fluentd(CHECK_NAME, {'default_timeout': 2}, [instance])
check.check(None)
assert check.http.options['timeout'] == (2, 2)


def test_init_config_timeout(instance):
# test init_config timeout
check = Fluentd(CHECK_NAME, {'timeout': 7}, [instance])
check.check(None)

assert check.http.options['timeout'] == (7, 7)


def test_instance_old_timeout(instance):
# test instance default_timeout
instance['default_timeout'] = 13
check = Fluentd(CHECK_NAME, {'default_timeout': 9}, [instance])
check.check(None)

assert check.http.options['timeout'] == (13, 13)


def test_instance_timeout(instance):
# test instance timeout
instance['timeout'] = 15
check = Fluentd(CHECK_NAME, {}, [instance])
check.check(None)

assert check.http.options['timeout'] == (15, 15)
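Read together, the five timeout tests pin down a resolution order: instance settings beat init_config, the current 'timeout' key beats the legacy 'default_timeout' key at each level, and (5, 5) is the fallback. A sketch of that order as the assertions imply it (resolve_timeout is a hypothetical helper, not the base class's actual code):

    DEFAULT_TIMEOUT = 5

    def resolve_timeout(instance, init_config):
        # Instance config wins over init_config; within each, 'timeout'
        # wins over the legacy 'default_timeout'.
        for config in (instance, init_config):
            for key in ('timeout', 'default_timeout'):
                if key in config:
                    return (config[key], config[key])
        return (DEFAULT_TIMEOUT, DEFAULT_TIMEOUT)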
47 changes: 0 additions & 47 deletions fluentd/tests/test_unit.py

This file was deleted.

39 changes: 39 additions & 0 deletions kafka_consumer/tests/test_integration.py
@@ -467,3 +467,42 @@ def test_regex_consumer_groups(
aggregator.assert_metric("kafka.estimated_consumer_lag", count=consumer_lag_seconds_count)

assert expected_warning in caplog.text


@pytest.mark.parametrize(
'read_persistent_cache, kafka_instance_config, consumer_lag_seconds_count',
[
pytest.param(
"",
{
'consumer_groups': {},
'data_streams_enabled': 'true',
'monitor_unlisted_consumer_groups': True,
},
0,
id='Read from cache failed',
),
],
)
def test_load_broker_timestamps_empty(
read_persistent_cache,
kafka_instance_config,
consumer_lag_seconds_count,
kafka_instance,
dd_run_check,
caplog,
aggregator,
check,
):

kafka_instance.update(kafka_instance_config)
check = check(kafka_instance)
check.read_persistent_cache = mock.Mock(return_value=read_persistent_cache)
dd_run_check(check)

caplog.set_level(logging.WARN)
expected_warning = " Could not read broker timestamps from cache"

assert expected_warning in caplog.text
aggregator.assert_metric("kafka.estimated_consumer_lag", count=consumer_lag_seconds_count)
assert check.read_persistent_cache.mock_calls == [mock.call("broker_timestamps_")]
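The mock here forces read_persistent_cache to return an empty string, exercising the fallback path the warning documents. A sketch of the behavior the test pins down (load_broker_timestamps is a hypothetical stand-in, not the check's actual implementation):

    import json

    def load_broker_timestamps(check):
        raw = check.read_persistent_cache("broker_timestamps_")
        try:
            return json.loads(raw)
        except (TypeError, ValueError):
            # An empty or unreadable cache degrades to empty state with a
            # warning, rather than failing the whole check run.
            check.log.warning("Could not read broker timestamps from cache")
            return {}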
39 changes: 0 additions & 39 deletions kafka_consumer/tests/test_unit.py
@@ -389,42 +389,3 @@ def test_get_interpolated_timestamp():
assert _get_interpolated_timestamp({10: 100, 20: 200}, 5) == 50
assert _get_interpolated_timestamp({0: 100, 10: 200}, 15) == 250
assert _get_interpolated_timestamp({10: 200}, 15) is None


@pytest.mark.parametrize(
'read_persistent_cache, kafka_instance_config, consumer_lag_seconds_count',
[
pytest.param(
"",
{
'consumer_groups': {},
'data_streams_enabled': 'true',
'monitor_unlisted_consumer_groups': True,
},
0,
id='Read from cache failed',
),
],
)
def test_load_broker_timestamps_empty(
read_persistent_cache,
kafka_instance_config,
consumer_lag_seconds_count,
kafka_instance,
dd_run_check,
caplog,
aggregator,
check,
):

kafka_instance.update(kafka_instance_config)
check = check(kafka_instance)
check.read_persistent_cache = mock.Mock(return_value=read_persistent_cache)
dd_run_check(check)

caplog.set_level(logging.WARN)
expected_warning = " Could not read broker timestamps from cache"

assert expected_warning in caplog.text
aggregator.assert_metric("kafka.estimated_consumer_lag", count=consumer_lag_seconds_count)
assert check.read_persistent_cache.mock_calls == [mock.call("broker_timestamps_")]
1 change: 1 addition & 0 deletions mysql/tests/test_metadata.py
@@ -701,6 +701,7 @@ def test_collect_schemas(aggregator, dd_run_check, dbm_instance):
assert deep_compare(expected_data_for_db[db_name], actual_payload)


@pytest.mark.integration
def test_schemas_collection_truncated(aggregator, dd_run_check, dbm_instance):

dbm_instance['dbm'] = True
1 change: 1 addition & 0 deletions mysql/tests/test_mysql.py
@@ -390,6 +390,7 @@ def test_complex_config_replica(aggregator, dd_run_check, instance_complex):
assert mysql_check._is_group_replication_active(db) is False


@pytest.mark.integration
@pytest.mark.parametrize(
'dbm_enabled, reported_hostname, expected_hostname',
[
2 changes: 2 additions & 0 deletions sqlserver/tests/test_activity.py
@@ -724,6 +724,7 @@ def test_get_estimated_row_size_bytes(dbm_instance, file):
assert abs((actual_size - computed_size) / float(actual_size)) <= 0.10


@pytest.mark.integration
def test_activity_collection_rate_limit(aggregator, dd_run_check, dbm_instance):
# test the activity collection loop rate limit
collection_interval = 0.1
@@ -758,6 +759,7 @@ def _expected_dbm_instance_tags(check):
return check._config.tags


@pytest.mark.integration
@pytest.mark.parametrize("activity_enabled", [True, False])
def test_async_job_enabled(dd_run_check, dbm_instance, activity_enabled):
dbm_instance['query_activity'] = {'enabled': activity_enabled, 'run_sync': False}
2 changes: 1 addition & 1 deletion sqlserver/tests/test_connection.py
@@ -381,7 +381,7 @@ def test_connection_failure(aggregator, dd_run_check, instance_docker):
)


@pytest.mark.unit
@pytest.mark.integration
@pytest.mark.parametrize(
"test_case_name,instance_overrides,expected_error_patterns,expected_error",
[
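The change above reclassifies connection-failure tests from unit to integration. Marker selection is only reliable when markers are registered; a hypothetical registration in standard pytest configuration syntax (this repository may declare it elsewhere, e.g. in its test framework):

    [pytest]
    markers =
        unit: requires no external services
        integration: requires a running service environment

With --strict-markers, registration also turns a misspelled marker into a collection error instead of a silently unselected test.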
24 changes: 24 additions & 0 deletions yarn/tests/test_integration.py
@@ -1,10 +1,12 @@
# (C) Datadog, Inc. 2019-present
# All rights reserved
# Licensed under a 3-clause BSD style license (see LICENSE)
import os

import pytest

from datadog_checks.base import AgentCheck
from datadog_checks.yarn import YarnCheck

from . import common

@@ -30,3 +32,25 @@ def assert_check(aggregator):
aggregator.assert_metric_has_tag(metric, common.LEGACY_CLUSTER_TAG)

aggregator.assert_all_metrics_covered()


@pytest.mark.integration
def test_metadata(aggregator, instance, datadog_agent):
check = YarnCheck("yarn", {}, [instance])
check.check_id = "test:123"

check.check(instance)

raw_version = os.getenv("YARN_VERSION")

major, minor, patch = raw_version.split(".")

version_metadata = {
"version.scheme": "semver",
"version.major": major,
"version.minor": minor,
"version.patch": patch,
"version.raw": raw_version,
}

datadog_agent.assert_metadata("test:123", version_metadata)
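This test reads YARN_VERSION from the environment, which is presumably only exported by the integration environment, hence the move out of the unit suite below. On the check side, the assertions correspond to the standard AgentCheck version-metadata API; a sketch (the method name is hypothetical):

    def _submit_version_metadata(self, raw_version):
        if raw_version:
            # set_metadata parses the semver string and emits the
            # version.major/minor/patch/raw/scheme fields asserted above.
            self.set_metadata('version', raw_version)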
22 changes: 0 additions & 22 deletions yarn/tests/test_yarn.py
@@ -2,7 +2,6 @@
# All rights reserved
# Licensed under a 3-clause BSD style license (see LICENSE)
import copy
import os
import re

import pytest
@@ -300,27 +299,6 @@ def test_ssl_verification(aggregator, mocked_bad_cert_request):
)


def test_metadata(aggregator, instance, datadog_agent):
check = YarnCheck("yarn", {}, [instance])
check.check_id = "test:123"

check.check(instance)

raw_version = os.getenv("YARN_VERSION")

major, minor, patch = raw_version.split(".")

version_metadata = {
"version.scheme": "semver",
"version.major": major,
"version.minor": minor,
"version.patch": patch,
"version.raw": raw_version,
}

datadog_agent.assert_metadata("test:123", version_metadata)


def test_collect_apps_all_states(dd_run_check, aggregator, mocked_request):
instance = YARN_COLLECT_APPS_ALL_STATES_CONFIG['instances'][0]
yarn = YarnCheck('yarn', {}, [instance])
