
Commit 3765e71
Remove six library from some integrations (#18604)
* Remove six library from some integrations

* fix mapreduce
iliakur authored Sep 18, 2024
1 parent d744388 · commit 3765e71
Showing 31 changed files with 198 additions and 263 deletions.
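
Nearly every hunk below is the same mechanical substitution, so it helps to have the six-to-Python-3 mapping in one place. A minimal, runnable sketch of the equivalences this commit relies on (sample data made up for illustration):

d = {'a': 1, 'b': 2}
assert list(d.items()) == [('a', 1), ('b', 2)]  # replaces six.iteritems(d)
assert list(d.values()) == [1, 2]               # replaces six.itervalues(d)
assert isinstance('text', str)                  # replaces six.string_types / six.text_type
from urllib.parse import urljoin, urlparse      # replaces six.moves.urllib.parse
assert isinstance(10**30, int)                  # replaces long; Python 3 ints are arbitrary precision

One behavioral note: iteritems/itervalues returned true iterators on Python 2, while .items()/.values() return dict views on Python 3. Both are lazy enough for the loops below; the difference only matters when code needs an actual iterator, as in the mapreduce change further down.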
8 changes: 3 additions & 5 deletions datadog_checks_dev/datadog_checks/dev/_env.py
@@ -5,8 +5,6 @@
 import os
 from base64 import urlsafe_b64decode, urlsafe_b64encode
 
-from six import iteritems
-
 DDTRACE_OPTIONS_LIST = [
     'DD_TAGS',
     'DD_TRACE*',
@@ -56,7 +54,7 @@ def e2e_testing():
 
 
 def set_env_vars(env_vars):
-    for key, value in iteritems(env_vars):
+    for key, value in env_vars.items():
         key = '{}{}'.format(E2E_ENV_VAR_PREFIX, key)
         os.environ[key] = value
 
@@ -68,11 +66,11 @@ def remove_env_vars(env_vars):
 
 def get_env_vars(raw=False):
     if raw:
-        return {key: value for key, value in iteritems(os.environ) if key.startswith(E2E_ENV_VAR_PREFIX)}
+        return {key: value for key, value in os.environ.items() if key.startswith(E2E_ENV_VAR_PREFIX)}
    else:
        env_vars = {}

-        for key, value in iteritems(os.environ):
+        for key, value in os.environ.items():
             _, found, ev = key.partition(E2E_ENV_VAR_PREFIX)
             if found:
                 # Normalize casing for Windows
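
For context on the surviving logic: get_env_vars relies on str.partition to both test for and strip the E2E prefix in one call. A small sketch of that idiom (the prefix value here is assumed for illustration, not taken from the real module):

E2E_ENV_VAR_PREFIX = 'DDEV_E2E_'  # illustrative value only

_, found, ev = 'DDEV_E2E_FOO'.partition(E2E_ENV_VAR_PREFIX)
assert (found, ev) == ('DDEV_E2E_', 'FOO')  # matched: the middle element is the prefix itself

_, found, ev = 'PATH'.partition(E2E_ENV_VAR_PREFIX)
assert found == ''  # no match: empty string is falsy, so the key is skipped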
9 changes: 4 additions & 5 deletions disk/datadog_checks/disk/disk.py
@@ -9,7 +9,6 @@
 from xml.etree import ElementTree as ET
 
 import psutil
-from six import iteritems, string_types
 
 from datadog_checks.base import AgentCheck, ConfigurationError, is_affirmative
 from datadog_checks.base.utils.platform import Platform
@@ -151,7 +150,7 @@ def check(self, _):
             self.log.debug('Passed: %s', part.device)
 
             tags = self._get_tags(part)
-            for metric_name, metric_value in iteritems(self._collect_part_metrics(part, disk_usage)):
+            for metric_name, metric_value in self._collect_part_metrics(part, disk_usage).items():
                 self.gauge(metric_name, metric_value, tags=tags)
 
             # Add in a disk read write or read only check
@@ -324,7 +323,7 @@ def _collect_inodes_metrics(self, mountpoint):
         return metrics
 
     def collect_latency_metrics(self):
-        for disk_name, disk in iteritems(psutil.disk_io_counters(perdisk=True)):
+        for disk_name, disk in psutil.disk_io_counters(perdisk=True).items():
             self.log.debug('IO Counters: %s -> %s', disk_name, disk)
             try:
                 metric_tags = [] if self._custom_tags is None else self._custom_tags[:]
@@ -389,7 +388,7 @@ def _compile_pattern_filters(self, instance):
     def _compile_valid_patterns(self, patterns, casing=IGNORE_CASE, extra_patterns=None):
         valid_patterns = []
 
-        if isinstance(patterns, string_types):
+        if isinstance(patterns, str):
             patterns = [patterns]
         else:
             patterns = list(patterns)
@@ -419,7 +418,7 @@ def _compile_tag_re(self):
         Compile regex strings from device_tag_re option and return list of compiled regex/tag pairs
         """
         device_tag_list = []
-        for regex_str, tags in iteritems(self._device_tag_re):
+        for regex_str, tags in self._device_tag_re.items():
             try:
                 device_tag_list.append([re.compile(regex_str, IGNORE_CASE), [t.strip() for t in tags.split(',')]])
             except TypeError:
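
The string_types change is the only non-dict substitution in this file: six.string_types was (str, unicode) on Python 2, and on Python 3 str is the only text type, so a bare isinstance check is equivalent. A minimal sketch of the str-or-iterable normalization pattern used by _compile_valid_patterns (simplified; the casing and extra_patterns handling is omitted):

import re

def compile_patterns(patterns):
    # Accept a single pattern string or any iterable of pattern strings.
    if isinstance(patterns, str):
        patterns = [patterns]
    else:
        patterns = list(patterns)
    return [re.compile(p, re.IGNORECASE) for p in patterns]

assert compile_patterns('sda.*')[0].match('SDA1')
assert len(compile_patterns(['sda.*', 'nvme.*'])) == 2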
13 changes: 6 additions & 7 deletions ecs_fargate/datadog_checks/ecs_fargate/ecs_fargate.py
@@ -7,7 +7,6 @@
 
 import requests
 from dateutil import parser
-from six import iteritems
 
 from datadog_checks.base import AgentCheck
 from datadog_checks.base.utils.common import round_value
@@ -192,7 +191,7 @@ def check(self, _):
         ## Ephemeral Storage Metrics
         if 'EphemeralStorageMetrics' in metadata:
             es_metrics = metadata['EphemeralStorageMetrics']
-            for field_name, metric_value in iteritems(es_metrics):
+            for field_name, metric_value in es_metrics.items():
                 metric_name = EPHEMERAL_STORAGE_GAUGE_METRICS.get(field_name)
                 self.gauge(metric_name, metric_value, task_tags)
 
@@ -229,7 +228,7 @@ def check(self, _):
             self.service_check('fargate_check', AgentCheck.WARNING, message=msg, tags=custom_tags)
             self.log.warning(msg, exc_info=True)
 
-        for container_id, container_stats in iteritems(stats):
+        for container_id, container_stats in stats.items():
             if container_id not in exlcuded_cid:
                 self.submit_perf_metrics(container_tags, container_id, container_stats)
 
@@ -337,7 +336,7 @@ def submit_perf_metrics(self, container_tags, container_id, container_stats):
             self.gauge('ecs.fargate.mem.limit', value, tags)
 
         # I/O metrics
-        for blkio_cat, metric_name in iteritems(IO_METRICS):
+        for blkio_cat, metric_name in IO_METRICS.items():
             read_counter = write_counter = 0
 
             blkio_stats = container_stats.get("blkio_stats", {}).get(blkio_cat)
@@ -363,13 +362,13 @@ def submit_perf_metrics(self, container_tags, container_id, container_stats):
 
         # Network metrics
         networks = container_stats.get('networks', {})
-        for network_interface, network_stats in iteritems(networks):
+        for network_interface, network_stats in networks.items():
             network_tags = tags + ["interface:{}".format(network_interface)]
-            for field_name, metric_name in iteritems(NETWORK_GAUGE_METRICS):
+            for field_name, metric_name in NETWORK_GAUGE_METRICS.items():
                 metric_value = network_stats.get(field_name)
                 if metric_value is not None:
                     self.gauge(metric_name, metric_value, network_tags)
-            for field_name, metric_name in iteritems(NETWORK_RATE_METRICS):
+            for field_name, metric_name in NETWORK_RATE_METRICS.items():
                 metric_value = network_stats.get(field_name)
                 if metric_value is not None:
                     self.rate(metric_name, metric_value, network_tags)
Additional file (path not captured in this view; the imports indicate an OpenMetrics-based check):
@@ -4,8 +4,6 @@
 from copy import deepcopy
 from re import match, search, sub
 
-from six import iteritems
-
 from datadog_checks.base.checks.openmetrics import OpenMetricsBaseCheck
 from datadog_checks.base.errors import CheckException
 
@@ -200,7 +198,7 @@ def submit_metric(self, metric_suffix, metric, scraper_config, gauge=True, monot
         # Explicit shallow copy of the instance tags
         _tags = list(scraper_config['custom_tags'])
 
-        for label_name, label_value in iteritems(sample[self.SAMPLE_LABELS]):
+        for label_name, label_value in sample[self.SAMPLE_LABELS].items():
             _tags.append('{}:{}'.format(label_name, label_value))
         if gauge:
             # submit raw metric
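
The submit_metric hunk is the same items() substitution applied to a Prometheus sample's label dict (SAMPLE_LABELS indexes the label dict in a parsed sample). A sketch of the tag-building step with made-up sample data:

sample_labels = {'namespace': 'default', 'pod': 'web-1'}
custom_tags = ['env:test']

_tags = list(custom_tags)  # explicit shallow copy, as in the diff
for label_name, label_value in sample_labels.items():
    _tags.append('{}:{}'.format(label_name, label_value))

assert _tags == ['env:test', 'namespace:default', 'pod:web-1']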
16 changes: 7 additions & 9 deletions mapreduce/datadog_checks/mapreduce/mapreduce.py
@@ -1,12 +1,10 @@
 # (C) Datadog, Inc. 2010-present
 # All rights reserved
 # Licensed under Simplified BSD License (see LICENSE)
 
+from urllib.parse import urljoin, urlsplit, urlunsplit
+
 from requests.exceptions import ConnectionError, HTTPError, InvalidURL, Timeout
 from simplejson import JSONDecodeError
-from six import iteritems, itervalues
-from six.moves.urllib.parse import urljoin, urlsplit, urlunsplit
 
 from datadog_checks.base import AgentCheck, ConfigurationError, is_affirmative
 from datadog_checks.mapreduce.metrics import (
@@ -93,7 +91,7 @@ def check(self, instance):
 
         # Report success after gathering all metrics from Application Master
         if running_jobs:
-            job_id, metrics = next(iteritems(running_jobs))
+            job_id, metrics = next(iter(running_jobs.items()))
             am_address = self._get_url_base(metrics['tracking_url'])
 
             self.service_check(
@@ -249,7 +247,7 @@ def _mapreduce_job_metrics(self, running_apps, addl_tags):
         """
         running_jobs = {}
 
-        for app_name, tracking_url in itervalues(running_apps):
+        for app_name, tracking_url in running_apps.values():
 
             metrics_json = self._rest_request_to_json(
                 tracking_url, self.MAPREDUCE_JOBS_PATH, self.MAPREDUCE_SERVICE_CHECK
@@ -289,7 +287,7 @@ def _mapreduce_job_counters_metrics(self, running_jobs, addl_tags):
         """
         Get custom metrics specified for each counter
         """
-        for job_metrics in itervalues(running_jobs):
+        for job_metrics in running_jobs.values():
             job_name = job_metrics['job_name']
 
             # Check if the job_name exist in the custom metrics
@@ -344,7 +342,7 @@ def _mapreduce_task_metrics(self, running_jobs, addl_tags):
         Get metrics for each MapReduce task
         Return a dictionary of {task_id: 'tracking_url'} for each MapReduce task
         """
-        for job_stats in itervalues(running_jobs):
+        for job_stats in running_jobs.values():
 
             metrics_json = self._rest_request_to_json(
                 job_stats['tracking_url'], 'tasks', self.MAPREDUCE_SERVICE_CHECK, tags=addl_tags
@@ -376,7 +374,7 @@ def _set_metrics_from_json(self, metrics_json, metrics, tags):
         """
         Parse the JSON response and set the metrics
         """
-        for status, (metric_name, metric_type) in iteritems(metrics):
+        for status, (metric_name, metric_type) in metrics.items():
             metric_status = metrics_json.get(status)
 
             if metric_status is not None:
@@ -415,7 +413,7 @@ def _rest_request_to_json(self, address, object_path, service_name=None, tags=No
 
         # Add kwargs as arguments
        if kwargs:
-            query = '&'.join(['{}={}'.format(key, value) for key, value in iteritems(kwargs)])
+            query = '&'.join(['{}={}'.format(key, value) for key, value in kwargs.items()])
             url = urljoin(url, '?' + query)
 
         try:
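
One change in this file is not purely mechanical: next(iteritems(running_jobs)) became next(iter(running_jobs.items())). On Python 3, dict.items() returns a view, which is iterable but not an iterator, so passing it straight to next() raises TypeError; wrapping it in iter() restores the Python 2 behavior of grabbing the first key/value pair. A minimal sketch (job data made up):

running_jobs = {'job_1': {'tracking_url': 'http://am.example:8088/proxy/application_1/'}}

# next(running_jobs.items()) would raise:
# TypeError: 'dict_items' object is not an iterator
job_id, metrics = next(iter(running_jobs.items()))
assert job_id == 'job_1'

The itervalues loops, by contrast, translate directly: running_apps.values() yields the same (app_name, tracking_url) tuples that itervalues did, so the tuple unpacking in _mapreduce_job_metrics is unchanged.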
7 changes: 3 additions & 4 deletions mcache/datadog_checks/mcache/mcache.py
@@ -4,7 +4,6 @@
 from __future__ import division
 
 import bmemcached
-from six import iteritems, itervalues
 
 from datadog_checks.base import AgentCheck, ConfigurationError
 
@@ -111,7 +110,7 @@ def _process_response(self, response):
         if len(response) != 1:
             raise BadResponseError("Malformed response: {}".format(response))
 
-        stats = list(itervalues(response))[0]
+        stats = list(response.values())[0]
         if not len(stats):
             raise BadResponseError("Malformed response for host: {}".format(stats))
 
@@ -177,7 +176,7 @@ def _get_metrics(self, client, tags, service_check_tags=None):
             raise
 
     def _get_optional_metrics(self, client, tags, options=None):
-        for arg, metrics_args in iteritems(self.OPTIONAL_STATS):
+        for arg, metrics_args in self.OPTIONAL_STATS.items():
             if not options or options.get(arg, False):
                 try:
                     optional_rates = metrics_args[0]
@@ -187,7 +186,7 @@ def _get_optional_metrics(self, client, tags, options=None):
                     stats = self._process_response(client.stats(arg))
                     prefix = "memcache.{}".format(arg)
 
-                    for metric, val in iteritems(stats):
+                    for metric, val in stats.items():
                         # Check if metric is a gauge or rate
                         metric_tags = []
                         if optional_fn:
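
In _process_response, list(itervalues(response))[0] becomes list(response.values())[0]: bmemcached returns stats keyed by server, and the check expects exactly one entry. A sketch with an assumed response shape:

response = {'localhost:11211': {'curr_items': '42', 'uptime': '100'}}

stats = list(response.values())[0]
assert stats['curr_items'] == '42'

# Equivalent without building the intermediate list:
stats = next(iter(response.values()))

The diff keeps the list(...)[0] spelling, presumably to stay a line-for-line translation of the six version.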
10 changes: 5 additions & 5 deletions mesos_master/datadog_checks/mesos_master/mesos_master.py
@@ -6,9 +6,9 @@
 Collects metrics from mesos master node, only the leader is sending metrics.
 """
+from urllib.parse import urlparse
+
 import requests
-from six import iteritems
-from six.moves.urllib.parse import urlparse
 
 from datadog_checks.base import AgentCheck
 from datadog_checks.base.errors import CheckException
@@ -305,7 +305,7 @@ def check(self, instance):
             framework_tags = ['framework_name:' + framework['name']] + tags
             self.GAUGE('mesos.framework.total_tasks', len(framework['tasks']), tags=framework_tags)
             resources = framework['used_resources']
-            for key_name, (metric_name, metric_func) in iteritems(self.FRAMEWORK_METRICS):
+            for key_name, (metric_name, metric_func) in self.FRAMEWORK_METRICS.items():
                 metric_func(self, metric_name, resources[key_name], tags=framework_tags)
 
         role_metrics = self._get_master_roles(url, instance_tags)
@@ -314,7 +314,7 @@
             role_tags = ['mesos_role:' + role['name']] + tags
             self.GAUGE('mesos.role.frameworks.count', len(role['frameworks']), tags=role_tags)
             self.GAUGE('mesos.role.weight', role['weight'], tags=role_tags)
-            for key_name, (metric_name, metric_func) in iteritems(self.ROLE_RESOURCES_METRICS):
+            for key_name, (metric_name, metric_func) in self.ROLE_RESOURCES_METRICS.items():
                 try:
                     metric_func(self, metric_name, role['resources'][key_name], tags=role_tags)
                 except KeyError:
@@ -335,7 +335,7 @@ def check(self, instance):
             self.STATS_METRICS,
         ]
         for m in metrics:
-            for key_name, (metric_name, metric_func) in iteritems(m):
+            for key_name, (metric_name, metric_func) in m.items():
                 if key_name in stats_metrics:
                     metric_func(self, metric_name, stats_metrics[key_name], tags=tags)
 
13 changes: 3 additions & 10 deletions mongo/datadog_checks/mongo/collectors/base.py
@@ -4,14 +4,9 @@
 
 import re
 
-from six import PY3, iteritems
-
 from datadog_checks.base import AgentCheck
 from datadog_checks.mongo.metrics import CASE_SENSITIVE_METRIC_NAME_SUFFIXES
 
-if PY3:
-    long = int
-
 
 class MongoCollector(object):
     """The base collector object, can be considered abstract.
@@ -46,7 +41,7 @@ def _normalize(self, metric_name, submit_method, prefix=None):
         metric_suffix = "ps" if submit_method == AgentCheck.rate else ""
 
         # Replace case-sensitive metric name characters
-        for pattern, repl in iteritems(CASE_SENSITIVE_METRIC_NAME_SUFFIXES):
+        for pattern, repl in CASE_SENSITIVE_METRIC_NAME_SUFFIXES.items():
             metric_name = re.compile(pattern).sub(repl, metric_name)
 
         # Normalize, and wrap
@@ -93,11 +88,9 @@ def _submit_payload(self, payload, additional_tags=None, metrics_to_collect=None
                 continue
 
             # value is now status[x][y][z]
-            if not isinstance(value, (int, long, float)):
+            if not isinstance(value, (int, float)):
                 raise TypeError(
-                    u"{0} value is a {1}, it should be an int, a float or a long instead.".format(
-                        metric_name, type(value)
-                    )
+                    u"{0} value is a {1}, it should be an int, or a float instead.".format(metric_name, type(value))
                 )
 
             # Submit the metric
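
Dropping the `if PY3: long = int` shim works because Python 3 ints are arbitrary precision; there is no separate long type left to accept. A minimal sketch of the tightened check (note bool is a subclass of int, so booleans pass it in both the old and new versions):

value = 10**30  # would have been a `long` on Python 2; a plain int on Python 3
if not isinstance(value, (int, float)):
    raise TypeError("{0} value is a {1}, it should be an int, or a float instead.".format('metric', type(value)))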
3 changes: 1 addition & 2 deletions mongo/datadog_checks/mongo/collectors/coll_stats.py
@@ -3,7 +3,6 @@
 # Licensed under a 3-clause BSD style license (see LICENSE)
 
 from pymongo.errors import OperationFailure
-from six import iteritems
 
 from datadog_checks.base import AgentCheck
 from datadog_checks.mongo.collectors.base import MongoCollector
@@ -96,7 +95,7 @@ def collect(self, api):
         # Submit the indexSizes metrics manually
         if index_sizes:
             metric_name_alias = self._normalize("collection.indexSizes", AgentCheck.gauge)
-            for idx, val in iteritems(index_sizes):
+            for idx, val in index_sizes.items():
                 # we tag the index
                 idx_tags = self.base_tags + additional_tags + ["index:%s" % idx]
                 self.gauge(metric_name_alias, val, tags=idx_tags)
5 changes: 1 addition & 4 deletions mongo/datadog_checks/mongo/collectors/top.py
@@ -1,9 +1,6 @@
 # (C) Datadog, Inc. 2020-present
 # All rights reserved
 # Licensed under a 3-clause BSD style license (see LICENSE)
 
-from six import iteritems
-
 from datadog_checks.mongo.collectors.base import MongoCollector
 from datadog_checks.mongo.common import MongosDeployment, ReplicaSetDeployment
 from datadog_checks.mongo.metrics import TOP_METRICS
@@ -26,7 +23,7 @@ def compatible_with(self, deployment):
 
     def collect(self, api):
         dbtop = api["admin"].command('top')
-        for ns, ns_metrics in iteritems(dbtop['totals']):
+        for ns, ns_metrics in dbtop['totals'].items():
             if "." not in ns:
                 continue
 
6 changes: 2 additions & 4 deletions mysql/datadog_checks/mysql/collection_utils.py
@@ -3,16 +3,14 @@
 # Licensed under Simplified BSD License (see LICENSE)
 import logging
 
-from six import iteritems, text_type
-
 log = logging.getLogger(__name__)
 
 
 def collect_all_scalars(key, dictionary):
     if key not in dictionary or dictionary[key] is None:
         yield None, None
     elif isinstance(dictionary[key], dict):
-        for tag, _ in iteritems(dictionary[key]):
+        for tag, _ in dictionary[key].items():
             yield tag, collect_type(tag, dictionary[key], float)
     else:
         yield None, collect_type(key, dictionary, float)
@@ -23,7 +21,7 @@ def collect_scalar(key, mapping):
 
 
 def collect_string(key, mapping):
-    return collect_type(key, mapping, text_type)
+    return collect_type(key, mapping, str)
 
 
 def collect_type(key, mapping, the_type):
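
With text_type gone, collect_string is now a thin wrapper over collect_type(key, mapping, str). A usage sketch of the generator above, assuming the definitions in this file are in scope and that collect_type coerces the looked-up value with the given type, which is what the visible code implies (sample data made up):

stats = {'threads_connected': '3', 'handler': {'read': '10', 'write': '5'}}

assert list(collect_all_scalars('threads_connected', stats)) == [(None, 3.0)]
assert list(collect_all_scalars('handler', stats)) == [('read', 10.0), ('write', 5.0)]
assert list(collect_all_scalars('missing', stats)) == [(None, None)]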