Update projects logic and refactor (#19087)
* Fix new deployment logic and support in-progress and queued deployments

* sync metadata

* update caddyfile

* fix

* add to completed date

* Refactor

* fixed e2e tests

* fixed instance fixture

* fixed e2e test

* added some e2e asserts

* fixed lint error

* all metrics asserted

* Add test asserting metadata, add test and handle failure of tasks

* store base tags

* Don't report completed and executing metrics if no data

* Add support for Octopus server node metrics

* remove unused constants

* validate

* lint

* update e2e

* update metadata

* Use project name and project group name tags

* Use space name instead of space id

* Update log lines

* update test instance

* Update metric description

---------

Co-authored-by: Jose Manuel Almaza <[email protected]>
sarah-witt and jose-manuel-almaza authored Nov 22, 2024
1 parent 7a3d2bd commit 179597c
Showing 25 changed files with 1,573 additions and 1,451 deletions.
53 changes: 46 additions & 7 deletions octopus_deploy/assets/configuration/spec.yaml
@@ -17,15 +17,30 @@ files:
example: http://localhost:80/api
type: string
required: true
- name: space
display_priority: 7
- name: spaces
display_priority: 5
description: |
Space to monitor
Filter your integration by spaces.
value:
example: Default
type: string
enabled: true
required: true
type: object
properties:
- name: limit
description: |
Maximum number of spaces to be processed.
type: integer
- name: include
type: array
items:
anyOf:
- type: string
- type: object
- name: exclude
type: array
items:
type: string
- name: interval
type: integer
example: {}
- name: project_groups
display_priority: 5
description: |
@@ -50,6 +65,30 @@
- name: interval
type: integer
example: {}
- name: projects
display_priority: 5
description: |
Filter your integration by projects.
value:
type: object
properties:
- name: limit
description: |
Maximum number of projects to be processed.
type: integer
- name: include
type: array
items:
anyOf:
- type: string
- type: object
- name: exclude
type: array
items:
type: string
- name: interval
type: integer
example: {}
- template: instances/default
- template: instances/http
overrides:
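For context, a hypothetical instance configuration exercising the new `spaces` and `projects` filters might look like the sketch below, written as the Python dict the check receives at runtime. The option names come from the spec above; the values, and the assumption that `interval` controls how often discovery is refreshed, are illustrative only.

```python
# Hypothetical instance dict using the new filter options defined in spec.yaml.
# Values are examples; only the key names are taken from the spec above.
instance = {
    'octopus_endpoint': 'http://localhost:80/api',
    'headers': {'X-Octopus-ApiKey': '<OCTOPUS_API_KEY>'},
    'spaces': {
        'limit': 5,                           # process at most 5 spaces
        'include': ['Default', 'Staging.*'],  # entries may be strings or mappings (anyOf string/object)
        'exclude': ['Sandbox.*'],
        'interval': 60,                       # assumed: discovery refresh interval in seconds
    },
    'projects': {
        'limit': 10,
        'include': [{'my-project': {'interval': 120}}, 'team-.*'],
        'exclude': ['test-.*'],
    },
}
```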
474 changes: 254 additions & 220 deletions octopus_deploy/datadog_checks/octopus_deploy/check.py

Large diffs are not rendered by default.
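The refactored check.py is not rendered above, but given the new `spaces`/`projects` options, the discovery step presumably boils down to include/exclude pattern matching plus a limit. The sketch below is an illustration under that assumption, not the integration's actual code; the helper name `filter_by_config` and the use of plain `re` patterns are inventions for the example.

```python
import re

def filter_by_config(names, config):
    """Select names using include/exclude patterns and an optional limit.

    `config` mirrors the `spaces`/`projects` options from spec.yaml:
    {'include': [...], 'exclude': [...], 'limit': N}. Illustrative sketch only.
    """
    include = config.get('include') or []
    exclude = config.get('exclude') or []
    limit = config.get('limit')

    selected = []
    for name in names:
        if limit is not None and len(selected) >= limit:
            break
        # Skip anything matching an exclude pattern.
        if any(re.search(pattern, name) for pattern in exclude):
            continue
        # Include entries may be plain strings or {pattern: options} mappings.
        for entry in include:
            pattern = entry if isinstance(entry, str) else next(iter(entry))
            if re.search(pattern, name):
                selected.append(name)
                break
    return selected

# Example: filter_by_config(['Default', 'Sandbox'], {'include': ['Default'], 'exclude': ['Sandbox']})
# returns ['Default'].
```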

@@ -49,6 +49,17 @@ class ProjectGroups(BaseModel):
limit: Optional[int] = Field(None, description='Maximum number of project groups to be processed.\n')


class Projects(BaseModel):
model_config = ConfigDict(
arbitrary_types_allowed=True,
frozen=True,
)
exclude: Optional[tuple[str, ...]] = None
include: Optional[tuple[Union[str, MappingProxyType[str, Any]], ...]] = None
interval: Optional[int] = None
limit: Optional[int] = Field(None, description='Maximum number of projects to be processed.\n')


class Proxy(BaseModel):
model_config = ConfigDict(
arbitrary_types_allowed=True,
@@ -59,6 +70,17 @@ class Proxy(BaseModel):
no_proxy: Optional[tuple[str, ...]] = None


class Spaces(BaseModel):
model_config = ConfigDict(
arbitrary_types_allowed=True,
frozen=True,
)
exclude: Optional[tuple[str, ...]] = None
include: Optional[tuple[Union[str, MappingProxyType[str, Any]], ...]] = None
interval: Optional[int] = None
limit: Optional[int] = Field(None, description='Maximum number of spaces to be processed.\n')


class InstanceConfig(BaseModel):
model_config = ConfigDict(
validate_default=True,
@@ -91,12 +113,13 @@ class InstanceConfig(BaseModel):
password: Optional[str] = None
persist_connections: Optional[bool] = None
project_groups: Optional[ProjectGroups] = None
projects: Optional[Projects] = None
proxy: Optional[Proxy] = None
read_timeout: Optional[float] = None
request_size: Optional[float] = None
service: Optional[str] = None
skip_proxy: Optional[bool] = None
space: str
spaces: Optional[Spaces] = None
tags: Optional[tuple[str, ...]] = None
timeout: Optional[float] = None
tls_ca_cert: Optional[str] = None
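As a quick illustration of how the generated models above behave (the `config_models.instance` import path follows the conventional generated-model layout and is assumed here):

```python
# Minimal sketch: validating the new filter options with the generated models.
# The import path is the standard config_models location and is assumed.
from datadog_checks.octopus_deploy.config_models.instance import Projects, Spaces

spaces = Spaces(limit=2, include=('Default', 'Staging.*'), exclude=('Sandbox.*',), interval=60)
projects = Projects(include=('my-project', 'team-.*'), limit=10)

print(spaces.limit)      # 2
print(projects.include)  # ('my-project', 'team-.*')

# The models are frozen, so fields cannot be reassigned after validation:
# spaces.limit = 3  # would raise a pydantic ValidationError
```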
23 changes: 0 additions & 23 deletions octopus_deploy/datadog_checks/octopus_deploy/constants.py

This file was deleted.

@@ -51,11 +51,6 @@ instances:
#
- octopus_endpoint: http://localhost:80/api

## @param space - string - required
## Space to monitor
#
space: Default

## @param headers - mapping - optional
## Headers to use for every request. An Authorization header including the Octopus Deploy API key token is required
## for authentication for the REST API.
@@ -64,11 +59,21 @@
headers:
X-Octopus-ApiKey: <OCTOPUS_API_KEY>

## @param spaces - mapping - optional
## Filter your integration by spaces.
#
# spaces: {}

## @param project_groups - mapping - optional
## Filter your integration by project groups and projects.
#
# project_groups: {}

## @param projects - mapping - optional
## Filter your integration by projects.
#
# projects: {}

## @param auth_token - mapping - optional
## This allows for the use of authentication information from dynamic sources.
## Both a reader and writer must be configured.
23 changes: 0 additions & 23 deletions octopus_deploy/datadog_checks/octopus_deploy/error.py

This file was deleted.

19 changes: 0 additions & 19 deletions octopus_deploy/datadog_checks/octopus_deploy/project_groups.py

This file was deleted.

8 changes: 3 additions & 5 deletions octopus_deploy/metadata.csv
@@ -1,11 +1,9 @@
metric_name,metric_type,interval,unit_name,per_unit_name,description,orientation,integration,short_name,curated_metric,sample_tags
octopus_deploy.api.can_connect,gauge,,,,Whether or not the check can connect to the Octopus Deploy API.,-1,octopus_deploy,octopus_deploy api,,
octopus_deploy.deployment.can_rerun,gauge,,,,Whether or not the deployment can be rerun.,-1,octopus_deploy,octopus_deploy deploy rerun,,
octopus_deploy.deployment.completed_time,gauge,,second,,Duration of deployment.,-1,octopus_deploy,octopus_deploy deploy dur,,
octopus_deploy.deployment.count,gauge,,,,Number of deployments monitored.,-1,octopus_deploy,octopus_deploy deploy count,,
octopus_deploy.deployment.duration,gauge,,second,,Duration of deployment.,-1,octopus_deploy,octopus_deploy deploy dur,,
octopus_deploy.deployment.has_warnings_or_errors,gauge,,,,Whether or not the deployment has warnings or errors.,-1,octopus_deploy,octopus_deploy warnings,,
octopus_deploy.deployment.queue_time,gauge,,second,,Time deployment was in queue.,-1,octopus_deploy,octopus_deploy deploy queue,,
octopus_deploy.deployment.succeeded,gauge,,,,Whether or not the deployment succeeded.,-1,octopus_deploy,octopus_deploy deploy success,,
octopus_deploy.deployment.executing_time,gauge,,second,,How long the deployment has been executing.,-1,octopus_deploy,octopus_deploy deploy dur,,
octopus_deploy.deployment.queued_time,gauge,,second,,Time deployment was in queue.,-1,octopus_deploy,octopus_deploy deploy queue,,
octopus_deploy.project.count,gauge,,,,Number of projects discovered.,-1,octopus_deploy,octopus_deploy projects count,,
octopus_deploy.project_group.count,gauge,,,,Number of project groups discovered.,-1,octopus_deploy,octopus_deploy project group count,,
octopus_deploy.server_node.count,gauge,,,,Number of Octopus server nodes discovered.,-1,octopus_deploy,octopus_deploy server count,,
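For context on the renamed timing metrics, `deployment.queued_time` and `deployment.executing_time` are durations derived from task timestamps. A hedged sketch of how such gauges could be computed and submitted with the base check's `gauge` method is shown below; the `queue_time`, `start_time`, and `completed_time` field names on the task payload, and the helper itself, are assumptions for illustration.

```python
from datetime import datetime, timezone

def submit_deployment_timing(check, task, tags):
    # Illustrative only: derive queued/executing durations from task timestamps.
    # `check.gauge(name, value, tags=...)` is the standard AgentCheck submission call;
    # the timestamp field names on `task` are assumed.
    now = datetime.now(timezone.utc)
    queued = task.get('queue_time')
    started = task.get('start_time')
    completed = task.get('completed_time')

    if queued and started:
        check.gauge('octopus_deploy.deployment.queued_time', (started - queued).total_seconds(), tags=tags)
    if started:
        end = completed or now
        check.gauge('octopus_deploy.deployment.executing_time', (end - started).total_seconds(), tags=tags)
```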
47 changes: 26 additions & 21 deletions octopus_deploy/tests/conftest.py
@@ -13,6 +13,7 @@
from datadog_checks.dev import docker_run
from datadog_checks.dev.conditions import CheckDockerLogs, CheckEndpoints
from datadog_checks.dev.fs import get_here
from datadog_checks.dev.http import MockResponse

from .constants import COMPOSE_FILE, INSTANCE, LAB_INSTANCE, USE_OCTOPUS_LAB

@@ -26,15 +27,15 @@ def dd_environment():
endpoint = INSTANCE["octopus_endpoint"]
conditions = [
CheckDockerLogs(identifier='octopus-api', patterns=['server running']),
CheckEndpoints(f'{endpoint}/spaces'),
CheckEndpoints(f'{endpoint}/api/spaces'),
]
with docker_run(compose_file, conditions=conditions):
yield INSTANCE


@pytest.fixture
def instance():
return {'octopus_endpoint': 'http://localhost:80/api', 'space': 'Default'}
return INSTANCE


def get_json_value_from_file(file_path):
@@ -55,9 +56,7 @@ def process_files(dir, response_parent):
for file in dir.rglob('*'):
if file.is_file() and file.stem != ".slash":
relative_dir_path = (
"/"
+ (str(file.parent.relative_to(dir)) if str(file.parent.relative_to(dir)) != "." else "")
+ ("/" if (file.parent / ".slash").is_file() else "")
"/" + str(file.parent.relative_to(dir)) + ("/" if (file.parent / ".slash").is_file() else "")
)
if relative_dir_path not in response_parent:
response_parent[relative_dir_path] = {}
@@ -70,14 +69,24 @@ def process_dir(dir, response_parent):

def create_responses_tree():
root_dir_path = os.path.join(get_here(), 'fixtures')
method_subdirs = [d for d in Path(root_dir_path).iterdir() if d.is_dir() and d.name == 'GET']
method_subdirs = [d for d in Path(root_dir_path).iterdir() if d.is_dir() and d.name in ['GET', 'POST']]
for method_subdir in method_subdirs:
process_dir(method_subdir, responses_map)

def method(method, url, file='response', headers=None, params=None):
filename = file
request_path = url

request_path = request_path.replace('?', '/')
if params:
param_string = ""
for key, val in params.items():
if type(val) is list:
val_string = ','.join(f'{str(val_item)}' for val_item in val)
else:
val_string = str(val)
param_string += ("/" if param_string else "") + f'{key}={val_string}'
request_path = '{}/{}'.format(url, param_string)
print(request_path)
response = responses_map.get(method, {}).get(request_path, {}).get(filename)
return response

@@ -88,7 +97,6 @@ def method(method, url, file='response', headers=None, params=None):
@pytest.fixture
def mock_http_call(mock_responses):
def call(method, url, file='response', headers=None, params=None):

response = mock_responses(method, url, file=file, headers=headers, params=params)
if response is not None:
return response
@@ -105,25 +113,22 @@ def call(method, url, file='response', headers=None, params=None):
def mock_http_get(request, monkeypatch, mock_http_call):
param = request.param if hasattr(request, 'param') and request.param is not None else {}
http_error = param.pop('http_error', {})
data = param.pop('mock_data', {})
elapsed_total_seconds = param.pop('elapsed_total_seconds', {})

def get(url, *args, **kwargs):
method = 'GET'
url = get_url_path(url)
request_path = url.replace('?', '/')
if http_error and url in http_error:
return http_error[url]
if data and url in data:
return MockResponse(json_data=data[url], status_code=200)
headers = kwargs.get('headers')
params = kwargs.get('params')
if params:
param_string = '/'.join(f'{key}={str(val)}' for key, val in params.items())
request_path = f'{url}/{param_string}'

request_path = request_path.replace(" ", "")
if http_error and request_path in http_error:
return http_error[request_path]

mock_elapsed = mock.MagicMock(total_seconds=mock.MagicMock(return_value=elapsed_total_seconds.get(url, 0.0)))
mock_json = mock.MagicMock(return_value=mock_http_call(method, url, headers=headers, params=params))
mock_status_code = mock.MagicMock(return_value=200)
headers = kwargs.get('headers')

mock_json = mock.MagicMock(return_value=mock_http_call(method, request_path, headers=headers))
return mock.MagicMock(json=mock_json, status_code=mock_status_code)
return mock.MagicMock(elapsed=mock_elapsed, json=mock_json, status_code=mock_status_code)

mock_get = mock.MagicMock(side_effect=get)
monkeypatch.setattr('requests.get', mock_get)
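To make the fixture lookup above concrete, the mocks rewrite a request's query parameters into extra path segments before consulting the fixture tree. A small illustrative walk-through (the endpoint and fixture layout are assumptions):

```python
# Illustrative only: how mock_http_get builds the key used to find a fixture.
url = 'api/spaces'
params = {'take': 1000}

# Query parameters become path segments, mirroring the conftest logic above.
param_string = '/'.join(f'{key}={val}' for key, val in params.items())
request_path = f'{url}/{param_string}'   # -> 'api/spaces/take=1000'

# create_responses_tree() indexes JSON files under tests/fixtures/<METHOD>/..., so
# this request would be served from a fixture file roughly at:
#     tests/fixtures/GET/api/spaces/take=1000/response.json
print(request_path)
```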