Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

[PLINT-484] Support deployment logs #18877

Draft
wants to merge 1 commit into
base: sarah/add-octopus-integration
Choose a base branch
from
Draft
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
5 changes: 5 additions & 0 deletions octopus_deploy/assets/configuration/spec.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -49,4 +49,9 @@ files:
example: {}
- template: instances/default
- template: instances/http
- template: logs
example:
- type: integration
source: octopus-deploy
service: <SERVICE_NAME>

42 changes: 36 additions & 6 deletions octopus_deploy/datadog_checks/octopus_deploy/check.py
Original file line number Diff line number Diff line change
@@ -1,6 +1,7 @@
# (C) Datadog, Inc. 2024-present
# All rights reserved
# Licensed under a 3-clause BSD style license (see LICENSE)
import copy
from datetime import datetime, timedelta

from requests.exceptions import ConnectionError, HTTPError, InvalidURL, Timeout
Expand All @@ -9,6 +10,7 @@
from datadog_checks.base.errors import CheckException
from datadog_checks.base.utils.discovery import Discovery
from datadog_checks.base.utils.models.types import copy_raw
from datadog_checks.base.utils.time import get_timestamp

from .config_models import ConfigMixin
from .constants import (
Expand Down Expand Up @@ -95,17 +97,29 @@ def _get_new_tasks_for_project(self, project):
]

tags = [f'task_name:{task_name}', f'task_state:{state}']
task_tags = project_tags + tags

self.gauge(DEPLOY_COUNT_METRIC, 1, tags=self.base_tags + project_tags + tags)
self.gauge(DEPLOY_DURATION_METRIC, duration_seconds, tags=self.base_tags + project_tags + tags)
self.gauge(DEPLOY_QUEUE_TIME_METRIC, queue_time_seconds, tags=self.base_tags + project_tags + tags)
self.gauge(DEPLOY_SUCCESS_METRIC, succeeded, tags=self.base_tags + project_tags + tags)
self.gauge(DEPLOY_RERUN_METRIC, can_rerun, tags=self.base_tags + project_tags + tags)
self.gauge(DEPLOY_WARNINGS_METRIC, has_warnings, tags=self.base_tags + project_tags + tags)
self.gauge(DEPLOY_COUNT_METRIC, 1, tags=self.base_tags + task_tags)
self.gauge(DEPLOY_DURATION_METRIC, duration_seconds, tags=self.base_tags + task_tags)
self.gauge(DEPLOY_QUEUE_TIME_METRIC, queue_time_seconds, tags=self.base_tags + task_tags)
self.gauge(DEPLOY_SUCCESS_METRIC, succeeded, tags=self.base_tags + task_tags)
self.gauge(DEPLOY_RERUN_METRIC, can_rerun, tags=self.base_tags + task_tags)
self.gauge(DEPLOY_WARNINGS_METRIC, has_warnings, tags=self.base_tags + task_tags)

if self.logs_enabled:
self._collect_task_logs(task_id, task_tags)

new_completed_time = new_completed_time + timedelta(milliseconds=1)
project.last_completed_time = new_completed_time

@handle_error
def _collect_task_logs(self, task_id, task_tags):
    """Fetch the activity log for a completed task and forward it as Datadog logs.

    Queries the task "details" endpoint and hands the top-level ActivityLogs
    entries to _submit_activity_logs, tagging each with the base tags plus the
    task-specific tags. Errors are absorbed by the @handle_error decorator so a
    single failing task does not abort the whole check run.
    """
    details_url = f"{self.config.octopus_endpoint}/{self.space_id}/tasks/{task_id}/details"
    details_response = self.http.get(details_url)
    details_response.raise_for_status()
    activity_logs = details_response.json().get('ActivityLogs', [])
    self._submit_activity_logs(activity_logs, self.base_tags + task_tags)

def _initialize_projects(self, project_group, project_group_config):
normalized_projects = normalize_discover_config_include(
self.log, project_group_config.get("projects") if project_group_config else None
Expand Down Expand Up @@ -254,6 +268,22 @@ def check(self, _):

self.collect_server_nodes_metrics()

def _submit_activity_logs(self, activity_logs, tags):
    """Recursively submit an Octopus task activity-log tree as Datadog logs.

    Each ActivityLogs node carries a step Name, a list of LogElements (the
    actual log lines), and nested Children nodes. One log payload is sent per
    LogElement; Children are walked depth-first with the same tags.

    :param activity_logs: list of ActivityLogs dicts from the task details API
    :param tags: flat list of tag strings applied to every emitted log
    """
    for activity_log in activity_logs:
        stage_name = activity_log.get("Name")

        for log_element in activity_log.get("LogElements", []):
            payload = {
                # Fresh copy per payload so downstream consumers cannot mutate
                # the shared tag list. Tags are immutable strings, so a shallow
                # copy is equivalent to — and much cheaper than — deepcopy.
                "ddtags": list(tags),
                "message": log_element.get("MessageText"),
                # OccurredAt is an ISO-8601 string; fromisoformat raises if it
                # is missing — presumably caught by the @handle_error wrapper
                # on the collection entry point (TODO confirm).
                "timestamp": get_timestamp(datetime.fromisoformat(log_element.get("OccurredAt"))),
                "status": log_element.get("Category"),
                "stage_name": stage_name,
            }
            self.send_log(payload)

        # Recurse into nested steps; an absent/empty Children list is a no-op.
        self._submit_activity_logs(activity_log.get("Children", []), tags)


# Discovery class requires 'include' to be a dict, so this function is needed to normalize the config
def normalize_discover_config_include(log, config):
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -411,3 +411,23 @@ instances:
## Whether or not to allow URL redirection.
#
# allow_redirects: true

## Log Section
##
## type - required - Type of log input source (tcp / udp / file / windows_event).
## port / path / channel_path - required - Set port if type is tcp or udp.
## Set path if type is file.
## Set channel_path if type is windows_event.
## source - required - Attribute that defines which integration sent the logs.
## encoding - optional - For file specifies the file encoding. Default is utf-8. Other
## possible values are utf-16-le and utf-16-be.
## service - optional - The name of the service that generates the log.
## Overrides any `service` defined in the `init_config` section.
## tags - optional - Add tags to the collected logs.
##
## Discover Datadog log collection: https://docs.datadoghq.com/logs/log_collection/
#
# logs:
# - type: integration
# source: octopus-deploy
# service: <SERVICE_NAME>
Loading
Loading