Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Fix misconfigured alerts between Kibana ndjson & Elastalert #293

Merged
merged 3 commits into from
Oct 7, 2024
Merged
Show file tree
Hide file tree
Changes from 2 commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
142 changes: 110 additions & 32 deletions .github/workflows/auto-pr-to-release.yml
Original file line number Diff line number Diff line change
Expand Up @@ -18,11 +18,100 @@ on:
description: 'PR number to process'
Copy link
Member Author

@rikukissa rikukissa Oct 7, 2024

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

This is just an update to the auto-pr pipeline. It helps me backport this change to both 1.5.2 and 1.6.0.

required: true
default: ''
dry_run:
description: 'Dry run'
required: false
default: false
type: boolean

jobs:
create-pr:
resolve-releases:
if: ${{ github.event_name == 'pull_request' && github.event.pull_request.merged == true || github.event_name == 'workflow_dispatch' }}
runs-on: ubuntu-latest
outputs:
matrix: ${{ steps.resolve-applicable-versions.outputs.matrix }}
steps:
- name: Checkout repository
uses: actions/checkout@v4
with:
fetch-depth: 0

- name: Get PR details from workflow dispatch
if: ${{ github.event_name == 'workflow_dispatch' }}
id: get_pr_details_dispatch
run: |
PR_NUMBER=${{ github.event.inputs.pr_number }}
PR_DATA=$(curl -s -H "Accept: application/vnd.github.v3+json" https://api.github.com/repos/opencrvs/opencrvs-countryconfig/pulls/$PR_NUMBER)
echo "MILESTONE=$(printf '%s' $PR_DATA | jq -r '.milestone.title')" >> $GITHUB_ENV
env:
GITHUB_TOKEN: ${{ secrets.GH_TOKEN }}

- name: Get PR details from event
if: ${{ github.event_name == 'pull_request' }}
id: get_pr_details_event
run: |
echo "MILESTONE=${{ github.event.pull_request.milestone.title }}" >> $GITHUB_ENV
env:
GITHUB_TOKEN: ${{ secrets.GH_TOKEN }}

- name: Check for milestone and if release branch exists
continue-on-error: true
id: resolve-applicable-versions
run: |
if [ -z "${{ env.MILESTONE }}" ] || [ "${{ env.MILESTONE }}" = "null" ]; then
echo "No milestone set. Exiting."
exit 1
fi

filter_versions() {
local input_version=$1

# List remote branches, extract versions, and sort them semantically
versions=$(git ls-remote --heads origin 'release-*' | awk -F'release-' '{print $2}' | sort -V)

# Filter out versions less than the input version
filtered_versions=$(echo "$versions" | awk -v input="$input_version" '
function compare_versions(v1, v2) {
split(v1, a, /[.v]/);
split(v2, b, /[.v]/);
for (i = 2; i <= 4; i++) {
if (a[i] < b[i]) return -1;
if (a[i] > b[i]) return 1;
}
return 0;
}
{
if (compare_versions($0, input) >= 0) {
print $0
}
}')

# Keep only the highest patch version for each minor version
echo "$filtered_versions" | awk -F. '
{
minor = $1 "." $2;
patches[minor] = $0;
}
END {
for (minor in patches) {
print patches[minor];
}
}' | sort -V
}

versions=$(filter_versions "${{ env.MILESTONE }}")
json_array=$(echo "$versions" | jq -R -s -c 'split("\n") | map(select(. != ""))')
echo "matrix=$json_array" >> $GITHUB_OUTPUT

create-pr:
needs: resolve-releases
runs-on: ubuntu-22.04
if: ${{ always() && needs.resolve-releases.result == 'success' }}

strategy:
fail-fast: false
matrix:
version: ${{fromJson(needs.resolve-releases.outputs.matrix)}}

steps:
- name: Checkout repository
Expand All @@ -39,13 +128,12 @@ jobs:
echo "PR_ID=$(printf '%s' $PR_DATA | jq -r '.number')" >> $GITHUB_ENV
echo "PR_AUTHOR=$(printf '%s' $PR_DATA | jq -r '.user.login')" >> $GITHUB_ENV
echo "PR_MERGER=$(printf '%s' $PR_DATA | jq -r '.merged_by.login')" >> $GITHUB_ENV
echo "MILESTONE=$(printf '%s' $PR_DATA | jq -r '.milestone.title')" >> $GITHUB_ENV
echo "BASE_BRANCH=$(printf '%s' $PR_DATA | jq -r '.base.ref')" >> $GITHUB_ENV
echo "HEAD_BRANCH=$(printf '%s' $PR_DATA | jq -r '.head.ref')" >> $GITHUB_ENV
echo "PR_TITLE=$(printf '%s' $PR_DATA | jq -r '.title')" >> $GITHUB_ENV
echo "BASE_SHA=$(printf '%s' $PR_DATA | jq -r '.base.sha')" >> $GITHUB_ENV
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
GITHUB_TOKEN: ${{ secrets.GH_TOKEN }}

- name: Get PR details from event
if: ${{ github.event_name == 'pull_request' }}
Expand All @@ -54,7 +142,6 @@ jobs:
PR_NUMBER=${{ github.event.pull_request.number }}
echo "PR_ID=${{ github.event.pull_request.number }}" >> $GITHUB_ENV
echo "PR_AUTHOR=${{ github.event.pull_request.user.login }}" >> $GITHUB_ENV
echo "MILESTONE=${{ github.event.pull_request.milestone.title }}" >> $GITHUB_ENV
echo "BASE_BRANCH=${{ github.event.pull_request.base.ref }}" >> $GITHUB_ENV
echo "HEAD_BRANCH=${{ github.event.pull_request.head.ref }}" >> $GITHUB_ENV
echo "PR_TITLE=${{ github.event.pull_request.title }}" >> $GITHUB_ENV
Expand All @@ -64,40 +151,24 @@ jobs:
MERGED_BY_LOGIN=$(echo "$PR_DETAILS" | jq -r '.mergedBy.login')
echo "PR_MERGER=$MERGED_BY_LOGIN" >> $GITHUB_ENV
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}

- name: Check for milestone and if release branch exists
continue-on-error: true
id: check_release_branch
run: |
if [ -z "${{ env.MILESTONE }}" ]; then
echo "No milestone set. Exiting."
exit 1
fi

RELEASE_BRANCH="release-${{ env.MILESTONE }}"

# Check if the release branch exists
if git ls-remote --heads origin $RELEASE_BRANCH | grep -q "refs/heads/$RELEASE_BRANCH"; then
echo "RELEASE_BRANCH=${RELEASE_BRANCH}" >> $GITHUB_ENV
else
echo "Release branch $RELEASE_BRANCH does not exist. Exiting."
exit 1
fi
GITHUB_TOKEN: ${{ secrets.GH_TOKEN }}

- name: Create and push the new branch for the PR
if: ${{ steps.check_release_branch.outcome == 'success' }}
env:
GITHUB_TOKEN: ${{ secrets.GH_TOKEN }}
run: |
SEMANTIC_PR_TITLE="${{ env.PR_TITLE }}"
RELEASE_BRANCH="release-${{ matrix.version }}"
MILESTONE="${{ matrix.version }}"

# Check for semantic prefix
if [[ $SEMANTIC_PR_TITLE =~ ^(feat|fix|docs|style|refactor|perf|test|chore|build|ci|revert|wip|merge)\: ]]; then
SEMANTIC_PR_TITLE="${BASH_REMATCH[1]}(${MILESTONE}): ${SEMANTIC_PR_TITLE#*: }"
else
SEMANTIC_PR_TITLE="🍒 Merge changes from PR #${{ env.PR_ID }} to ${{ env.RELEASE_BRANCH }}"
SEMANTIC_PR_TITLE="🍒 Merge changes from PR #${{ env.PR_ID }} to $RELEASE_BRANCH"
fi

PR_BODY="Automated PR to merge changes from develop to ${{ env.RELEASE_BRANCH }}"
PR_BODY="Automated PR to merge changes from develop to $RELEASE_BRANCH"

# Configure git
git config user.name "github-actions"
Expand All @@ -106,10 +177,10 @@ jobs:

# Fetch and checkout the release branch
git fetch --all --unshallow
git checkout ${{ env.RELEASE_BRANCH }}
git checkout $RELEASE_BRANCH

# Create a new branch for the PR
NEW_BRANCH="auto-pr-${{ env.RELEASE_BRANCH }}-${{ env.PR_ID }}-$RANDOM"
NEW_BRANCH="auto-pr-$RELEASE_BRANCH-${{ env.PR_ID }}-$RANDOM"
git checkout -b $NEW_BRANCH

echo "HEAD_BRANCH: ${{ env.HEAD_BRANCH }}"
Expand Down Expand Up @@ -154,6 +225,15 @@ jobs:
"
}

if [ "${{ github.event.inputs.dry_run }}" == "true" ]; then
echo "This is a dry run."
echo "Would have pushed the new branch $NEW_BRANCH"
echo "PR title: $SEMANTIC_PR_TITLE"
echo "PR body:"
echo "$PR_BODY"
exit 0
fi

# Push the new branch
git push origin $NEW_BRANCH

Expand All @@ -167,6 +247,4 @@ jobs:
AUTHOR=${{ env.PR_MERGER }}
fi
fi
gh pr create --title "$SEMANTIC_PR_TITLE" --body "$PR_BODY" --head "$NEW_BRANCH" --base "${{ env.RELEASE_BRANCH }}" --assignee "$AUTHOR" --reviewer "$AUTHOR"
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
gh pr create --title "$SEMANTIC_PR_TITLE" --body "$PR_BODY" --head "$NEW_BRANCH" --base "$RELEASE_BRANCH" --assignee "$AUTHOR" --reviewer "$AUTHOR"
4 changes: 2 additions & 2 deletions infrastructure/monitoring/elastalert/rules/alert.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -26,13 +26,13 @@ filter:
should:
- term:
rule.name.keyword:
value: 'Available disk space in data partition'
value: 'Available disk space in root file system'
- term:
rule.name.keyword:
value: 'CPU under heavy load'
- term:
rule.name.keyword:
value: 'Low on available disk space'
value: 'Low on available disk space in data partition'
minimum_should_match: 1

alert: post2
Expand Down
14 changes: 8 additions & 6 deletions package.json
Original file line number Diff line number Diff line change
Expand Up @@ -18,7 +18,7 @@
"scripts": {
"dev": "yarn start",
"precommit": "lint-staged",
"test": "echo 'no tests, yet'",
"test": "vitest",
"test:compilation": "tsc --noEmit",
"lint": "eslint -c .eslintrc.js",
"start": "cross-env NODE_ENV=development NODE_OPTIONS=--dns-result-order=ipv4first nodemon --exec ts-node -r tsconfig-paths/register src/index.ts",
Expand All @@ -34,8 +34,8 @@
"@graphql-codegen/add": "^3.1.1",
"@graphql-codegen/cli": "^3.3.1",
"@graphql-codegen/introspection": "^3.0.1",
"@graphql-codegen/typescript-operations": "^3.0.4",
"@graphql-codegen/typescript": "^3.0.4",
"@graphql-codegen/typescript-operations": "^3.0.4",
"@inquirer/editor": "^1.2.13",
"@octokit/core": "4.2.1",
"@types/google-libphonenumber": "^7.4.23",
Expand All @@ -49,19 +49,21 @@
"@typescript-eslint/eslint-plugin": "^5.60.1",
"@typescript-eslint/parser": "^5.60.1",
"cypress-xpath": "^2.0.1",
"eslint": "^8.43.0",
"eslint-config-prettier": "^8.8.0",
"eslint-plugin-prettier": "^4.2.1",
"eslint": "^8.43.0",
"husky": "1.0.0-rc.13",
"inquirer": "^9.2.12",
"js-yaml": "^4.1.0",
"kleur": "^4.1.5",
"libsodium-wrappers": "^0.7.13",
"lint-staged": "^7.1.0",
"node-ssh": "^13.2.0",
"nodemon": "^2.0.22",
"pino-pretty": "^11.0.0",
"prettier": "^2.8.8",
"react-intl": "^6.4.3"
"react-intl": "^6.4.3",
"vitest": "^2.1.2"
},
"dependencies": {
"@faker-js/faker": "^6.0.0-alpha.5",
Expand All @@ -76,8 +78,8 @@
"@types/hapi__hapi": "^20.0.0",
"@types/jwt-decode": "^2.2.1",
"@types/lodash": "^4.14.117",
"@types/node-fetch": "^2.6.2",
"@types/node": "^10.12.5",
"@types/node-fetch": "^2.6.2",
"@types/nodemailer": "^6.4.14",
"app-module-path": "^2.2.0",
"chalk": "^2.4.1",
Expand All @@ -89,8 +91,8 @@
"dotenv": "^16.4.5",
"esbuild": "^0.18.9",
"google-libphonenumber": "^3.2.32",
"graphql-tag": "^2.12.6",
"graphql": "^16.3.0",
"graphql-tag": "^2.12.6",
"handlebars": "^4.7.7",
"hapi-auth-jwt2": "10.4.0",
"hapi-pino": "^9.0.0",
Expand Down
54 changes: 54 additions & 0 deletions tests/verify-elastalert-kibana-alerts-match.test.ts
Original file line number Diff line number Diff line change
@@ -0,0 +1,54 @@
import { readdirSync, readFileSync } from 'fs'
Copy link
Member Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

This now ensures the Kibana ndjson export and the Elastalert rule YAML files are always kept in sync.

import yaml from 'js-yaml'
import { join } from 'path'
import { expect, it } from 'vitest'

/**
 * Recursively collect every value stored under `key` anywhere inside `obj`.
 *
 * Traverses arrays and plain objects depth-first; whenever a mapping contains
 * `key`, its value is recorded, and traversal also descends into that value,
 * so matches nested under a matching key are found as well.
 *
 * @param obj - Arbitrary JSON/YAML-like structure (objects, arrays, scalars).
 * @param key - Property name to search for.
 * @returns All matching values, in depth-first discovery order.
 */
function findAllValuesByKey(obj: unknown, key: string): any[] {
  const result: any[] = []

  const recurse = (item: unknown) => {
    if (Array.isArray(item)) {
      for (const element of item) {
        recurse(element)
      }
    } else if (typeof item === 'object' && item !== null) {
      // Cast once so string indexing is legal under strict TypeScript
      // (indexing a value narrowed only to `object` is a type error).
      // `for..in` yields enumerable string keys, same as the original.
      const record = item as Record<string, unknown>
      for (const k in record) {
        if (k === key) {
          result.push(record[k])
        }
        recurse(record[k])
      }
    }
  }

  recurse(obj)
  return result
}

it('all tests defined in Kibana config are also defined in Elastalert config', () => {
  // Alert names declared in the Kibana saved-objects export
  // (newline-delimited JSON: one saved object per line).
  const kibanaConfigPath = join(
    __dirname,
    '../infrastructure/monitoring/kibana',
    'config.ndjson'
  )
  const alertNames = readFileSync(kibanaConfigPath, 'utf8')
    .split('\n')
    .map((line) => JSON.parse(line))
    .filter((entry) => entry.type === 'alert')
    .map((entry) => entry.attributes.name)
    .sort()
  const uniqueAlertNames = alertNames.filter(
    (name, index, all) => all.indexOf(name) === index
  )

  // Alert names referenced from the Elastalert rule files through
  // their `rule.name.keyword` term filters.
  const rulesDir = join(
    __dirname,
    '../infrastructure/monitoring/elastalert/rules'
  )
  const referencedNames = readdirSync(rulesDir)
    .map((ruleFile) => readFileSync(join(rulesDir, ruleFile), 'utf8'))
    .map((contents) => yaml.load(contents))
    .flatMap((parsedRule) => findAllValuesByKey(parsedRule, 'rule.name.keyword'))
    .map((termFilter) => termFilter.value)
    .sort()
  const uniqueReferencedNames = referencedNames.filter(
    (name, index, all) => all.indexOf(name) === index
  )

  expect(uniqueReferencedNames).toEqual(uniqueAlertNames)
})
Loading
Loading