Skip to content

Commit

Permalink
Merge branch 'develop' into ocrvs-7156
Browse files Browse the repository at this point in the history
  • Loading branch information
Nil20 committed Aug 7, 2024
2 parents 1c4bd08 + ae12c00 commit 3216863
Show file tree
Hide file tree
Showing 25 changed files with 187 additions and 123 deletions.
53 changes: 25 additions & 28 deletions .github/workflows/auto-pr-to-release.yml
Original file line number Diff line number Diff line change
Expand Up @@ -33,19 +33,17 @@ jobs:
id: get_pr_details_dispatch
run: |
PR_NUMBER=${{ github.event.inputs.pr_number }}
PR_DATA=$(gh pr view $PR_NUMBER --json number,headRefName,baseRefName,mergedBy,mergeCommit,author,milestone,title --jq '{number: .number, headRefName: .headRefName, baseRefName: .baseRefName, merger: .mergedBy.login, author: .author.login, milestone: .milestone.title, title: .title}')
echo "PR_ID=$(echo $PR_DATA | jq -r '.number')" >> $GITHUB_ENV
echo "PR_AUTHOR=$(echo $PR_DATA | jq -r '.author')" >> $GITHUB_ENV
echo "PR_MERGER=$(echo $PR_DATA | jq -r '.merger')" >> $GITHUB_ENV
echo "MILESTONE=$(echo $PR_DATA | jq -r '.milestone')" >> $GITHUB_ENV
echo "BASE_BRANCH=$(echo $PR_DATA | jq -r '.baseRefName')" >> $GITHUB_ENV
echo "HEAD_BRANCH=$(echo $PR_DATA | jq -r '.headRefName')" >> $GITHUB_ENV
echo "PR_TITLE=$(echo $PR_DATA | jq -r '.title')" >> $GITHUB_ENV
LATEST_COMMIT_SHA=$(gh pr view $PR_NUMBER --json commits --jq '.commits[-1].oid')
FIRST_COMMIT_SHA=$(gh pr view $PR_NUMBER --json commits --jq '.commits[0].oid')
echo "LATEST_COMMIT_SHA=${LATEST_COMMIT_SHA}" >> $GITHUB_ENV
echo "FIRST_COMMIT_SHA=${FIRST_COMMIT_SHA}" >> $GITHUB_ENV
PR_DATA=$(curl -s -H "Accept: application/vnd.github.v3+json" https://api.github.com/repos/opencrvs/opencrvs-countryconfig/pulls/$PR_NUMBER)
# printf escapes the newlines in the JSON, so we can use jq to parse output such as:
# "body": "![image](https://github.com/user-attachments/assets/8eee5bcf-7692-490f-a19f-576623e09961)\r\n",
echo "PR_ID=$(printf '%s' $PR_DATA | jq -r '.number')" >> $GITHUB_ENV
echo "PR_AUTHOR=$(printf '%s' $PR_DATA | jq -r '.user.login')" >> $GITHUB_ENV
echo "PR_MERGER=$(printf '%s' $PR_DATA | jq -r '.merged_by.login')" >> $GITHUB_ENV
echo "MILESTONE=$(printf '%s' $PR_DATA | jq -r '.milestone.title')" >> $GITHUB_ENV
echo "BASE_BRANCH=$(printf '%s' $PR_DATA | jq -r '.base.ref')" >> $GITHUB_ENV
echo "HEAD_BRANCH=$(printf '%s' $PR_DATA | jq -r '.head.ref')" >> $GITHUB_ENV
echo "PR_TITLE=$(printf '%s' $PR_DATA | jq -r '.title')" >> $GITHUB_ENV
echo "BASE_SHA=$(printf '%s' $PR_DATA | jq -r '.base.sha')" >> $GITHUB_ENV
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}

Expand All @@ -60,11 +58,7 @@ jobs:
echo "BASE_BRANCH=${{ github.event.pull_request.base.ref }}" >> $GITHUB_ENV
echo "HEAD_BRANCH=${{ github.event.pull_request.head.ref }}" >> $GITHUB_ENV
echo "PR_TITLE=${{ github.event.pull_request.title }}" >> $GITHUB_ENV
LATEST_COMMIT_SHA=$(gh pr view $PR_NUMBER --json commits --jq '.commits[-1].oid')
FIRST_COMMIT_SHA=$(gh pr view $PR_NUMBER --json commits --jq '.commits[0].oid')
echo "LATEST_COMMIT_SHA=${LATEST_COMMIT_SHA}" >> $GITHUB_ENV
echo "FIRST_COMMIT_SHA=${FIRST_COMMIT_SHA}" >> $GITHUB_ENV
echo "BASE_SHA=${{ github.event.pull_request.base.sha }}" >> $GITHUB_ENV
PR_DETAILS=$(gh pr view $PR_NUMBER --json mergedBy)
MERGED_BY_LOGIN=$(echo "$PR_DETAILS" | jq -r '.mergedBy.login')
Expand Down Expand Up @@ -111,28 +105,30 @@ jobs:
git config advice.mergeConflict false
# Fetch and checkout the release branch
git fetch --all
git fetch --all --unshallow
git checkout ${{ env.RELEASE_BRANCH }}
# Create a new branch for the PR
NEW_BRANCH="auto-pr-${{ env.RELEASE_BRANCH }}-${{ env.PR_ID }}-$RANDOM"
git checkout -b $NEW_BRANCH
echo "First commit: ${{ env.FIRST_COMMIT_SHA }}"
echo "Latest commit: ${{ env.LATEST_COMMIT_SHA }}"
COMMIT_RANGE="${{ env.FIRST_COMMIT_SHA }}..${{ env.LATEST_COMMIT_SHA }}"
echo "HEAD_BRANCH: ${{ env.HEAD_BRANCH }}"
echo "BASE_SHA: ${{ env.BASE_SHA }}"
if [ "${{ env.FIRST_COMMIT_SHA }}" == "${{ env.LATEST_COMMIT_SHA }}" ]; then
COMMIT_RANGE=${{ env.FIRST_COMMIT_SHA }}
fi
COMMIT_RANGE="${{ env.BASE_SHA }}..origin/${{ env.HEAD_BRANCH }}"
echo "Commit range: ${COMMIT_RANGE}"
echo "Commit range: $COMMIT_RANGE"
NON_MERGE_COMMITS=$(git log ${COMMIT_RANGE} --reverse --no-merges --pretty=format:"%h" -- | xargs)
echo "Ordered non-merge commits: $NON_MERGE_COMMITS"
# Attempt to cherry-pick the commits from the original PR
CHERRY_PICK_OUTPUT=$(git cherry-pick $COMMIT_RANGE 2>&1) || {
CHERRY_PICK_OUTPUT=$(git cherry-pick ${NON_MERGE_COMMITS} 2>&1) || {
git cherry-pick --abort || true
# If cherry-pick fails, create a placeholder commit
echo "Cherry-pick failed. Creating placeholder commit."
git reset --hard
git commit --allow-empty -m "Placeholder commit for PR #${{ env.PR_ID }}"
Expand All @@ -152,8 +148,9 @@ jobs:
git checkout $NEW_BRANCH
git reset --hard HEAD~1 # Remove placeholder commit
git cherry-pick $COMMIT_RANGE
git cherry-pick $NON_MERGE_COMMITS
\`\`\`
"
}
Expand Down
3 changes: 3 additions & 0 deletions .github/workflows/clear-environment.yml
Original file line number Diff line number Diff line change
Expand Up @@ -22,6 +22,8 @@ jobs:
name: 'Reset data'
environment: ${{ github.event.inputs.environment }}
runs-on: ubuntu-22.04
outputs:
outcome: ${{ steps.reset-data.outcome }}
timeout-minutes: 60
steps:
- name: Clone country config resource package
Expand All @@ -45,6 +47,7 @@ jobs:
known_hosts: ${{ env.KNOWN_HOSTS }}

- name: Reset data
id: reset-data
env:
HOST: ${{ vars.DOMAIN }}
ENV: ${{ vars.ENVIRONMENT_TYPE }}
Expand Down
22 changes: 13 additions & 9 deletions CHANGELOG.md
Original file line number Diff line number Diff line change
@@ -1,5 +1,11 @@
# Changelog

## 1.7.0 (TBD)

### Bug fixes

- Kibana disk space alerts now work regardless of your disk device names. Alerts listen to devices mounted both at `/` and `/data` (the encrypted data partition)

## 1.6.0 (TBD)

### Breaking changes
Expand All @@ -20,7 +26,13 @@

See https://github.com/opencrvs/opencrvs-farajaland/pull/1005 for details

- #### Infrastructure
### Infrastructure

- Allow using staging both to periodically restore a production backup and to back up its own data to a different location, using the `backup_server_remote_target_directory` and `backup_server_remote_source_directory` Ansible variables. This use case is mostly meant for OpenCRVS team internal use.

- Automate SSH key exchange between application and backup server. For staging servers, automatically fetch production backup encryption key if periodic restore is enabled

- Improved support for non-22 SSH port

- Treat backup host identically to other hosts. To migrate:

Expand Down Expand Up @@ -56,14 +68,6 @@
- Remove `splitView` option from DOCUMENT_UPLOADER_WITH_OPTION field [#114](https://github.com/opencrvs/opencrvs-countryconfig/pull/114)
- Enable authentication for certificates endpoint [#188](https://github.com/opencrvs/opencrvs-countryconfig/pull/188)

- #### Infrastructure

- Allow using staging both to periodically restore a production backup and to back up its own data to a different location, using the `backup_server_remote_target_directory` and `backup_server_remote_source_directory` Ansible variables. This use case is mostly meant for OpenCRVS team internal use.

- Automate SSH key exchange between application and backup server. For staging servers, automatically fetch production backup encryption key if periodic restore is enabled

- Improved support for non-22 SSH port

## [1.4.1](https://github.com/opencrvs/opencrvs-countryconfig/compare/v1.4.0...v1.4.1)

- Improved logging for emails being sent
Expand Down
File renamed without changes.
9 changes: 9 additions & 0 deletions infrastructure/deployment/deploy.sh
Original file line number Diff line number Diff line change
Expand Up @@ -386,6 +386,15 @@ echo
echo "Waiting 2 mins for mongo to deploy before working with data. Please note it can take up to 10 minutes for the entire stack to deploy in some scenarios."
echo

echo 'Setting up elastalert indices'

while true; do
if configured_ssh "/opt/opencrvs/infrastructure/elasticsearch/setup-elastalert-indices.sh"; then
break
fi
sleep 5
done

echo "Setting up Kibana config & alerts"

while true; do
Expand Down
20 changes: 10 additions & 10 deletions infrastructure/docker-compose.deploy.yml
Original file line number Diff line number Diff line change
Expand Up @@ -60,7 +60,7 @@ services:
- overlay_net

filebeat:
image: docker.elastic.co/beats/filebeat:7.17.0
image: docker.elastic.co/beats/filebeat:8.14.3
user: root
networks:
- overlay_net
Expand All @@ -85,7 +85,7 @@ services:
- 'traefik.enable=false'

metricbeat:
image: docker.elastic.co/beats/metricbeat:7.17.13
image: docker.elastic.co/beats/metricbeat:8.14.3
user: root
cap_add:
- SYS_PTRACE
Expand Down Expand Up @@ -128,7 +128,7 @@ services:
[
'curl',
'-u',
'elastic:${ELASTICSEARCH_SUPERUSER_PASSWORD}',
'kibana_system:${KIBANA_SYSTEM_PASSWORD}',
'-X',
'POST',
'http://kibana:5601/api/saved_objects/_import?overwrite=true',
Expand Down Expand Up @@ -156,7 +156,7 @@ services:
gelf-address: 'udp://127.0.0.1:12201'
tag: 'setup-kibana-config'
kibana:
image: docker.elastic.co/kibana/kibana:7.17.0
image: docker.elastic.co/kibana/kibana:8.14.3
restart: always
deploy:
labels:
Expand All @@ -173,8 +173,8 @@ services:
networks:
- overlay_net
environment:
- ELASTICSEARCH_USERNAME=elastic
- ELASTICSEARCH_PASSWORD=${ELASTICSEARCH_SUPERUSER_PASSWORD}
- ELASTICSEARCH_USERNAME=kibana_system
- ELASTICSEARCH_PASSWORD=${KIBANA_SYSTEM_PASSWORD}
configs:
- source: kibana.{{ts}}
target: /usr/share/kibana/config/kibana.yml
Expand Down Expand Up @@ -282,7 +282,6 @@ services:
- path.repo=/data/backups/elasticsearch
- cluster.name=docker-cluster
- network.host=0.0.0.0
- discovery.zen.minimum_master_nodes=1
- discovery.type=single-node
- xpack.security.enabled=true
- xpack.security.authc.api_key.enabled=true
Expand Down Expand Up @@ -365,6 +364,7 @@ services:
- APM_ELASTIC_PASSWORD=${ROTATING_APM_ELASTIC_PASSWORD}
- SEARCH_ELASTIC_USERNAME=search-user
- SEARCH_ELASTIC_PASSWORD=${ROTATING_SEARCH_ELASTIC_PASSWORD}
- KIBANA_SYSTEM_PASSWORD=${KIBANA_SYSTEM_PASSWORD}
- KIBANA_USERNAME=${KIBANA_USERNAME}
- KIBANA_PASSWORD=${KIBANA_PASSWORD}
volumes:
Expand All @@ -384,7 +384,7 @@ services:
gelf-address: 'udp://127.0.0.1:12201'
tag: 'setup-elasticsearch-users'
elastalert:
image: jertel/elastalert2:2.3.0
image: jertel/elastalert2:2.19.0
restart: unless-stopped
environment:
- ES_USERNAME=elastic
Expand All @@ -408,7 +408,7 @@ services:
tag: 'elastalert'

logstash:
image: logstash:7.17.0
image: logstash:8.14.3
command: logstash -f /etc/logstash/logstash.conf --verbose
ports:
- '12201:12201'
Expand All @@ -431,7 +431,7 @@ services:
- 'traefik.enable=false'
replicas: 1
apm-server:
image: docker.elastic.co/apm/apm-server:7.15.2
image: docker.elastic.co/apm/apm-server:7.17.22
cap_add: ['CHOWN', 'DAC_OVERRIDE', 'SETGID', 'SETUID']
cap_drop: ['ALL']
restart: always
Expand Down
13 changes: 11 additions & 2 deletions infrastructure/elasticsearch/roles/search_user.json
Original file line number Diff line number Diff line change
@@ -1,8 +1,17 @@
{
"cluster": ["manage"],
"indices": [
{
"names": ["ocrvs"],
"privileges": ["write", "create", "create_index", "delete", "delete_index", "read"]
"names": ["ocrvs", "ocrvs-*"],
"privileges": [
"write",
"create",
"create_index",
"delete",
"delete_index",
"read",
"manage"
]
}
]
}
44 changes: 44 additions & 0 deletions infrastructure/elasticsearch/setup-elastalert-indices.sh
Original file line number Diff line number Diff line change
@@ -0,0 +1,44 @@
#!/usr/bin/env bash

# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at https://mozilla.org/MPL/2.0/.
#
# OpenCRVS is also distributed under the terms of the Civil Registration
# & Healthcare Disclaimer located at http://opencrvs.org/license.
#
# Copyright (C) The OpenCRVS Authors located at https://github.com/opencrvs/opencrvs-core/blob/master/AUTHORS.

# Deletes the ElastAlert 2 writeback indices so ElastAlert can recreate them
# with an Elasticsearch 8 compatible mapping. Upgrading from ES 7 to 8 requires
# deleting these indices:
# https://elastalert2.readthedocs.io/en/latest/recipes/faq.html#does-elastalert-2-support-elasticsearch-8
#
# Requires: ELASTICSEARCH_SUPERUSER_PASSWORD in the environment; must run on a
# swarm node that can reach the opencrvs_overlay_net network.

set -e

# Intentionally unquoted when expanded below: the string must word-split into
# the docker binary and its arguments.
docker_command="docker run --rm --network=opencrvs_overlay_net curlimages/curl"

echo 'Waiting for availability of Elasticsearch'
# Quote the credentials: the superuser password may contain spaces or shell
# metacharacters.
ping_status_code=$($docker_command --connect-timeout 60 -u "elastic:${ELASTICSEARCH_SUPERUSER_PASSWORD}" -o /dev/null -w '%{http_code}' "http://elasticsearch:9200")

if [ "$ping_status_code" -ne 200 ]; then
  echo "Elasticsearch is not ready. API returned status code: $ping_status_code"
  exit 1
fi

echo 'Scaling down Elastalert'

docker service scale opencrvs_elastalert=0

echo 'Deleting Elastalert indices'
indices='elastalert_status,elastalert_status_error,elastalert_status_past,elastalert_status_silence,elastalert_status_status'

delete_status_code=$($docker_command --connect-timeout 60 -u "elastic:${ELASTICSEARCH_SUPERUSER_PASSWORD}" -o /dev/null -w '%{http_code}' "http://elasticsearch:9200/${indices}" -X DELETE)

# 200: indices deleted. 404: the indices never existed (fresh install) — there
# is nothing to delete, which is fine. Any other status (401, 5xx, 000 on
# connection failure) is a real error; deploy.sh retries this script, so a
# persistent failure here would otherwise loop forever on fresh deployments.
if [ "$delete_status_code" -ne 200 ] && [ "$delete_status_code" -ne 404 ]; then
  echo "Could not delete indices. API returned status code: $delete_status_code"
  exit 1
fi

echo 'Scaling up Elastalert'
docker service scale opencrvs_elastalert=1

33 changes: 0 additions & 33 deletions infrastructure/elasticsearch/setup-helpers.sh
Original file line number Diff line number Diff line change
Expand Up @@ -230,36 +230,3 @@ function ensure_settings {

return $result
}


# Creates an Elasticsearch index via an idempotent HTTP PUT.
# Globals:   ELASTICSEARCH_HOST (optional; defaults to "elasticsearch"),
#            ELASTIC_PASSWORD (optional; enables basic auth as "elastic")
# Arguments: $1 - name of the index to create
# Outputs:   curl response (headers via -D-, body, trailing HTTP status code)
#            to stdout; on failure, re-prints the body without the status code
# Returns:   0 if the index was created or already exists, 1 otherwise
function create_elastic_index {
local index_name=$1
local elasticsearch_host="${ELASTICSEARCH_HOST:-elasticsearch}"

# -w '%{http_code}' appends the status code as the last 3 chars of output;
# -D- dumps response headers to stdout; -m15 caps the request at 15 seconds.
local -a args=( '-s' '-D-' '-m15' '-w' '%{http_code}'
"http://${elasticsearch_host}:9200/${index_name}"
'-X' 'PUT'
'-H' 'Content-Type: application/json'
)

# Only authenticate when a password is configured (e.g. security enabled).
if [[ -n "${ELASTIC_PASSWORD:-}" ]]; then
args+=( '-u' "elastic:${ELASTIC_PASSWORD}" )
fi

local -i result=1
local output

output="$(curl "${args[@]}")"

echo "${output}"

# Success when HTTP 200, or when the index already exists (PUT on an
# existing index returns a "resource_already_exists" error body).
if [[ "${output: -3}" -eq 200 || $output == *"resource_already_exists"* ]]; then
result=0
fi

# On failure, echo the response body again with the 3-char status stripped.
if ((result)); then
echo -e "\n${output::-3}\n"
fi

return $result
}
3 changes: 0 additions & 3 deletions infrastructure/elasticsearch/setup-settings.sh
Original file line number Diff line number Diff line change
Expand Up @@ -19,8 +19,5 @@ echo "-------- $(date) --------"
log 'Waiting for availability of Elasticsearch'
wait_for_elasticsearch

log "Creating index for Elasticsearch. Index: ocrvs"
create_elastic_index "ocrvs"

log "Updating replicas for Elasticsearch"
ensure_settings "{\"index\":{\"number_of_replicas\":0}}"
1 change: 1 addition & 0 deletions infrastructure/elasticsearch/setup-users.sh
Original file line number Diff line number Diff line change
Expand Up @@ -24,6 +24,7 @@ users_passwords=(
[$SEARCH_ELASTIC_USERNAME]="${SEARCH_ELASTIC_PASSWORD:-}"
[beats_system]="${METRICBEAT_ELASTIC_PASSWORD:-}"
[apm_system]="${APM_ELASTIC_PASSWORD:-}"
[kibana_system]="${KIBANA_SYSTEM_PASSWORD:-}"
[$KIBANA_USERNAME]="${KIBANA_PASSWORD:-}"
)

Expand Down
Loading

0 comments on commit 3216863

Please sign in to comment.