From 9b0a66a1c9102441c1c63ed1ba12de4360543b10 Mon Sep 17 00:00:00 2001 From: euanmillar Date: Thu, 9 Nov 2023 16:40:45 +0000 Subject: [PATCH 1/3] Bump release number --- .github/workflows/deploy-prod.yml | 2 +- .github/workflows/deploy.yml | 2 +- package.json | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/.github/workflows/deploy-prod.yml b/.github/workflows/deploy-prod.yml index 01fd6b22..90bc2372 100644 --- a/.github/workflows/deploy-prod.yml +++ b/.github/workflows/deploy-prod.yml @@ -13,7 +13,7 @@ on: core-image-tag: description: Core DockerHub image tag required: true - default: 'v1.3.1' + default: 'v1.3.2' countryconfig-image-tag: description: Your Country Config DockerHub image tag required: true diff --git a/.github/workflows/deploy.yml b/.github/workflows/deploy.yml index 8584331e..65914dcf 100644 --- a/.github/workflows/deploy.yml +++ b/.github/workflows/deploy.yml @@ -14,7 +14,7 @@ on: core-image-tag: description: Core DockerHub image tag required: true - default: 'v1.3.1' + default: 'v1.3.2' countryconfig-image-tag: description: Your Country Config DockerHub image tag required: true diff --git a/package.json b/package.json index 1ecdaa8c..7dc0648b 100644 --- a/package.json +++ b/package.json @@ -1,6 +1,6 @@ { "name": "@opencrvs/countryconfig", - "version": "1.3.1", + "version": "1.3.2", "description": "OpenCRVS country configuration for reference data", "license": "MPL-2.0", "husky": { From 50e996f5050be942a05a4ec1516f55d1648540bd Mon Sep 17 00:00:00 2001 From: euanmillar Date: Wed, 8 Nov 2023 21:38:53 +0000 Subject: [PATCH 2/3] fix conflicts --- .github/workflows/backup-check.yml | 64 +++++ .github/workflows/deploy-prod.yml | 35 ++- .github/workflows/deploy.yml | 35 ++- .github/workflows/provision.yml | 150 ++++++++++++ .github/workflows/publish-release.yml | 2 +- .github/workflows/publish-to-dockerhub.yml | 2 +- .github/workflows/test-restore-backup.yml | 92 +++++++ infrastructure/backup-check.sh | 67 ++++++ 
infrastructure/create-github-environment.js | 226 ++++++++++++++++++ infrastructure/deploy.sh | 75 +++++- infrastructure/emergency-restore-metadata.sh | 4 + .../monitoring/kibana/setup-config.sh | 20 ++ infrastructure/server-setup/playbook-3.yml | 36 +++ infrastructure/server-setup/playbook-5.yml | 36 +++ infrastructure/setup-deploy-config.sh | 3 + infrastructure/test-restore-backup.sh | 115 +++++++++ package.json | 3 + yarn.lock | 94 ++++++++ 18 files changed, 1043 insertions(+), 16 deletions(-) create mode 100644 .github/workflows/backup-check.yml create mode 100644 .github/workflows/provision.yml create mode 100644 .github/workflows/test-restore-backup.yml create mode 100755 infrastructure/backup-check.sh create mode 100644 infrastructure/create-github-environment.js create mode 100755 infrastructure/monitoring/kibana/setup-config.sh create mode 100755 infrastructure/test-restore-backup.sh diff --git a/.github/workflows/backup-check.yml b/.github/workflows/backup-check.yml new file mode 100644 index 00000000..90c485cb --- /dev/null +++ b/.github/workflows/backup-check.yml @@ -0,0 +1,64 @@ +name: Check backup(production) +run-name: Checking if a production backup exists ${{ github.event.inputs.backup-label }} +on: + workflow_dispatch: + inputs: + environment: + type: choice + description: Environment to check backup + required: true + default: 'production' + options: + - production + branch_name: + description: Branch to run workflow from + default: develop + required: true + backup-label: + description: Your Backup label + required: true +jobs: + check-backup: + environment: ${{ github.event.inputs.environment }} + runs-on: ubuntu-20.04 + timeout-minutes: 60 + strategy: + matrix: + node-version: [16.20.0] + steps: + - uses: actions/checkout@v2 + if: github.event_name == 'workflow_dispatch' + with: + ref: '${{ github.event.inputs.branch_name }}' + # + # Uncomment if using VPN + # + #- name: Install openconnect ppa + # run: sudo add-apt-repository 
ppa:dwmw2/openconnect -y && sudo apt update + + #- name: Install openconnect + # run: sudo apt install -y openconnect + + #- name: Connect to VPN + # run: | + # echo "${{ secrets.VPN_PWD }}" | sudo openconnect -u ${{ secrets.VPN_USER }} --passwd-on-stdin --protocol=${{ secrets.VPN_PROTOCOL }} ${{ secrets.VPN_HOST }}:${{ secrets.VPN_PORT }} --servercert ${{ secrets.VPN_SERVERCERT }} --background + + #- name: Test if IP is reachable + # run: | + # ping -c4 ${{ secrets.SSH_HOST }} + + - name: Install SSH Key + uses: shimataro/ssh-key-action@v2 + with: + key: ${{ secrets.SSH_KEY }} + known_hosts: ${{ secrets.KNOWN_HOSTS }} + + - name: Check backups in ${{ github.event.inputs.environment }} + id: backup-check + env: + SSH_USER: ${{ secrets.SSH_USER }} + SSH_HOST: ${{ secrets.BACKUP_HOST }} + REMOTE_DIR: ${{ vars.BACKUP_DIRECTORY }} + LABEL: ${{ github.event.inputs.backup-label }} + run: | + yarn backup:check \ No newline at end of file diff --git a/.github/workflows/deploy-prod.yml b/.github/workflows/deploy-prod.yml index 90bc2372..5a6bd185 100644 --- a/.github/workflows/deploy-prod.yml +++ b/.github/workflows/deploy-prod.yml @@ -57,6 +57,23 @@ jobs: cd opencrvs-core git checkout ${{ github.event.inputs.core-image-tag }} + # + # Uncomment if using VPN + # + #- name: Install openconnect ppa + # run: sudo add-apt-repository ppa:dwmw2/openconnect -y && sudo apt update + + #- name: Install openconnect + # run: sudo apt install -y openconnect + + #- name: Connect to VPN + # run: | + # echo "${{ secrets.VPN_PWD }}" | sudo openconnect -u ${{ secrets.VPN_USER }} --passwd-on-stdin --protocol=${{ secrets.VPN_PROTOCOL }} ${{ secrets.VPN_HOST }}:${{ secrets.VPN_PORT }} --servercert ${{ secrets.VPN_SERVERCERT }} --background + + #- name: Test if IP is reachable + # run: | + # ping -c4 ${{ secrets.SSH_HOST }} + - name: Install SSH Key uses: shimataro/ssh-key-action@v2 with: @@ -67,7 +84,7 @@ jobs: uses: docker/login-action@v1 with: username: ${{ secrets.DOCKER_USERNAME }} - 
password: ${{ secrets.DOCKER_PASSWORD }} + password: ${{ secrets.DOCKER_TOKEN }} - name: Wait for images to be available run: | @@ -96,6 +113,8 @@ jobs: ALERT_EMAIL: ${{ secrets.ALERT_EMAIL }} DOCKERHUB_ACCOUNT: ${{ secrets.DOCKERHUB_ACCOUNT }} DOCKERHUB_REPO: ${{ secrets.DOCKERHUB_REPO }} + DOCKER_USERNAME: ${{ secrets.DOCKER_USERNAME }} + DOCKER_TOKEN: ${{ secrets.DOCKER_TOKEN }} KIBANA_USERNAME: ${{ secrets.KIBANA_USERNAME }} KIBANA_PASSWORD: ${{ secrets.KIBANA_PASSWORD }} MONGODB_ADMIN_USER: ${{ secrets.MONGODB_ADMIN_USER }} @@ -110,7 +129,19 @@ jobs: INFOBIP_API_KEY: ${{ secrets.INFOBIP_API_KEY }} SENDER_EMAIL_ADDRESS: ${{ secrets.SENDER_EMAIL_ADDRESS }} SUPER_USER_PASSWORD: ${{ secrets.SUPER_USER_PASSWORD }} + SSH_KEY: ${{ secrets.SSH_KEY }} + KNOWN_HOSTS: ${{ secrets.KNOWN_HOSTS }} CONTENT_SECURITY_POLICY_WILDCARD: ${{ vars.CONTENT_SECURITY_POLICY_WILDCARD }} + # SUDO_PASSWORD: ${{ secrets.SUDO_PASSWORD }} + # ELASTALERT_SLACK_WEBHOOK: ${{ secrets.ELASTALERT_SLACK_WEBHOOK }} run: | cd ./${{ github.event.repository.name }} - yarn deploy --clear_data=no --environment=${{ github.event.inputs.deploy-script-environment }} --host=${{ env.DOMAIN }} --version=${{ github.event.inputs.core-image-tag }} --country_config_version=${{ github.event.inputs.countryconfig-image-tag }} --country_config_path=../${{ github.event.repository.name }} --replicas=${{ env.REPLICAS }} + yarn deploy \ + --clear_data=no \ + --environment=${{ github.event.inputs.deploy-script-environment }} \ + --host=${{ env.DOMAIN }} \ + --ssh_host=${{ secrets.SSH_HOST }} \ + --ssh_user=${{ secrets.SSH_USER }} \ + --version=${{ github.event.inputs.core-image-tag }} \ + --country_config_version=${{ github.event.inputs.countryconfig-image-tag }} \ + --replicas=${{ env.REPLICAS }} diff --git a/.github/workflows/deploy.yml b/.github/workflows/deploy.yml index 65914dcf..abec9b0d 100644 --- a/.github/workflows/deploy.yml +++ b/.github/workflows/deploy.yml @@ -60,6 +60,23 @@ jobs: cd opencrvs-core git checkout 
${{ github.event.inputs.core-image-tag }} + # + # Uncomment if using VPN + # + #- name: Install openconnect ppa + # run: sudo add-apt-repository ppa:dwmw2/openconnect -y && sudo apt update + + #- name: Install openconnect + # run: sudo apt install -y openconnect + + #- name: Connect to VPN + # run: | + # echo "${{ secrets.VPN_PWD }}" | sudo openconnect -u ${{ secrets.VPN_USER }} --passwd-on-stdin --protocol=${{ secrets.VPN_PROTOCOL }} ${{ secrets.VPN_HOST }}:${{ secrets.VPN_PORT }} --servercert ${{ secrets.VPN_SERVERCERT }} --background + + #- name: Test if IP is reachable + # run: | + # ping -c4 ${{ secrets.SSH_HOST }} + - name: Install SSH Key uses: shimataro/ssh-key-action@v2 with: @@ -70,7 +87,7 @@ jobs: uses: docker/login-action@v1 with: username: ${{ secrets.DOCKER_USERNAME }} - password: ${{ secrets.DOCKER_PASSWORD }} + password: ${{ secrets.DOCKER_TOKEN }} - name: Wait for images to be available run: | @@ -99,6 +116,8 @@ jobs: ALERT_EMAIL: ${{ secrets.ALERT_EMAIL }} DOCKERHUB_ACCOUNT: ${{ secrets.DOCKERHUB_ACCOUNT }} DOCKERHUB_REPO: ${{ secrets.DOCKERHUB_REPO }} + DOCKER_USERNAME: ${{ secrets.DOCKER_USERNAME }} + DOCKER_TOKEN: ${{ secrets.DOCKER_TOKEN }} KIBANA_USERNAME: ${{ secrets.KIBANA_USERNAME }} KIBANA_PASSWORD: ${{ secrets.KIBANA_PASSWORD }} MONGODB_ADMIN_USER: ${{ secrets.MONGODB_ADMIN_USER }} @@ -113,10 +132,22 @@ jobs: INFOBIP_API_KEY: ${{ secrets.INFOBIP_API_KEY }} SENDER_EMAIL_ADDRESS: ${{ secrets.SENDER_EMAIL_ADDRESS }} SUPER_USER_PASSWORD: ${{ secrets.SUPER_USER_PASSWORD }} + SSH_KEY: ${{ secrets.SSH_KEY }} + KNOWN_HOSTS: ${{ secrets.KNOWN_HOSTS }} CONTENT_SECURITY_POLICY_WILDCARD: ${{ vars.CONTENT_SECURITY_POLICY_WILDCARD }} + # SUDO_PASSWORD: ${{ secrets.SUDO_PASSWORD }} + # ELASTALERT_SLACK_WEBHOOK: ${{ secrets.ELASTALERT_SLACK_WEBHOOK }} run: | cd ./${{ github.event.repository.name }} - yarn deploy --clear_data=${{ github.event.inputs.reset }} --environment=${{ github.event.inputs.environment }} --host=${{ env.DOMAIN }} --version=${{ 
github.event.inputs.core-image-tag }} --country_config_version=${{ github.event.inputs.countryconfig-image-tag }} --country_config_path=../${{ github.event.repository.name }} --replicas=${{ env.REPLICAS }} + yarn deploy \ + --clear_data=${{ github.event.inputs.reset }} \ + --environment=${{ github.event.inputs.environment }} \ + --host=${{ env.DOMAIN }} \ + --ssh_host=${{ secrets.SSH_HOST }} \ + --ssh_user=${{ secrets.SSH_USER }} \ + --version=${{ github.event.inputs.core-image-tag }} \ + --country_config_version=${{ github.event.inputs.countryconfig-image-tag }} \ + --replicas=${{ env.REPLICAS }} seed-data: needs: deploy if: ${{ github.event.inputs.reset == 'yes' && needs.deploy.outputs.outcome == 'success' }} diff --git a/.github/workflows/provision.yml b/.github/workflows/provision.yml new file mode 100644 index 00000000..16706852 --- /dev/null +++ b/.github/workflows/provision.yml @@ -0,0 +1,150 @@ +name: Provision environment +run-name: Provision ${{ github.event.inputs.environment }} +on: + workflow_dispatch: + inputs: + environment: + type: choice + description: Machine to provision + default: qa + required: true + options: + - staging + - qa + - production + branch_name: + description: Branch to provision from + default: develop + required: true + +jobs: + provision: + environment: ${{ github.event.inputs.environment }} + runs-on: ubuntu-20.04 + outputs: + outcome: ${{ steps.deploy.outcome }} + timeout-minutes: 60 + strategy: + matrix: + node-version: [16.20.0] + steps: + - name: Clone country config resource package + uses: actions/checkout@v3 + with: + fetch-depth: 0 + path: './${{ github.event.repository.name }}' + + - name: Set environment type ENV_TYPE + run: | + if [ "${{ github.event.inputs.environment }}" == "production" ]; then + echo "ENV_TYPE=production" >> "$GITHUB_ENV" + else + echo "ENV_TYPE=qa" >> "$GITHUB_ENV" + fi + + # + # Uncomment if using VPN + # + #- name: Install openconnect ppa + # run: sudo add-apt-repository ppa:dwmw2/openconnect 
-y && sudo apt update + + #- name: Install openconnect + # run: sudo apt install -y openconnect + + #- name: Connect to VPN + # run: | + # echo "${{ secrets.VPN_PWD }}" | sudo openconnect -u ${{ secrets.VPN_USER }} --passwd-on-stdin --protocol=${{ secrets.VPN_PROTOCOL }} ${{ secrets.VPN_HOST }}:${{ secrets.VPN_PORT }} --servercert ${{ secrets.VPN_SERVERCERT }} --background + + #- name: Test if IP is reachable + # run: | + # ping -c4 ${{ secrets.SSH_HOST }} + + - name: Set variables for ansible in production environments + id: ansible-production-variables + if: env.ENV_TYPE == 'production' + run: | + JSON_WITH_NEWLINES=$(cat<> $GITHUB_OUTPUT + env: + encrypted_disk_size: ${{ vars.DISK_SPACE }} + disk_encryption_key: ${{ secrets.ENCRYPTION_KEY }} + dockerhub_username: ${{ secrets.DOCKER_USERNAME }} + dockerhub_password: ${{ secrets.DOCKER_TOKEN }} + mongodb_admin_username: ${{ secrets.MONGODB_ADMIN_USER }} + mongodb_admin_password: ${{ secrets.MONGODB_ADMIN_PASSWORD }} + elasticsearch_superuser_password: ${{ secrets.ELASTICSEARCH_SUPERUSER_PASSWORD }} + external_backup_server_remote_directory: ${{ vars.BACKUP_DIRECTORY }} + external_backup_server_user: ${{ secrets.SSH_USER }} + external_backup_server_ssh_port: 22 + external_backup_server_ip: ${{ secrets.BACKUP_HOST }} + manager_production_server_ip: ${{ secrets.SSH_HOST }} + ansible_user: ${{ secrets.SSH_USER }} + # ansible_sudo_pass: ${{ secrets.SUDO_PASSWORD }} in case your user is not root + + - name: Set variables for ansible in qa environments + id: ansible-variables + if: env.ENV_TYPE == 'qa' + run: | + JSON_WITH_NEWLINES=$(cat<> $GITHUB_OUTPUT + env: + encrypted_disk_size: ${{ vars.DISK_SPACE }} + disk_encryption_key: ${{ secrets.ENCRYPTION_KEY }} + dockerhub_username: ${{ secrets.DOCKER_USERNAME }} + dockerhub_password: ${{ secrets.DOCKER_TOKEN }} + mongodb_admin_username: ${{ secrets.MONGODB_ADMIN_USER }} + mongodb_admin_password: ${{ secrets.MONGODB_ADMIN_PASSWORD }} + elasticsearch_superuser_password: ${{ 
secrets.ELASTICSEARCH_SUPERUSER_PASSWORD }} + # ansible_sudo_pass: ${{ secrets.SUDO_PASSWORD }} in case your user is not root + + # TODO: Iterate for 3 or 5 replicas + - name: Create ini file for 1 replica + id: ini-file + run: | + touch ${{ github.event.repository.name }}/infrastructure/server-setup/replicas-1.ini + echo "[docker-manager-first]" > ${{ github.event.repository.name }}/infrastructure/server-setup/replicas-1.ini + echo "manager1 ansible_host=\"${{ secrets.SSH_HOST }}\" ansible_user=${{ secrets.SSH_USER }} ansible_ssh_private_key_file=/tmp/server.pem" >> ${{ github.event.repository.name }}/infrastructure/server-setup/replicas-1.ini + echo "" >> ${{ github.event.repository.name }}/infrastructure/server-setup/replicas-1.ini + echo "[all:vars]" >> ${{ github.event.repository.name }}/infrastructure/server-setup/replicas-1.ini + echo "data1_hostname=${{ vars.HOSTNAME }}" >> ${{ github.event.repository.name }}/infrastructure/server-setup/replicas-1.ini + + - name: Check ini content + run: | + cat ${{ github.event.repository.name }}/infrastructure/server-setup/replicas-1.ini + + - name: Run playbook on 1 replica in qa + uses: dawidd6/action-ansible-playbook@v2 + if: vars.REPLICAS == 1 && env.ENV_TYPE == 'qa' + env: + ANSIBLE_PERSISTENT_COMMAND_TIMEOUT: 30 + ANSIBLE_SSH_TIMEOUT: 30 + ANSIBLE_SSH_RETRIES: 20 + with: + playbook: playbook-1.yml + directory: ${{ github.event.repository.name }}/infrastructure/server-setup + options: | + --verbose + --inventory replicas-1.ini + --extra-vars ""${{ steps.ansible-variables.outputs.EXTRA_VARS }}"" + + - name: Run playbook on 1 replica in production + uses: dawidd6/action-ansible-playbook@v2 + if: vars.REPLICAS == 1 && env.ENV_TYPE == 'production' + env: + ANSIBLE_PERSISTENT_COMMAND_TIMEOUT: 30 + ANSIBLE_SSH_TIMEOUT: 30 + ANSIBLE_SSH_RETRIES: 20 + with: + playbook: playbook-1.yml + directory: ${{ github.event.repository.name }}/infrastructure/server-setup + options: | + --verbose + --inventory replicas-1.ini + 
--extra-vars ""${{ steps.ansible-production-variables.outputs.EXTRA_VARS }}""
+
diff --git a/.github/workflows/publish-release.yml b/.github/workflows/publish-release.yml
index f6b3c319..045de526 100644
--- a/.github/workflows/publish-release.yml
+++ b/.github/workflows/publish-release.yml
@@ -33,7 +33,7 @@ jobs:
         uses: docker/login-action@v1
         with:
           username: ${{ secrets.DOCKER_USERNAME }}
-          password: ${{ secrets.DOCKER_PASSWORD }}
+          password: ${{ secrets.DOCKER_TOKEN }}
       - name: Push image
         env:
           DOCKERHUB_ACCOUNT: ${{ secrets.DOCKERHUB_ACCOUNT }}
diff --git a/.github/workflows/publish-to-dockerhub.yml b/.github/workflows/publish-to-dockerhub.yml
index e910f23e..39e566ba 100644
--- a/.github/workflows/publish-to-dockerhub.yml
+++ b/.github/workflows/publish-to-dockerhub.yml
@@ -26,7 +26,7 @@ jobs:
         uses: docker/login-action@v1
         with:
           username: ${{ secrets.DOCKER_USERNAME }}
-          password: ${{ secrets.DOCKER_PASSWORD }}
+          password: ${{ secrets.DOCKER_TOKEN }}
       - name: Push image
         env:
           DOCKERHUB_ACCOUNT: ${{ secrets.DOCKERHUB_ACCOUNT }}
diff --git a/.github/workflows/test-restore-backup.yml b/.github/workflows/test-restore-backup.yml
new file mode 100644
index 00000000..6b13c4d5
--- /dev/null
+++ b/.github/workflows/test-restore-backup.yml
@@ -0,0 +1,92 @@
+name: Restore a QA or Production backup on QA
+run-name: Restore a ${{ github.event.inputs.testing-prod-backup }} backup on QA
+on:
+  workflow_dispatch:
+    inputs:
+      branch_name:
+        description: Branch to run workflow from
+        default: develop
+        required: true
+      qa-backup-label:
+        description: Your lowercase QA backup label
+        required: true
+      prod-backup-label:
+        description: Your lowercase prod backup label
+      testing-prod-backup:
+        type: choice
+        description: Whether to restore a qa or production backup
+        required: true
+        default: 'qa'
+        options:
+          - 'qa'
+          - 'production'
+jobs:
+  restore-backup:
+    environment: qa
+    runs-on: ubuntu-20.04
+    timeout-minutes: 60
+    strategy:
+      matrix:
+        node-version: [16.20.0]
+    steps:
+      - uses: 
actions/checkout@v2 + if: github.event_name == 'workflow_dispatch' + with: + ref: '${{ github.event.inputs.branch_name }}' + + # + # Uncomment if using VPN + # + #- name: Install openconnect ppa + # run: sudo add-apt-repository ppa:dwmw2/openconnect -y && sudo apt update + + #- name: Install openconnect + # run: sudo apt install -y openconnect + + #- name: Connect to VPN + # run: | + # echo "${{ secrets.VPN_PWD }}" | sudo openconnect -u ${{ secrets.VPN_USER }} --passwd-on-stdin --protocol=${{ secrets.VPN_PROTOCOL }} ${{ secrets.VPN_HOST }}:${{ secrets.VPN_PORT }} --servercert ${{ secrets.VPN_SERVERCERT }} --background + + #- name: Test if IP is reachable + # run: | + # ping -c4 ${{ secrets.SSH_HOST }} + + - name: Install SSH Key + uses: shimataro/ssh-key-action@v2 + with: + key: ${{ secrets.SSH_KEY }} + known_hosts: ${{ secrets.KNOWN_HOSTS }} + + - name: Test a production backup on QA + id: production-backup-check + if: ${{ github.event.inputs.testing-prod-backup == 'production' }} + env: + SUDO_PASSWORD: ${{ secrets.VPN_PWD }} + SSH_USER: ${{ secrets.SSH_USER }} + SSH_HOST: ${{ secrets.SSH_HOST }} + BACKUP_HOST: ${{ secrets.BACKUP_HOST }} + BACKUP_DIRECTORY: ${{ vars.BACKUP_DIRECTORY }} + RESTORE_DIRECTORY: ${{ vars.RESTORE_DIRECTORY }} + REPLICAS: ${{ vars.REPLICAS }} + QA_BACKUP_LABEL: ${{ github.event.inputs.qa-backup-label }} + PROD_BACKUP_LABEL: ${{ github.event.inputs.prod-backup-label }} + REVERTING: "no" + run: | + bash infrastructure/test-restore-backup.sh + + - name: Restore a QA backup + id: qa-backup-check + if: ${{ github.event.inputs.testing-prod-backup == 'qa' }} + env: + SUDO_PASSWORD: ${{ secrets.VPN_PWD }} + SSH_USER: ${{ secrets.SSH_USER }} + SSH_HOST: ${{ secrets.SSH_HOST }} + BACKUP_HOST: ${{ secrets.BACKUP_HOST }} + BACKUP_DIRECTORY: ${{ vars.BACKUP_DIRECTORY }} + RESTORE_DIRECTORY: ${{ vars.RESTORE_DIRECTORY }} + REPLICAS: ${{ vars.REPLICAS }} + QA_BACKUP_LABEL: ${{ github.event.inputs.qa-backup-label }} + PROD_BACKUP_LABEL: "" + REVERTING: 
"yes" + run: | + bash infrastructure/test-restore-backup.sh \ No newline at end of file diff --git a/infrastructure/backup-check.sh b/infrastructure/backup-check.sh new file mode 100755 index 00000000..695346cf --- /dev/null +++ b/infrastructure/backup-check.sh @@ -0,0 +1,67 @@ +# This Source Code Form is subject to the terms of the Mozilla Public +# License, v. 2.0. If a copy of the MPL was not distributed with this +# file, You can obtain one at https://mozilla.org/MPL/2.0/. +# +# OpenCRVS is also distributed under the terms of the Civil Registration +# & Healthcare Disclaimer located at http://opencrvs.org/license. +# +# Copyright (C) The OpenCRVS Authors located at https://github.com/opencrvs/opencrvs-core/blob/master/AUTHORS. + +#------------------------------------------------------------------------------------------------------------------ +# By default OpenCRVS saves a backup of all data on a cron job every day in case of an emergency data loss incident +# This cron job is already configured in the Ansible playbook.yml in the infrastructure > server-setup directory. +# Change SSH connection settings and IPs to suit your deployment, and re-run the Ansible script to update. +# A label string i.e. 'v1.0.1' can also be provided to the script to be appended to the filenames +#------------------------------------------------------------------------------------------------------------------ +set -e + +print_usage_and_exit() { + echo 'Usage: ./backup-check.sh with environment variables' + echo "Script will check if a backup has been successfully created for the given label" + exit 1 +} + + +if [ -z "$SSH_USER" ]; then + echo 'Error: Missing environment variable SSH_USER.' + print_usage_and_exit +fi +if [ -z "$SSH_HOST" ]; then + echo 'Error: Missing environment variable SSH_HOST.' + print_usage_and_exit +fi +if [ -z "$REMOTE_DIR" ]; then + echo 'Error: Missing environment variable REMOTE_DIR.' 
+ print_usage_and_exit +fi +if [ -z "$LABEL" ]; then + echo 'Error: Missing environment variable LABEL.' + print_usage_and_exit +fi + +REMOTE_DIR="$REMOTE_DIR/${LABEL}" + +REMOTE_HEARTH_BACKUP=$REMOTE_DIR/mongo/hearth-dev-${LABEL}.gz +REMOTE_OPENHIM_BACKUP=$REMOTE_DIR/mongo/openhim-dev-${LABEL}.gz +REMOTE_USER_MGNT_BACKUP=$REMOTE_DIR/mongo/user-mgnt-${LABEL}.gz +REMOTE_APP_CONFIG_BACKUP=$REMOTE_DIR/mongo/application-config-${LABEL}.gz +REMOTE_METRICS_BACKUP=$REMOTE_DIR/mongo/metrics-${LABEL}.gz +REMOTE_WEBHOOKS_BACKUP=$REMOTE_DIR/mongo/webhooks-${LABEL}.gz +REMOTE_PERFORMANCE_BACKUP=$REMOTE_DIR/mongo/performance-${LABEL}.gz +REMOTE_MINIO_BACKUP=$REMOTE_DIR/minio/ocrvs-${LABEL}.tar.gz +REMOTE_METABASE_BACKUP=$REMOTE_DIR/metabase/ocrvs-${LABEL}.tar.gz +REMOTE_VSEXPORT_BACKUP=$REMOTE_DIR/vsexport/ocrvs-${LABEL}.tar.gz + + +# SSH into the remote server and check if the files exist +ssh "$SSH_USER@$SSH_HOST" "[ -e '$REMOTE_HEARTH_BACKUP' ] && echo '$REMOTE_HEARTH_BACKUP exists' || echo '$REMOTE_HEARTH_BACKUP does not exist'" +ssh "$SSH_USER@$SSH_HOST" "[ -e '$REMOTE_OPENHIM_BACKUP' ] && echo '$REMOTE_OPENHIM_BACKUP exists' || echo '$REMOTE_OPENHIM_BACKUP does not exist'" +ssh "$SSH_USER@$SSH_HOST" "[ -e '$REMOTE_USER_MGNT_BACKUP' ] && echo '$REMOTE_USER_MGNT_BACKUP exists' || echo '$REMOTE_USER_MGNT_BACKUP does not exist'" +ssh "$SSH_USER@$SSH_HOST" "[ -e '$REMOTE_APP_CONFIG_BACKUP' ] && echo '$REMOTE_APP_CONFIG_BACKUP exists' || echo '$REMOTE_APP_CONFIG_BACKUP does not exist'" +ssh "$SSH_USER@$SSH_HOST" "[ -e '$REMOTE_METRICS_BACKUP' ] && echo '$REMOTE_METRICS_BACKUP exists' || echo '$REMOTE_METRICS_BACKUP does not exist'" +ssh "$SSH_USER@$SSH_HOST" "[ -e '$REMOTE_WEBHOOKS_BACKUP' ] && echo '$REMOTE_WEBHOOKS_BACKUP exists' || echo '$REMOTE_WEBHOOKS_BACKUP does not exist'" +ssh "$SSH_USER@$SSH_HOST" "[ -e '$REMOTE_PERFORMANCE_BACKUP' ] && echo '$REMOTE_PERFORMANCE_BACKUP exists' || echo '$REMOTE_PERFORMANCE_BACKUP does not exist'" +ssh "$SSH_USER@$SSH_HOST" "[ -e 
'$REMOTE_METABASE_BACKUP' ] && echo '$REMOTE_METABASE_BACKUP exists' || echo '$REMOTE_METABASE_BACKUP does not exist'" +ssh "$SSH_USER@$SSH_HOST" "[ -e '$REMOTE_MINIO_BACKUP' ] && echo '$REMOTE_MINIO_BACKUP exists' || echo '$REMOTE_MINIO_BACKUP does not exist'" +echo 'VSExport backup will only exist 1 month after go live date:' +ssh "$SSH_USER@$SSH_HOST" "[ -e '$REMOTE_VSEXPORT_BACKUP' ] && echo '$REMOTE_VSEXPORT_BACKUP exists' || echo '$REMOTE_VSEXPORT_BACKUP does not exist'" diff --git a/infrastructure/create-github-environment.js b/infrastructure/create-github-environment.js new file mode 100644 index 00000000..c380e6b9 --- /dev/null +++ b/infrastructure/create-github-environment.js @@ -0,0 +1,226 @@ +const sodium = require('libsodium-wrappers') +const { Octokit } = require('@octokit/core') +const { writeFileSync } = require('fs') + +const config = { + environment: '', + repo: { + REPOSITORY_ID: '', + DOCKERHUB_ACCOUNT: '', // This may be a dockerhub organisation or the same as the username + DOCKERHUB_REPO: '', + DOCKER_USERNAME: process.env.DOCKER_USERNAME, + DOCKER_TOKEN: process.env.DOCKER_TOKEN + }, + ssh: { + KNOWN_HOSTS: process.env.KNOWN_HOSTS, + SSH_HOST: process.env.SSH_HOST, // IP address for the manager + SSH_USER: process.env.SSH_USER, + // SUDO_PASSWORD: process.env.SUDO_PASSWORD, // in case your user is not root + SSH_KEY: process.env.SSH_KEY // id_rsa + }, + infrastructure: { + DISK_SPACE: '', + HOSTNAME: '', // server machine hostname used when provisioning - TODO: Adapt to support 3 or 5 replicas + DOMAIN: '', // web hostname applied after all public subdomains in Traefik, + REPLICAS: '1' // TODO: Adapt to support 3 or 5 replicas + }, + services: { + SENTRY_DSN: process.env.SENTRY_DSN || '', + ELASTALERT_SLACK_WEBHOOK: process.env.ELASTALERT_SLACK_WEBHOOK || '', + INFOBIP_API_KEY: process.env.INFOBIP_API_KEY || '', + INFOBIP_GATEWAY_ENDPOINT: process.env.INFOBIP_GATEWAY_ENDPOINT || '', + INFOBIP_SENDER_ID: process.env.INFOBIP_SENDER_ID || '' // 
the name of the SMS sender e.g. OpenCRVS + }, + seeding: { + ACTIVATE_USERS: true, + AUTH_HOST: '', + COUNTRY_CONFIG_HOST: '', + GATEWAY_HOST: '' + }, + smtp: { + SMTP_HOST: process.env.SMTP_HOST || '', + SMTP_USERNAME: process.env.SMTP_USERNAME || '', + SMTP_PASSWORD: process.env.SMTP_PASSWORD || '', + EMAIL_API_KEY: process.env.EMAIL_API_KEY || '', + SMTP_PORT: '', + ALERT_EMAIL: '' + }, + vpn: { + // openconnect details for optional VPN + VPN_PROTOCOL: '', // e,g, fortinet, wireguard etc + VPN_HOST: process.env.VPN_HOST || '', + VPN_PORT: process.env.VPN_PORT || '', + VPN_USER: process.env.VPN_USER || '', + VPN_PWD: process.env.VPN_PWD || '', + VPN_SERVERCERT: process.env.VPN_SERVERCERT || '' + }, + whitelist: { + CONTENT_SECURITY_POLICY_WILDCARD: '', // e.g. *. + CLIENT_APP_URL: '', + LOGIN_URL: '' + }, + backup: { + BACKUP_HOST: process.env.BACKUP_HOST || '', + BACKUP_DIRECTORY: '', + qa: { + RESTORE_DIRECTORY: '' // If making use of script to restore a production backup on QA for regular monitoring + } + } +} + +const octokit = new Octokit({ + auth: process.env.GITHUB_TOKEN +}) + +async function createVariable(environment, name, value) { + await octokit.request( + `POST /repositories/${config.repo.REPOSITORY_ID}/environments/${config.environment}/variables`, + { + repository_id: config.repo.REPOSITORY_ID, + environment_name: environment, + name: name, + value: value, + headers: { + 'X-GitHub-Api-Version': '2022-11-28' + } + } + ) +} + +async function createSecret(environment, key, keyId, name, secret) { + //Check if libsodium is ready and then proceed. + await sodium.ready + + // Convert Secret & Base64 key to Uint8Array. 
+ let binkey = sodium.from_base64(key, sodium.base64_variants.ORIGINAL) + let binsec = sodium.from_string(secret) + + //Encrypt the secret using LibSodium + let encBytes = sodium.crypto_box_seal(binsec, binkey) + + // Convert encrypted Uint8Array to Base64 + const encryptedValue = sodium.to_base64( + encBytes, + sodium.base64_variants.ORIGINAL + ) + + await octokit.request( + `PUT /repositories/${config.repo.REPOSITORY_ID}/environments/${environment}/secrets/${name}`, + { + repository_id: config.repo.REPOSITORY_ID, + environment_name: environment, + secret_name: name, + encrypted_value: encryptedValue, + key_id: keyId, + headers: { + 'X-GitHub-Api-Version': '2022-11-28' + } + } + ) +} + +async function getPublicKey(environment) { + await octokit.request( + `PUT /repos/${config.repo.DOCKERHUB_ACCOUNT}/${config.repo.DOCKERHUB_REPO}/environments/${environment}`, + { + headers: { + 'X-GitHub-Api-Version': '2022-11-28' + } + } + ) + + const res = await octokit.request( + `GET /repositories/${config.repo.REPOSITORY_ID}/environments/${environment}/secrets/public-key`, + { + owner: config.repo.DOCKERHUB_ACCOUNT, + repo: config.repo.DOCKERHUB_REPO, + headers: { + 'X-GitHub-Api-Version': '2022-11-28' + } + } + ) + + return res.data +} + +function generateLongPassword() { + const chars = + '0123456789abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ-_' + let result = '' + for (var i = 16; i > 0; --i) + result += chars[Math.floor(Math.random() * chars.length)] + return result +} + +async function main() { + const { key, key_id } = await getPublicKey(config.environment) + let backupSecrets = {} + let backupVariables = {} + let vpnSecrets = {} + + if (process.argv.includes('--configure-backup')) { + backupSecrets = { + BACKUP_HOST: config.backup.BACKUP_HOST + } + backupVariables = { + BACKUP_DIRECTORY: config.backup.BACKUP_DIRECTORY, + RESTORE_DIRECTORY: config.backup.qa.RESTORE_DIRECTORY + } + } + + if (process.argv.includes('--configure-vpn')) { + vpnSecrets = { + 
...config.vpn + } + } + + const SECRETS = { + ELASTICSEARCH_SUPERUSER_PASSWORD: generateLongPassword(), + ENCRYPTION_KEY: generateLongPassword(), + KIBANA_USERNAME: 'opencrvs-admin', + KIBANA_PASSWORD: generateLongPassword(), + MINIO_ROOT_PASSWORD: generateLongPassword(), + MINIO_ROOT_USER: generateLongPassword(), + MONGODB_ADMIN_PASSWORD: generateLongPassword(), + MONGODB_ADMIN_USER: generateLongPassword(), + SUPER_USER_PASSWORD: generateLongPassword(), + DOCKERHUB_ACCOUNT: config.repo.DOCKERHUB_ACCOUNT, + DOCKERHUB_REPO: config.repo.DOCKERHUB_REPO, + DOCKER_TOKEN: config.repo.DOCKER_TOKEN, + ...config.ssh, + ...config.smtp, + ...config.services, + ...backupSecrets, + ...vpnSecrets + } + const VARIABLES = { + ...config.infrastructure, + ...config.seeding, + ...config.whitelist, + ...backupVariables + } + writeFileSync( + './.secrets/' + config.environment + '.json', + JSON.stringify([SECRETS, VARIABLES], null, 2) + ) + if (process.argv.includes('--dry-run')) { + console.log('Dry run. Not creating secrets or variables.') + process.exit(0) + } else { + for (const [secretName, secretValue] of Object.entries(SECRETS)) { + await createSecret( + config.environment, + key, + key_id, + secretName, + secretValue + ) + } + + for (const [variableName, variableValue] of Object.entries(VARIABLES)) { + await createVariable(config.environment, variableName, variableValue) + } + } +} + +main() diff --git a/infrastructure/deploy.sh b/infrastructure/deploy.sh index 066f36bf..07170318 100755 --- a/infrastructure/deploy.sh +++ b/infrastructure/deploy.sh @@ -22,6 +22,14 @@ for i in "$@"; do HOST="${i#*=}" shift ;; + --ssh_host=*) + SSH_HOST="${i#*=}" + shift + ;; + --ssh_user=*) + SSH_USER="${i#*=}" + shift + ;; --environment=*) ENV="${i#*=}" shift @@ -58,7 +66,7 @@ function trapint { } print_usage_and_exit () { - echo 'Usage: ./deploy.sh --clear_data=yes|no --host --environment --version --country_config_version --replicas' + echo 'Usage: ./deploy.sh --clear_data=yes|no --host 
--environment --ssh_host --ssh_user --version --country_config_version --replicas'
   echo "  --clear_data must have a value of 'yes' or 'no' set e.g. --clear_data=yes"
   echo "  --environment can be 'production' or 'development' or 'qa' or 'demo'"
   echo '  --host is the server to deploy to'
@@ -88,6 +96,16 @@ if [ -z "$VERSION" ] ; then
   print_usage_and_exit
 fi
+if [ -z "$SSH_HOST" ] ; then
+  echo 'Error: Argument --ssh_host is required.'
+  print_usage_and_exit
+fi
+
+if [ -z "$SSH_USER" ] ; then
+  echo 'Error: Argument --ssh_user is required.'
+  print_usage_and_exit
+fi
+
 if [ -z "$COUNTRY_CONFIG_VERSION" ] ; then
   echo 'Error: Argument --country_config_version is required.'
   print_usage_and_exit
 fi
@@ -179,6 +195,30 @@ if [ -z "$CONTENT_SECURITY_POLICY_WILDCARD" ] ; then
   print_usage_and_exit
 fi
+if [ -z "$DOCKER_USERNAME" ] ; then
+  echo 'Error: Missing environment variable DOCKER_USERNAME.'
+  print_usage_and_exit
+fi
+
+if [ -z "$SSH_KEY" ] ; then
+  echo 'Error: Missing environment variable SSH_KEY.'
+  print_usage_and_exit
+fi
+
+if [ -z "$DOCKER_TOKEN" ] ; then
+  echo 'Error: Missing environment variable DOCKER_TOKEN.'
+  print_usage_and_exit
+fi
+
+if [ -z "$KNOWN_HOSTS" ] ; then
+  echo 'Error: Missing environment variable KNOWN_HOSTS.'
+  print_usage_and_exit
+fi
+
+if [ -z "$SUDO_PASSWORD" ] ; then
+  echo 'Info: Missing optional sudo password'
+fi
+
 if [ -z "$TOKENSEEDER_MOSIP_AUTH__PARTNER_MISP_LK" ] ; then
   echo 'Info: Missing optional MOSIP environment variable TOKENSEEDER_MOSIP_AUTH__PARTNER_MISP_LK.'
TOKENSEEDER_MOSIP_AUTH__PARTNER_MISP_LK='' @@ -246,10 +286,6 @@ if [ -z "$SENDER_EMAIL_ADDRESS" ] ; then echo 'Info: Missing optional return sender email address environment variable SENDER_EMAIL_ADDRESS' fi - - -SSH_USER=${SSH_USER:-root} -SSH_HOST=${SSH_HOST:-$HOST} LOG_LOCATION=${LOG_LOCATION:-/var/log} (cd /tmp/ && curl -O https://raw.githubusercontent.com/opencrvs/opencrvs-core/$VERSION/docker-compose.yml) @@ -318,6 +354,13 @@ cp $BASEDIR/authorized_keys /tmp/opencrvs/infrastructure/authorized_keys # Copy metabase database cp $PARENT_DIR/src/api/dashboards/file/metabase.init.db.sql /tmp/opencrvs/infrastructure/metabase.init.db.sql +echo -e "$SSH_KEY" > /tmp/private_key_tmp +chmod 600 /tmp/private_key_tmp +echo -e "$KNOWN_HOSTS" > /tmp/known_hosts +chmod 600 /tmp/known_hosts +# Read private ssh key from SSH_KEY environment variable, convert to a public key and append to /tmp/opencrvs/infrastructure/authorized_keys file +echo $(ssh-keygen -y -f /tmp/private_key_tmp) >> /tmp/opencrvs/infrastructure/authorized_keys + rotate_authorized_keys() { # file exists and has a size of more than 0 bytes if [ -s "/tmp/opencrvs/infrastructure/authorized_keys" ]; then @@ -329,15 +372,18 @@ rotate_authorized_keys() { # Download base docker compose files to the server -rsync -rP /tmp/docker-compose* infrastructure $SSH_USER@$SSH_HOST:/opt/opencrvs/ +sudo rsync -e 'ssh -o UserKnownHostsFile=/tmp/known_hosts -i /tmp/private_key_tmp' -rP /tmp/docker-compose* infrastructure $SSH_USER@$SSH_HOST:/opt/opencrvs/ -rsync -rP $BASEDIR/docker-compose* infrastructure $SSH_USER@$SSH_HOST:/opt/opencrvs/ +sudo rsync -e 'ssh -o UserKnownHostsFile=/tmp/known_hosts -i /tmp/private_key_tmp' -rP $BASEDIR/docker-compose* infrastructure $SSH_USER@$SSH_HOST:/opt/opencrvs/ # Copy all country compose files to the server -rsync -rP $BASEDIR/docker-compose.countryconfig* infrastructure $SSH_USER@$SSH_HOST:/opt/opencrvs/ +sudo rsync -e 'ssh -o UserKnownHostsFile=/tmp/known_hosts -i /tmp/private_key_tmp' -rP 
$BASEDIR/docker-compose.countryconfig* infrastructure $SSH_USER@$SSH_HOST:/opt/opencrvs/ # Override configuration files with country specific files -rsync -rP /tmp/opencrvs/infrastructure $SSH_USER@$SSH_HOST:/opt/opencrvs +sudo rsync -e 'ssh -o UserKnownHostsFile=/tmp/known_hosts -i /tmp/private_key_tmp' -rP /tmp/opencrvs/infrastructure $SSH_USER@$SSH_HOST:/opt/opencrvs + +# IF USING SUDO PASSWORD, YOU MAY NEED TO ADJUST COMMANDS LIKE THIS: +# ssh $SSH_USER@$SSH_HOST "echo $SUDO_PASSWORD | sudo -S rotate_secrets() { files_to_rotate=$1 @@ -346,7 +392,7 @@ rotate_secrets() { } # Setup configuration files and compose file for the deployment domain -ssh $SSH_USER@$SSH_HOST "SMTP_HOST=$SMTP_HOST SMTP_PORT=$SMTP_PORT SMTP_USERNAME=$SMTP_USERNAME SMTP_PASSWORD=$SMTP_PASSWORD ALERT_EMAIL=$ALERT_EMAIL MINIO_ROOT_USER=$MINIO_ROOT_USER MINIO_ROOT_PASSWORD=$MINIO_ROOT_PASSWORD /opt/opencrvs/infrastructure/setup-deploy-config.sh $HOST | tee -a $LOG_LOCATION/setup-deploy-config.log" +ssh $SSH_USER@$SSH_HOST "SSH_USER=$SSH_USER SMTP_HOST=$SMTP_HOST SMTP_PORT=$SMTP_PORT SMTP_USERNAME=$SMTP_USERNAME SMTP_PASSWORD=$SMTP_PASSWORD ALERT_EMAIL=$ALERT_EMAIL MINIO_ROOT_USER=$MINIO_ROOT_USER MINIO_ROOT_PASSWORD=$MINIO_ROOT_PASSWORD /opt/opencrvs/infrastructure/setup-deploy-config.sh $HOST | tee -a $LOG_LOCATION/setup-deploy-config.log" # Takes in a space separated string of docker-compose.yml files # returns a new line separated list of images defined in those files @@ -547,3 +593,12 @@ if [ $CLEAR_DATA == "yes" ] ; then ELASTICSEARCH_ADMIN_PASSWORD=$ELASTICSEARCH_SUPERUSER_PASSWORD \ /opt/opencrvs/infrastructure/run-migrations.sh" fi + +echo "Setting up Kibana config & alerts" + +while true; do + if ssh $SSH_USER@$SSH_HOST "ELASTICSEARCH_SUPERUSER_PASSWORD=$ELASTICSEARCH_SUPERUSER_PASSWORD HOST=kibana$HOST /opt/opencrvs/infrastructure/monitoring/kibana/setup-config.sh"; then + break + fi +sleep 5 +done \ No newline at end of file diff --git a/infrastructure/emergency-restore-metadata.sh 
b/infrastructure/emergency-restore-metadata.sh index 5f75d984..123fcd42 100755 --- a/infrastructure/emergency-restore-metadata.sh +++ b/infrastructure/emergency-restore-metadata.sh @@ -36,7 +36,7 @@ done print_usage_and_exit() { echo 'Usage: ./emergency-restore-metadata.sh --label=XXX --replicas=XXX' -echo "This script CLEARS ALL DATA and RESTORES'S A SPECIFIC DAY'S or label's data. This process is irreversable, so USE WITH CAUTION." +echo "This script CLEARS ALL DATA and RESTORES A SPECIFIC DAY'S or label's data. This process is irreversible, so USE WITH CAUTION." echo "Script must receive a label parameter to restore data from that specific day in format +%Y-%m-%d i.e. 2019-01-01 or that label" echo "The Hearth, OpenHIM User and Application-config db backup zips you would like to restore from: hearth-dev-{label}.gz, openhim-dev-{label}.gz, user-mgnt-{label}.gz and application-config-{label}.gz must exist in /data/backups/mongo/ folder" echo "The Elasticsearch backup folder /data/backups/elasticsearch must exist with all previous snapshots and indices. All files are required" diff --git a/infrastructure/monitoring/kibana/setup-config.sh b/infrastructure/monitoring/kibana/setup-config.sh new file mode 100755 index 00000000..b751e4cb --- /dev/null +++ b/infrastructure/monitoring/kibana/setup-config.sh @@ -0,0 +1,20 @@ +# This Source Code Form is subject to the terms of the Mozilla Public +# License, v. 2.0. If a copy of the MPL was not distributed with this +# file, You can obtain one at https://mozilla.org/MPL/2.0/. +# +# OpenCRVS is also distributed under the terms of the Civil Registration +# & Healthcare Disclaimer located at http://opencrvs.org/license. +# +# Copyright (C) The OpenCRVS Authors located at https://github.com/opencrvs/opencrvs-core/blob/master/AUTHORS. 
+ +set -e +curl --insecure --connect-timeout 60 -u elastic:${ELASTICSEARCH_SUPERUSER_PASSWORD} -X POST https://$HOST/api/saved_objects/_import?overwrite=true -H 'kbn-xsrf: true' --form file=@/opt/opencrvs/infrastructure/monitoring/kibana/config.ndjson > /dev/null + +curl --insecure --connect-timeout 60 -u elastic:${ELASTICSEARCH_SUPERUSER_PASSWORD} \ +https://$HOST/api/alerting/rules/_find\?page\=1\&per_page\=100\&default_search_operator\=AND\&sort_field\=name\&sort_order\=asc | \ +jq -r '.data[].id' | \ +while read -r id; do + echo "$id"; + curl --insecure --connect-timeout 60 -X POST -H 'kbn-xsrf: true' -u elastic:${ELASTICSEARCH_SUPERUSER_PASSWORD} "https://$HOST/api/alerting/rule/$id/_disable" + curl --insecure --connect-timeout 60 -X POST -H 'kbn-xsrf: true' -u elastic:${ELASTICSEARCH_SUPERUSER_PASSWORD} "https://$HOST/api/alerting/rule/$id/_enable" +done \ No newline at end of file diff --git a/infrastructure/server-setup/playbook-3.yml b/infrastructure/server-setup/playbook-3.yml index 9dc2de81..75721ae8 100644 --- a/infrastructure/server-setup/playbook-3.yml +++ b/infrastructure/server-setup/playbook-3.yml @@ -386,6 +386,37 @@ copy: src: ../logrotate.conf dest: /etc/ + + - name: Give read and write permissions to logrotate.conf because we replace it on each deploy + file: + path: /etc/logrotate.conf + owner: '{{ ansible_user }}' + group: '{{ ansible_user }}' + mode: 'u+rwX,g+rwX,o-rwx' + + - name: Create deploy logfile + ansible.builtin.file: + path: /var/log/setup-deploy-config.log + owner: '{{ ansible_user }}' + group: '{{ ansible_user }}' + state: touch + mode: 'u+rwX,g+rwX,o-rwx' + + - name: Create secret logfile + ansible.builtin.file: + path: /var/log/rotate-secrets.log + owner: '{{ ansible_user }}' + group: '{{ ansible_user }}' + state: touch + mode: 'u+rwX,g+rwX,o-rwx' + + - name: Create backup logfile + ansible.builtin.file: + path: /var/log/opencrvs-backup.log + owner: '{{ ansible_user }}' + group: '{{ ansible_user }}' + state: touch + mode: 
'u+rwX,g+rwX,o-rwx' - name: 'Save system logs to Papertrail' register: papaertrailSystemLogs @@ -412,6 +443,11 @@ rule: allow port: 4789 proto: udp + - name: Allow all access to tcp port 443 + ufw: + rule: allow + port: '443' + proto: tcp - name: 'Deny everything else and enable UFW' ufw: diff --git a/infrastructure/server-setup/playbook-5.yml b/infrastructure/server-setup/playbook-5.yml index d6b195f4..6f69a894 100644 --- a/infrastructure/server-setup/playbook-5.yml +++ b/infrastructure/server-setup/playbook-5.yml @@ -375,6 +375,37 @@ copy: src: ../logrotate.conf dest: /etc/ + + - name: Give read and write permissions to logrotate.conf because we replace it on each deploy + file: + path: /etc/logrotate.conf + owner: '{{ ansible_user }}' + group: '{{ ansible_user }}' + mode: 'u+rwX,g+rwX,o-rwx' + + - name: Create deploy logfile + ansible.builtin.file: + path: /var/log/setup-deploy-config.log + owner: '{{ ansible_user }}' + group: '{{ ansible_user }}' + state: touch + mode: 'u+rwX,g+rwX,o-rwx' + + - name: Create secret logfile + ansible.builtin.file: + path: /var/log/rotate-secrets.log + owner: '{{ ansible_user }}' + group: '{{ ansible_user }}' + state: touch + mode: 'u+rwX,g+rwX,o-rwx' + + - name: Create backup logfile + ansible.builtin.file: + path: /var/log/opencrvs-backup.log + owner: '{{ ansible_user }}' + group: '{{ ansible_user }}' + state: touch + mode: 'u+rwX,g+rwX,o-rwx' - name: 'Save system logs to Papertrail' register: papaertrailSystemLogs @@ -401,6 +432,11 @@ rule: allow port: 4789 proto: udp + - name: Allow all access to tcp port 443 + ufw: + rule: allow + port: '443' + proto: tcp - name: 'Deny everything else and enable UFW' ufw: diff --git a/infrastructure/setup-deploy-config.sh b/infrastructure/setup-deploy-config.sh index e0d1f615..ef40e56a 100755 --- a/infrastructure/setup-deploy-config.sh +++ b/infrastructure/setup-deploy-config.sh @@ -13,6 +13,9 @@ echo "Setting up deployment config for $1 - `date --iso-8601=ns`" # Set hostname in 
openhim-console config sed -i "s/{{hostname}}/$1/g" /opt/opencrvs/infrastructure/openhim-console-config.deploy.json +# Set ssh user in logrotate.conf +sed -i -e "s%{{SSH_USER}}%$SSH_USER%" /opt/opencrvs/infrastructure/logrotate.conf + # Set hostname in compose file sed -i "s/{{hostname}}/$1/g" /opt/opencrvs/docker-compose.deploy.yml diff --git a/infrastructure/test-restore-backup.sh b/infrastructure/test-restore-backup.sh new file mode 100755 index 00000000..1022aab7 --- /dev/null +++ b/infrastructure/test-restore-backup.sh @@ -0,0 +1,115 @@ +# This Source Code Form is subject to the terms of the Mozilla Public +# License, v. 2.0. If a copy of the MPL was not distributed with this +# file, You can obtain one at https://mozilla.org/MPL/2.0/. +# +# OpenCRVS is also distributed under the terms of the Civil Registration +# & Healthcare Disclaimer located at http://opencrvs.org/license. +# +# Copyright (C) The OpenCRVS Authors located at https://github.com/opencrvs/opencrvs-core/blob/master/AUTHORS. + +#------------------------------------------------------------------------------------------------------------------ +# THIS SCRIPT RUNS FROM A GITHUB ACTION TO TEST RESTORE A BACKUP ONTO A QA SERVER AS A REGULAR MONITORING EXERCISE +#------------------------------------------------------------------------------------------------------------------ + +# Uncomment if your SSH user is not root +# +#if [ -z "$SUDO_PASSWORD" ] ; then +# echo 'Error: Missing environment variable SUDO_PASSWORD.' +# exit 1 +#fi + +if [ -z "$SSH_USER" ] ; then + echo 'Error: Missing environment variable SSH_USER.' + exit 1 +fi + +if [ -z "$SSH_HOST" ] ; then + echo 'Error: Missing environment variable SSH_HOST.' + exit 1 +fi + +if [ -z "$BACKUP_HOST" ] ; then + echo 'Error: Missing environment variable BACKUP_HOST.' + exit 1 +fi + +if [ -z "$BACKUP_DIRECTORY" ] ; then + echo 'Error: Missing environment variable BACKUP_DIRECTORY.' 
+ exit 1 +fi + +if [ -z "$RESTORE_DIRECTORY" ] ; then + echo 'Error: Missing environment variable RESTORE_DIRECTORY.' + exit 1 +fi + +if [ -z "$REPLICAS" ] ; then + echo 'Error: Missing environment variable REPLICAS.' + exit 1 +fi + +if [ -z "$QA_BACKUP_LABEL" ] ; then + echo 'Error: Missing environment variable QA_BACKUP_LABEL.' + exit 1 +fi + +if [[ $REVERTING == "no" && -z "$PROD_BACKUP_LABEL" ]] ; then + echo 'Error: Missing environment variable PROD_BACKUP_LABEL when restoring a production backup.' + exit 1 +fi + +if [ -z "$REVERTING" ] ; then + echo 'Error: Missing environment variable REVERTING.' + exit 1 +fi + +if [ $REVERTING == "no" ] ; then + # Backup QA environment first + ssh "$SSH_USER@$SSH_HOST" "echo $SUDO_PASSWORD | sudo -S bash /opt/opencrvs/infrastructure/emergency-backup-metadata.sh --ssh_user=$SSH_USER --ssh_host=$BACKUP_HOST --ssh_port=22 --production_ip=$SSH_HOST --remote_dir=$BACKUP_DIRECTORY/qa --replicas=$REPLICAS --label=$QA_BACKUP_LABEL" + LABEL="$PROD_BACKUP_LABEL" + REMOTE_DIR="$BACKUP_DIRECTORY/$LABEL" +else + LABEL="$QA_BACKUP_LABEL" + REMOTE_DIR="$BACKUP_DIRECTORY/qa/$LABEL" +fi + +# Copy production backup into restore folder +ssh "$SSH_USER@$SSH_HOST" "rm -rf $RESTORE_DIRECTORY/elasticsearch" +ssh "$SSH_USER@$SSH_HOST" "rm -rf $RESTORE_DIRECTORY/elasticsearch/indices" +ssh "$SSH_USER@$SSH_HOST" "rm -rf $RESTORE_DIRECTORY/influxdb" +ssh "$SSH_USER@$SSH_HOST" "rm -rf $RESTORE_DIRECTORY/mongo" +ssh "$SSH_USER@$SSH_HOST" "rm -rf $RESTORE_DIRECTORY/minio" +ssh "$SSH_USER@$SSH_HOST" "rm -rf $RESTORE_DIRECTORY/metabase" +ssh "$SSH_USER@$SSH_HOST" "rm -rf $RESTORE_DIRECTORY/vsexport" +ssh "$SSH_USER@$SSH_HOST" "rm -rf $RESTORE_DIRECTORY/metabase" + +ssh "$SSH_USER@$SSH_HOST" "rsync -av --delete --progress $SSH_USER@$BACKUP_HOST:$REMOTE_DIR/elasticsearch $RESTORE_DIRECTORY" +ssh "$SSH_USER@$SSH_HOST" "rsync -av --delete --progress $SSH_USER@$BACKUP_HOST:$REMOTE_DIR/mongo/hearth-dev-$LABEL.gz $RESTORE_DIRECTORY/mongo/" +ssh 
"$SSH_USER@$SSH_HOST" "rsync -av --delete --progress $SSH_USER@$BACKUP_HOST:$REMOTE_DIR/mongo/openhim-dev-$LABEL.gz $RESTORE_DIRECTORY/mongo/" +ssh "$SSH_USER@$SSH_HOST" "rsync -av --delete --progress $SSH_USER@$BACKUP_HOST:$REMOTE_DIR/mongo/user-mgnt-$LABEL.gz $RESTORE_DIRECTORY/mongo/" +ssh "$SSH_USER@$SSH_HOST" "rsync -av --delete --progress $SSH_USER@$BACKUP_HOST:$REMOTE_DIR/mongo/application-config-$LABEL.gz $RESTORE_DIRECTORY/mongo/" +ssh "$SSH_USER@$SSH_HOST" "rsync -av --delete --progress $SSH_USER@$BACKUP_HOST:$REMOTE_DIR/mongo/metrics-$LABEL.gz $RESTORE_DIRECTORY/mongo/" +ssh "$SSH_USER@$SSH_HOST" "rsync -av --delete --progress $SSH_USER@$BACKUP_HOST:$REMOTE_DIR/mongo/webhooks-$LABEL.gz $RESTORE_DIRECTORY/mongo/" +ssh "$SSH_USER@$SSH_HOST" "rsync -av --delete --progress $SSH_USER@$BACKUP_HOST:$REMOTE_DIR/mongo/performance-$LABEL.gz $RESTORE_DIRECTORY/mongo/" +ssh "$SSH_USER@$SSH_HOST" "rsync -av --delete --progress $SSH_USER@$BACKUP_HOST:$REMOTE_DIR/influxdb/$LABEL $RESTORE_DIRECTORY/influxdb" +ssh "$SSH_USER@$SSH_HOST" "rsync -av --delete --progress $SSH_USER@$BACKUP_HOST:$REMOTE_DIR/minio/ocrvs-$LABEL.tar.gz $RESTORE_DIRECTORY/minio/" +ssh "$SSH_USER@$SSH_HOST" "rsync -av --delete --progress $SSH_USER@$BACKUP_HOST:$REMOTE_DIR/metabase/ocrvs-$LABEL.tar.gz $RESTORE_DIRECTORY/metabase/" +ssh "$SSH_USER@$SSH_HOST" "rsync -av --delete --progress $SSH_USER@$BACKUP_HOST:$REMOTE_DIR/vsexport/ocrvs-$LABEL.tar.gz $RESTORE_DIRECTORY/vsexport/" + +# Restore +echo "Restoring" +# Uncomment if your SSH user is not root +# +# ssh "$SSH_USER@$SSH_HOST" "echo $SUDO_PASSWORD | sudo -S rm -rf /data/backups/elasticsearch && mv $RESTORE_DIRECTORY/elasticsearch /data/backups/" +# else +ssh "$SSH_USER@$SSH_HOST" "rm -rf /data/backups/elasticsearch && mv $RESTORE_DIRECTORY/elasticsearch /data/backups/" + +ssh "$SSH_USER@$SSH_HOST" "docker service update --force --update-parallelism 1 --update-delay 30s opencrvs_elasticsearch" +echo "Waiting 2 mins for elasticsearch to restart." 
+echo +sleep 120 +# Uncomment if your SSH user is not root +# +# ssh "$SSH_USER@$SSH_HOST" "echo $SUDO_PASSWORD | sudo -S bash /opt/opencrvs/infrastructure/emergency-restore-metadata.sh --label=$LABEL --replicas=$REPLICAS --backup-dir=$RESTORE_DIRECTORY" +# else +ssh "$SSH_USER@$SSH_HOST" "/opt/opencrvs/infrastructure/emergency-restore-metadata.sh --label=$LABEL --replicas=$REPLICAS --backup-dir=$RESTORE_DIRECTORY" \ No newline at end of file diff --git a/package.json b/package.json index 7dc0648b..cf583ff2 100644 --- a/package.json +++ b/package.json @@ -26,6 +26,7 @@ "deploy": "bash infrastructure/deploy.sh", "restore-snapshot": "bash infrastructure/restore-snapshot.sh", "snapshot": "bash infrastructure/emergency-backup-metadata.sh", + "backup:check": "bash infrastructure/backup-check.sh", "port-forward": "bash infrastructure/port-forward.sh", "validate-translations": "ts-node src/validate-translations.ts" }, @@ -40,6 +41,8 @@ "@types/handlebars": "^4.1.0", "@types/hapi__inert": "^5.2.3", "@types/jsonwebtoken": "^8.5.8", + "@types/libsodium-wrappers": "^0.7.10", "@types/react-intl": "^3.0.0", + "@octokit/core": "4.2.1", "@typescript-eslint/eslint-plugin": "^5.60.1", "@typescript-eslint/parser": "^5.60.1", "cypress-xpath": "^2.0.1", diff --git a/yarn.lock b/yarn.lock index 0df0a133..a9200f3e 100644 --- a/yarn.lock +++ b/yarn.lock @@ -2568,6 +2568,75 @@ "@nodelib/fs.scandir" "2.1.5" fastq "^1.6.0" +"@octokit/auth-token@^3.0.0": + version "3.0.4" + resolved "https://registry.yarnpkg.com/@octokit/auth-token/-/auth-token-3.0.4.tgz#70e941ba742bdd2b49bdb7393e821dea8520a3db" + integrity sha512-TWFX7cZF2LXoCvdmJWY7XVPi74aSY0+FfBZNSXEXFkMpjcqsQwDSYVv5FhRFaI0V1ECnwbz4j59T/G+rXNWaIQ== + +"@octokit/core@4.2.1": + version "4.2.1" + resolved "https://registry.yarnpkg.com/@octokit/core/-/core-4.2.1.tgz#fee6341ad0ce60c29cc455e056cd5b500410a588" + integrity sha512-tEDxFx8E38zF3gT7sSMDrT1tGumDgsw5yPG6BBh/X+5ClIQfMH/Yqocxz1PnHx6CHyF6pxmovUTOfZAUvQ0Lvw== + dependencies: + "@octokit/auth-token" "^3.0.0" + 
"@octokit/graphql" "^5.0.0" + "@octokit/request" "^6.0.0" + "@octokit/request-error" "^3.0.0" + "@octokit/types" "^9.0.0" + before-after-hook "^2.2.0" + universal-user-agent "^6.0.0" + +"@octokit/endpoint@^7.0.0": + version "7.0.6" + resolved "https://registry.yarnpkg.com/@octokit/endpoint/-/endpoint-7.0.6.tgz#791f65d3937555141fb6c08f91d618a7d645f1e2" + integrity sha512-5L4fseVRUsDFGR00tMWD/Trdeeihn999rTMGRMC1G/Ldi1uWlWJzI98H4Iak5DB/RVvQuyMYKqSK/R6mbSOQyg== + dependencies: + "@octokit/types" "^9.0.0" + is-plain-object "^5.0.0" + universal-user-agent "^6.0.0" + +"@octokit/graphql@^5.0.0": + version "5.0.6" + resolved "https://registry.yarnpkg.com/@octokit/graphql/-/graphql-5.0.6.tgz#9eac411ac4353ccc5d3fca7d76736e6888c5d248" + integrity sha512-Fxyxdy/JH0MnIB5h+UQ3yCoh1FG4kWXfFKkpWqjZHw/p+Kc8Y44Hu/kCgNBT6nU1shNumEchmW/sUO1JuQnPcw== + dependencies: + "@octokit/request" "^6.0.0" + "@octokit/types" "^9.0.0" + universal-user-agent "^6.0.0" + +"@octokit/openapi-types@^18.0.0": + version "18.1.1" + resolved "https://registry.yarnpkg.com/@octokit/openapi-types/-/openapi-types-18.1.1.tgz#09bdfdabfd8e16d16324326da5148010d765f009" + integrity sha512-VRaeH8nCDtF5aXWnjPuEMIYf1itK/s3JYyJcWFJT8X9pSNnBtriDf7wlEWsGuhPLl4QIH4xM8fqTXDwJ3Mu6sw== + +"@octokit/request-error@^3.0.0": + version "3.0.3" + resolved "https://registry.yarnpkg.com/@octokit/request-error/-/request-error-3.0.3.tgz#ef3dd08b8e964e53e55d471acfe00baa892b9c69" + integrity sha512-crqw3V5Iy2uOU5Np+8M/YexTlT8zxCfI+qu+LxUB7SZpje4Qmx3mub5DfEKSO8Ylyk0aogi6TYdf6kxzh2BguQ== + dependencies: + "@octokit/types" "^9.0.0" + deprecation "^2.0.0" + once "^1.4.0" + +"@octokit/request@^6.0.0": + version "6.2.8" + resolved "https://registry.yarnpkg.com/@octokit/request/-/request-6.2.8.tgz#aaf480b32ab2b210e9dadd8271d187c93171d8eb" + integrity sha512-ow4+pkVQ+6XVVsekSYBzJC0VTVvh/FCTUUgTsboGq+DTeWdyIFV8WSCdo0RIxk6wSkBTHqIK1mYuY7nOBXOchw== + dependencies: + "@octokit/endpoint" "^7.0.0" + "@octokit/request-error" "^3.0.0" + "@octokit/types" 
"^9.0.0" + is-plain-object "^5.0.0" + node-fetch "^2.6.7" + universal-user-agent "^6.0.0" + +"@octokit/types@^9.0.0": + version "9.3.2" + resolved "https://registry.yarnpkg.com/@octokit/types/-/types-9.3.2.tgz#3f5f89903b69f6a2d196d78ec35f888c0013cac5" + integrity sha512-D4iHGTdAnEEVsB8fl95m1hiz7D5YiRdQ9b/OEb3BYRVwbLsGHcRVPz+u+BgRLNk0Q0/4iZCBqDN96j2XNxfXrA== + dependencies: + "@octokit/openapi-types" "^18.0.0" + "@parcel/watcher@^2.1.0": version "2.1.0" resolved "https://registry.yarnpkg.com/@parcel/watcher/-/watcher-2.1.0.tgz#5f32969362db4893922c526a842d8af7a8538545" @@ -3355,6 +3424,11 @@ resolved "https://registry.yarnpkg.com/@types/jwt-decode/-/jwt-decode-2.2.1.tgz#afdf5c527fcfccbd4009b5fd02d1e18241f2d2f2" integrity sha512-aWw2YTtAdT7CskFyxEX2K21/zSDStuf/ikI3yBqmwpwJF0pS+/IX5DWv+1UFffZIbruP6cnT9/LAJV1gFwAT1A== +"@types/libsodium-wrappers@^0.7.10": + version "0.7.13" + resolved "https://registry.yarnpkg.com/@types/libsodium-wrappers/-/libsodium-wrappers-0.7.13.tgz#769c4ea01de96bb297207586a70777ebf066dcb4" + integrity sha512-KeAKtlObirLJk/na6jHBFEdTDjDfFS6Vcr0eG2FjiHKn3Nw8axJFfIu0Y9TpwaauRldQBj/pZm/MHtK76r6OWg== + "@types/lodash@^4.14.117": version "4.14.186" resolved "https://registry.yarnpkg.com/@types/lodash/-/lodash-4.14.186.tgz#862e5514dd7bd66ada6c70ee5fce844b06c8ee97" @@ -4102,6 +4176,11 @@ bcryptjs@^2.4.3: resolved "https://registry.yarnpkg.com/bcryptjs/-/bcryptjs-2.4.3.tgz#9ab5627b93e60621ff7cdac5da9733027df1d0cb" integrity sha512-V/Hy/X9Vt7f3BbPJEi8BdVFMByHi+jNXrYkW3huaybV/kQ0KJg0Y6PkEMbn+zeT+i+SiKZ/HMqJGIIt4LZDqNQ== +before-after-hook@^2.2.0: + version "2.2.3" + resolved "https://registry.yarnpkg.com/before-after-hook/-/before-after-hook-2.2.3.tgz#c51e809c81a4e354084422b9b26bad88249c517c" + integrity sha512-NzUnlZexiaH/46WDhANlyR2bXRopNg4F/zuSA3OpZnllCUgRaOF2znDioDWrmbNVsuZk6l9pMquQB38cfBZwkQ== + binary-extensions@^2.0.0: version "2.2.0" resolved 
"https://registry.yarnpkg.com/binary-extensions/-/binary-extensions-2.2.0.tgz#75f502eeaf9ffde42fc98829645be4ea76bd9e2d" @@ -5025,6 +5104,11 @@ dependency-graph@^0.11.0: resolved "https://registry.yarnpkg.com/dependency-graph/-/dependency-graph-0.11.0.tgz#ac0ce7ed68a54da22165a85e97a01d53f5eb2e27" integrity sha512-JeMq7fEshyepOWDfcfHK06N3MhyPhz++vtqWhMT5O9A3K42rdsEDpfdVqjaqaAhsw6a+ZqeDvQVtD0hFHQWrzg== +deprecation@^2.0.0: + version "2.3.1" + resolved "https://registry.yarnpkg.com/deprecation/-/deprecation-2.3.1.tgz#6368cbdb40abf3373b525ac87e4a260c3a700919" + integrity sha512-xmHIy4F3scKVwMsQ4WnVaS8bHOx0DmVwRywosKhaILI0ywMDWPtBSku2HNxRvF7jtwDRsoEwYQSfbxj8b7RlJQ== + detect-indent@^6.0.0: version "6.1.0" resolved "https://registry.yarnpkg.com/detect-indent/-/detect-indent-6.1.0.tgz#592485ebbbf6b3b1ab2be175c8393d04ca0d57e6" @@ -6572,6 +6656,11 @@ is-plain-object@^2.0.3, is-plain-object@^2.0.4: dependencies: isobject "^3.0.1" +is-plain-object@^5.0.0: + version "5.0.0" + resolved "https://registry.yarnpkg.com/is-plain-object/-/is-plain-object-5.0.0.tgz#4427f50ab3429e9025ea7d52e9043a9ef4159344" + integrity sha512-VRSzKkbMm5jMDoKLbltAkFQ5Qr7VDiTFGXxYFXXowVj387GeGNOCsOH6Msy00SGZ3Fp84b1Naa1psqgcCIEP5Q== + is-potential-custom-element-name@^1.0.1: version "1.0.1" resolved "https://registry.yarnpkg.com/is-potential-custom-element-name/-/is-potential-custom-element-name-1.0.1.tgz#171ed6f19e3ac554394edf78caa05784a45bebb5" @@ -9992,6 +10081,11 @@ union-value@^1.0.0: is-extendable "^0.1.1" set-value "^2.0.1" +universal-user-agent@^6.0.0: + version "6.0.1" + resolved "https://registry.yarnpkg.com/universal-user-agent/-/universal-user-agent-6.0.1.tgz#15f20f55da3c930c57bddbf1734c6654d5fd35aa" + integrity sha512-yCzhz6FN2wU1NiiQRogkTQszlQSlpWaw8SvVegAc+bDxbzHgh1vX8uIe8OYyMH6DwH+sdTJsgMl36+mSMdRJIQ== + universalify@^0.2.0: version "0.2.0" resolved "https://registry.yarnpkg.com/universalify/-/universalify-0.2.0.tgz#6451760566fa857534745ab1dde952d1b1761be0" From 
007f1f9c363678b3521bc450352408f655fe2e26 Mon Sep 17 00:00:00 2001 From: euanmillar Date: Fri, 17 Nov 2023 14:38:12 +0000 Subject: [PATCH 3/3] Fixes to deploy scripts --- .github/workflows/test-restore-backup.yml | 92 ---------------- infrastructure/authorized_keys | 9 +- infrastructure/create-github-environment.js | 36 +++--- infrastructure/deploy.sh | 17 ++- infrastructure/docker-compose.deploy.yml | 3 + infrastructure/logrotate.conf | 10 +- infrastructure/server-setup/example-1.ini | 4 +- infrastructure/server-setup/playbook-1.yml | 49 +++++++++ infrastructure/setup-deploy-config.sh | 3 - infrastructure/test-restore-backup.sh | 115 -------------------- package.json | 1 + yarn.lock | 12 ++ 12 files changed, 109 insertions(+), 242 deletions(-) delete mode 100644 .github/workflows/test-restore-backup.yml delete mode 100755 infrastructure/test-restore-backup.sh diff --git a/.github/workflows/test-restore-backup.yml b/.github/workflows/test-restore-backup.yml deleted file mode 100644 index 6b13c4d5..00000000 --- a/.github/workflows/test-restore-backup.yml +++ /dev/null @@ -1,92 +0,0 @@ -name: Restore a QA or Production backup on QA -run-name: Restore a ${{ github.event.inputs.testing-prod-backup }} backup on QA -on: - workflow_dispatch: - inputs: - branch_name: - description: Branch to run workflow from - default: develop - required: true - qa-backup-label: - description: Your lowercase QA backup label - required: true - prod-backup-label: - description: Your lowercase prod backup label - testing-prod-backup: - type: choice - description: Whether to restore a qa or production backup - required: true - default: 'no' - options: - - 'qa' - - 'production' -jobs: - restore-backup: - environment: qa - runs-on: ubuntu-20.04 - timeout-minutes: 60 - strategy: - matrix: - node-version: [16.20.0] - steps: - - uses: actions/checkout@v2 - if: github.event_name == 'workflow_dispatch' - with: - ref: '${{ github.event.inputs.branch_name }}' - - # - # Uncomment if using VPN - # - #- 
name: Install openconnect ppa - # run: sudo add-apt-repository ppa:dwmw2/openconnect -y && sudo apt update - - #- name: Install openconnect - # run: sudo apt install -y openconnect - - #- name: Connect to VPN - # run: | - # echo "${{ secrets.VPN_PWD }}" | sudo openconnect -u ${{ secrets.VPN_USER }} --passwd-on-stdin --protocol=${{ secrets.VPN_PROTOCOL }} ${{ secrets.VPN_HOST }}:${{ secrets.VPN_PORT }} --servercert ${{ secrets.VPN_SERVERCERT }} --background - - #- name: Test if IP is reachable - # run: | - # ping -c4 ${{ secrets.SSH_HOST }} - - - name: Install SSH Key - uses: shimataro/ssh-key-action@v2 - with: - key: ${{ secrets.SSH_KEY }} - known_hosts: ${{ secrets.KNOWN_HOSTS }} - - - name: Test a production backup on QA - id: production-backup-check - if: ${{ github.event.inputs.testing-prod-backup == 'production' }} - env: - SUDO_PASSWORD: ${{ secrets.VPN_PWD }} - SSH_USER: ${{ secrets.SSH_USER }} - SSH_HOST: ${{ secrets.SSH_HOST }} - BACKUP_HOST: ${{ secrets.BACKUP_HOST }} - BACKUP_DIRECTORY: ${{ vars.BACKUP_DIRECTORY }} - RESTORE_DIRECTORY: ${{ vars.RESTORE_DIRECTORY }} - REPLICAS: ${{ vars.REPLICAS }} - QA_BACKUP_LABEL: ${{ github.event.inputs.qa-backup-label }} - PROD_BACKUP_LABEL: ${{ github.event.inputs.prod-backup-label }} - REVERTING: "no" - run: | - bash infrastructure/test-restore-backup.sh - - - name: Restore a QA backup - id: qa-backup-check - if: ${{ github.event.inputs.testing-prod-backup == 'qa' }} - env: - SUDO_PASSWORD: ${{ secrets.VPN_PWD }} - SSH_USER: ${{ secrets.SSH_USER }} - SSH_HOST: ${{ secrets.SSH_HOST }} - BACKUP_HOST: ${{ secrets.BACKUP_HOST }} - BACKUP_DIRECTORY: ${{ vars.BACKUP_DIRECTORY }} - RESTORE_DIRECTORY: ${{ vars.RESTORE_DIRECTORY }} - REPLICAS: ${{ vars.REPLICAS }} - QA_BACKUP_LABEL: ${{ github.event.inputs.qa-backup-label }} - PROD_BACKUP_LABEL: "" - REVERTING: "yes" - run: | - bash infrastructure/test-restore-backup.sh \ No newline at end of file diff --git a/infrastructure/authorized_keys b/infrastructure/authorized_keys 
index 4f3e7723..4559db7f 100644 --- a/infrastructure/authorized_keys +++ b/infrastructure/authorized_keys @@ -1,6 +1,5 @@ +ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABgQDYclwfSSTNDFaxX+tSZRbxw1kuTuhw5E3XvYC9HvOv828fMLlUEklSt+lfQLI8qaPqHO4NQaW0P4a3BM/IVoXc8jXyaItr7i143RL/lRy7wERiZJ83EKd8cb3pN5u3tG3ciAzA98xllUNPOuireZ7HYXqS/96/ITz3lZtWZ1QI413APee9le8z6VfPXgb/e+EVYvsqJGRKa5eWZMvkzzUnkI7cFGCxTM/sS0Aez1RCy7hZbBG9pOVOdHswGEXQQgwJTrigRAwp1HEhsbTgVHHlDnZlM+gmcjGpW7OgTDKGDgKqsHJp/uIHXftCa+/cSllA8xeLPrv0IWpmXD98HQybl32oU21XRKutR0EvOAblzLCS0SPKb9nuY8aAmnrt9Si9UyJxMeMOoaABs5/XAGHmSGrzKgbxXyNMrd9t21hF11PWi3PM/SMj9LGyW9+aw8qDMhUmTuzbfxoklESp61aik8QqLS9z8Pauq4XfPUhMPX/1H1Raa/TPPHOC2aKOhrU= nafiu@DESKTOP-K9VRGPP ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAACAQDECqHO65UpyrrO8uueD06RxGaVVq22f152Rf8qVQQAAIGAMu6gCs7ztlZ8a3yQgSEIjM/Jl1/RqIVs6CziTEef74nLFTZ5Ufz3CLRVgdebBeSBEmhTfTUV0HLkSyNzwKFpuzJxucGd72ulPvEp6eHvyJAPJz37YcU8cjaL1v05T6s2ee99li35GlDDtCzfjVV4ZPAg5JdfWuTj41RAVC0LQhk2/NB4qEu37UxGGjhRFSjBEsS5LxI9QfvgrsHpl/VOn+soH7ZkK7kS6qRgNP/uYsXRWXhHaamcl5OX68gJWTbrW6c7PCqlbCWGnsHJswCmqPIthwXXMfC7ULDNLSKG6mslAt5Dyc8/MCr3vTW7pDyr2d0FvvY86SMQUggxv3qF7TZewqfX1bhK0fMLarIxVMQ1RFo//wN9QGA+2we8rxd2Y1Kr1DBuJyuwXPfv+Exo8yNYQ+x/AYH5k6UVcSYuaB8eYmplG2KQCxt8RBFtoChrwOKNRWLqXdKyfpdp5XmnnWxPvR95gf3h3yLocVYkF0i0uvKKJ0vt8J0Ezfkdfow0B1kUg5bPXKJROX7PwbaCPdYcxyDaO6wwOigRnSmoFvkH1pLb4j1RQAXcX531CHgfN6Izi/h0mpMS4bnyIUcv2GQr+h4z4TxcCtj7qpH2y6yw7XG12jVh7TfeesXG2Q== euanmillar77@gmail.com -ssh-ed25519 AAAAC3NzaC1lZDI1NTE5AAAAIGUprcQyUFYwRto0aRpgriR95C1pgNxrQ0lEWEe1D8he haidertameem@gmail.com -ssh-rsa 
AAAAB3NzaC1yc2EAAAADAQABAAABgQDH6qZgtXhWf+xKwpWmHez6F44VRWrUJPF/aa+qdAc+EUNcUUNq8a/KflVthI15GW83VdHSgOCqrqnZTl/shfjOlq9FxsCDI9BY1Zt2+Dc/YMiY4519jM8QEUpZllJTAsBbaz3MSfqda67lEY8sQp9Jk5hw3vUOYxPBtHLSBehwnj4rNnobRrQ4YeOPnLry+cwf8tuQ1ftaxmsKeSKc8blToj2zJHe5t2a5CkOmCOCjAoToVVHWtUZwZ8E3Xrwdyod1q3vnNjofHPr8TFYpuJlAaIAtko4w8XaeQAbfz+iWGpOSYjbaMhG3gq05kfTm3XUEUsbwSeWhZ8d7F0XDESjHDELQYcikHgm6ywnUtpMht16rbjnZ+h8mDlE/Ftz9N9gkzn7bTTMYRRUrjJUTppH9opNbzlcr38zAhNDEeRvGIhxx6/jRZ0xk1SqQhYh08M3URpvIDbh/Umm7NO/cN6ZX8ogrrMSRfoYvk2u4gioX5qfyIDSVwiVztMjkPdZ/rhU= pyry@opencrvs.org -ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAACAQDcVKepFZMsfxinL9vEbR7l9goXFTbK80s746USf5jHrFTuTDsrsa5Jq+HQaSjLHq/ni6FIq2BJ0+wdk2paqXHO41tBKjKQMd88PkGLwaM/0jNnXdSrva0XWHKh3OFxl0RsF803r9gSs3K2As3nUZlvhRDDKyE7XfQ5J/irdADylWjBwjVdIIuTaxd0ZSSoZ9FIQlzOT9qbr3xvZjYFpFXaTmQPq4cTRnl78Kq+H6BFyAey3LYjK91ZrbwUfx11QTT0Xb8yxMSTSU8UX2VHi1t+tWBefz3Qs6V+xwNLfZjhxef/9jvF65b2tK7w5aZaJ95cxaW+TL0tRecaixEwoVAl/AvWuW3NvlBSuonwkw5v+rPAyT0/Zr/CTMJcGu3VFwsvDWVraYMwi2g4Nmkl4xi9HZTxSiHGvdmWRVY+UV950eCZ2oYDO460xSyEHzrOqSQr4WluZtrwq+Iv9bb7OIGUjuSmkWocoSqsYKHY7d6KRjAhKIt6aSry6BmOE29w1hB1wo8w9DhCQdMHD0eCyFS7aM/VjqJo0whxgfkroujrgB1Pw/hSvBWc0svbhJOY/znjvJiJTjMb6Iq1GmCizf+C+lJAZLLamH2l06V9SUcq4kJyCVsWbW0ePME5M6gMIeT9vpMZvi1Ja/9bMUttvkq+muiyld4jzkliWud39ZAxSw== github-actions-deployment@opencrvs.org -ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABgQC7m25EEqcOEBTnNTTtB+Rn2/djWVQOB+O3g/zBhDKZ/lZP6ow+44m4Ud6i5jKiYitxo5eLJoI4vc+HNe8a9Jj7g5xi830e/67fRtMxZexJFEQOZBCb5eZ+VZbeVy7NiQIITQSXONvHWMwVvDzYKod+rwlLh04hDVmIg/yVAny0/wvNqczQoyOcCi9QCKqby3IeG81sUlX0gtwjd6XwxBHvC3HCpKT6DbS8bQSQUP6DDtiAHUyvlxmdTNmmfRP7UgmsY2w5czUiojzygZdiNpgRt82ueXQ4bozZ79ouS62XMf887dGAAKimiooQ/FCjfvDJzU0V6TI/7t3jQr01wwMX523bjxCZQ9ZGb8IBP2wJpWdUl6tU7ytrJnSxRhOLTowPUzwNRL6ggzfoYn0RyK8dXRkaBKVWZAYMgsfNT71XWKqtk24MG3UipLn74oQnvP1T2NFZo0NJf3JkjykMdA+umDAUIRDW6kOCqqzQ8M4fN6a9jgZFa0qbPSNSIuxQWx0= pyry+macbook@opencrvs.org -ssh-rsa 
AAAAB3NzaC1yc2EAAAADAQABAAABAQDWIF63S4f3z9wQMvWibmvl7MPuJ6EVrkP0HuvgNhcs/4DZYMcR/GRBvV4ldOSYMlBevIXycgGzNDxKJgENUuwIWanjBu7uVAHyD6+cIRD1h63qq7Cjv/2HYTfBDKOrKzPOhA6zWvKO0ZGWsjRXk5LWMCbKOkvKJCxOpj/NVBxeE4FTK5YADYPV3OSsmBtqTHrVLm2sMmShU/2hMYYswWkobidjX65+nK/X+3C+yJbHwiydVvn+QCrFlFfCLPWKe8rUpOxyxofPqWVQh6CHhHfT8okaOc9sOE8Qeip9ljo84DftJh3Xm3ynOdWK1hH2BvRvxNadWqcE1qECbkg4tx2x riku.rouvila@gmail.com +ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABgQDD1cap8psIi1wBHMKEufhXkME0MGxNifINbdJTbWyeJzQvYJALUHYAi1vt37jEXp9QvvmzUZry4Rh8zW598ZkXGhRejGXCx5kkXYYT2YX4xMTuoRmg/dxxkIpXwDkDxU06WUUw1ObRX8wbjJvO/r5QLYReUtR4R5O6Ac7WPe9KzdGrbm7Whqa+WOrSsXLmoqtqW1EuLKOKP73Ajwe8bSe2sghC42G2sBw9bb4sAUlAzdhC7XC9pIUTC7IRCfyvNt0wjv+BrnUexT1TztdQnTuQe65PHmhA6UFp9JBKwhwhqpm7idXyLZruEd4Y+aLN/aHWeQQhOp0lydzxAJerQ/KsLTUlS04kOnoeiUENrra6ItY0mW2C3iCO23Nm3wsUtXSxTze0CebUK2Ly1eO5gDNE4bOoMK+N/OjV1S/7fHeAXewCyyVNjL/0m6Jx0aDFTshxFFEfswR/wUIxwPogetcpyiV3CG2Mww/tO2j2Eyw7qZLKBhUOuz/xaeTRIb3FhS0= asaph@asaph-HP-ProBook-450-15-6-inch-G9-Notebook-PC +ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABgQDKHo6hJmE2ww1/thnZ6AEHNLCyTvZT1aMGoxNsvmmemLtDe9JvSB8CN874L+OzgEgHkI2MNm4u6MQW1c7evw043hSEjijzr964xsG7l1+GXasH1/p+6Tz/KDYNW31nnD7xakmZmoDTAkEFDj7IDL5vG2paw30rLvsvC526Zh8C12ndj8dz+b6h/k3ndGAdMzVPzvh55UwaX2jktXDOxQj+8ncelMWfIFStRYlwxB47a3CMBNycvhGSw5aUivKCxog10nIepgyHy4kjEI5+uBOcwUAkQ35KFO6aqHVt7uoLcA0pmeJGFrQLKGuAKWRF5z686UpT15ZaFC02ezySmAjQv7P5JFcuyy2cop6c+uTOi2DmvS+EUS55ggUyIk/uwexOiWuUteHrTw9ywUNI/PIgFUkCAOWl7cpasF3XR5tsHOLCPPW71pimwRK1QO/cwT5aVBy8ISVOzdSsuM3brMDJWLFOpJpQ+Lw4gvc2QiIJgVuPY9g6AoUeo/4I6TeV4Bs= aline@aline-HP-ProBook-450-15-6-inch-G9-Notebook-PC + diff --git a/infrastructure/create-github-environment.js b/infrastructure/create-github-environment.js index c380e6b9..90ef6d15 100644 --- a/infrastructure/create-github-environment.js +++ b/infrastructure/create-github-environment.js @@ -3,11 +3,13 @@ const { Octokit } = require('@octokit/core') const { writeFileSync } = require('fs') const config = { - environment: '', + environment: 
'test', repo: { - REPOSITORY_ID: '', - DOCKERHUB_ACCOUNT: '', // This may be a dockerhub organisation or the same as the username - DOCKERHUB_REPO: '', + REPOSITORY_ID: '701240725', + REPOSITORY_ACCOUNT: 'opencrvs', + REPOSITORY_NAME: 'opencrvs-cmu', + DOCKERHUB_ACCOUNT: 'cylabinterop', // This may be a dockerhub organisation or the same as the username + DOCKERHUB_REPO: 'opencrvs-cmu', DOCKER_USERNAME: process.env.DOCKER_USERNAME, DOCKER_TOKEN: process.env.DOCKER_TOKEN }, @@ -19,23 +21,25 @@ const config = { SSH_KEY: process.env.SSH_KEY // id_rsa }, infrastructure: { - DISK_SPACE: '', - HOSTNAME: '', // server machine hostname used when provisioning - TODO: Adapt to support 3 or 5 replicas - DOMAIN: '', // web hostname applied after all public subdomains in Traefik, + DISK_SPACE: '60g', + HOSTNAME: 'upanzivm03', // server machine hostname used when provisioning - TODO: Adapt to support 3 or 5 replicas + DOMAIN: 'opencrvs.africa.cmu.edu', // web hostname applied after all public subdomains in Traefik, REPLICAS: '1' // TODO: Adapt to support 3 or 5 replicas }, services: { SENTRY_DSN: process.env.SENTRY_DSN || '', - ELASTALERT_SLACK_WEBHOOK: process.env.ELASTALERT_SLACK_WEBHOOK || '', + ELASTALERT_SLACK_WEBHOOK: + process.env.ELASTALERT_SLACK_WEBHOOK || + 'https://hooks.slack.com/services/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', INFOBIP_API_KEY: process.env.INFOBIP_API_KEY || '', INFOBIP_GATEWAY_ENDPOINT: process.env.INFOBIP_GATEWAY_ENDPOINT || '', INFOBIP_SENDER_ID: process.env.INFOBIP_SENDER_ID || '' // the name of the SMS sender e.g. 
OpenCRVS }, seeding: { ACTIVATE_USERS: true, - AUTH_HOST: '', - COUNTRY_CONFIG_HOST: '', - GATEWAY_HOST: '' + AUTH_HOST: 'https://auth.opencrvs.africa.cmu.edu', + COUNTRY_CONFIG_HOST: 'https://countryconfig.opencrvs.africa.cmu.edu', + GATEWAY_HOST: 'https://gateway.opencrvs.africa.cmu.edu' }, smtp: { SMTP_HOST: process.env.SMTP_HOST || '', @@ -55,13 +59,13 @@ const config = { VPN_SERVERCERT: process.env.VPN_SERVERCERT || '' }, whitelist: { - CONTENT_SECURITY_POLICY_WILDCARD: '', // e.g. *. - CLIENT_APP_URL: '', - LOGIN_URL: '' + CONTENT_SECURITY_POLICY_WILDCARD: '*.opencrvs.africa.cmu.edu', // e.g. *. + CLIENT_APP_URL: 'https://register.opencrvs.africa.cmu.edu', + LOGIN_URL: 'https://login.opencrvs.africa.cmu.edu' }, backup: { BACKUP_HOST: process.env.BACKUP_HOST || '', - BACKUP_DIRECTORY: '', + BACKUP_DIRECTORY: '/home/opencrvs/backups', qa: { RESTORE_DIRECTORY: '' // If making use of script to restore a production backup on QA for regular monitoring } @@ -121,7 +125,7 @@ async function createSecret(environment, key, keyId, name, secret) { async function getPublicKey(environment) { await octokit.request( - `PUT /repos/${config.repo.DOCKERHUB_ACCOUNT}/${config.repo.DOCKERHUB_REPO}/environments/${environment}`, + `PUT /repos/${config.repo.REPOSITORY_ACCOUNT}/${config.repo.REPOSITORY_NAME}/environments/${environment}`, { headers: { 'X-GitHub-Api-Version': '2022-11-28' diff --git a/infrastructure/deploy.sh b/infrastructure/deploy.sh index 07170318..c6873b3f 100755 --- a/infrastructure/deploy.sh +++ b/infrastructure/deploy.sh @@ -104,6 +104,7 @@ fi if [ -z "$SSH_USER" ] ; then echo 'Error: Argument --ssh_user is required.' print_usage_and_exit +fi if [ -z "$COUNTRY_CONFIG_VERSION" ] ; then echo 'Error: Argument --country_config_version is required.' 
@@ -354,6 +355,9 @@ cp $BASEDIR/authorized_keys /tmp/opencrvs/infrastructure/authorized_keys # Copy metabase database cp $PARENT_DIR/src/api/dashboards/file/metabase.init.db.sql /tmp/opencrvs/infrastructure/metabase.init.db.sql +# Copy logrotate.conf +cp $BASEDIR/logrotate.conf /tmp/opencrvs/infrastructure/logrotate.conf + echo -e "$SSH_KEY" > /tmp/private_key_tmp chmod 600 /tmp/private_key_tmp echo -e "$KNOWN_HOSTS" > /tmp/known_hosts @@ -364,7 +368,8 @@ echo $(ssh-keygen -y -f /tmp/private_key_tmp) >> /tmp/opencrvs/infrastructure/au rotate_authorized_keys() { # file exists and has a size of more than 0 bytes if [ -s "/tmp/opencrvs/infrastructure/authorized_keys" ]; then - ssh $SSH_USER@$SSH_HOST 'cat /opt/opencrvs/infrastructure/authorized_keys > ~/.ssh/authorized_keys' + ROTATE_KEYS_COMMAND='cat /opt/opencrvs/infrastructure/authorized_keys > ~/.ssh/authorized_keys' + ssh $SSH_USER@$SSH_HOST "echo $SUDO_PASSWORD | sudo -S $ROTATE_KEYS_COMMAND" else echo "File /tmp/opencrvs/infrastructure/authorized_keys is empty. Did not rotate authorized keys!" 
fi @@ -392,7 +397,11 @@ rotate_secrets() { } # Setup configuration files and compose file for the deployment domain -ssh $SSH_USER@$SSH_HOST "SSH_USER=$SSH_USER SMTP_HOST=$SMTP_HOST SMTP_PORT=$SMTP_PORT SMTP_USERNAME=$SMTP_USERNAME SMTP_PASSWORD=$SMTP_PASSWORD ALERT_EMAIL=$ALERT_EMAIL MINIO_ROOT_USER=$MINIO_ROOT_USER MINIO_ROOT_PASSWORD=$MINIO_ROOT_PASSWORD /opt/opencrvs/infrastructure/setup-deploy-config.sh $HOST | tee -a $LOG_LOCATION/setup-deploy-config.log" +ssh $SSH_USER@$SSH_HOST "SSH_USER=$SSH_USER MINIO_ROOT_USER=$MINIO_ROOT_USER MINIO_ROOT_PASSWORD=$MINIO_ROOT_PASSWORD /opt/opencrvs/infrastructure/setup-deploy-config.sh $HOST | tee -a $LOG_LOCATION/setup-deploy-config.log" + +ssh $SSH_USER@$SSH_HOST "echo $SUDO_PASSWORD | sudo -S mv /opt/opencrvs/infrastructure/logrotate.conf /etc/logrotate.conf" + +ssh $SSH_USER@$SSH_HOST "echo $SUDO_PASSWORD | sudo -S mv /opt/opencrvs/infrastructure/metabase.init.db.sql /data/metabase/metabase.init.db.sql" # Takes in a space separated string of docker-compose.yml files # returns a new line separated list of images defined in those files @@ -583,7 +592,7 @@ if [ $CLEAR_DATA == "yes" ] ; then ELASTICSEARCH_ADMIN_PASSWORD=$ELASTICSEARCH_SUPERUSER_PASSWORD \ MONGODB_ADMIN_USER=$MONGODB_ADMIN_USER \ MONGODB_ADMIN_PASSWORD=$MONGODB_ADMIN_PASSWORD \ - /opt/opencrvs/infrastructure/clear-all-data.sh $REPLICAS" + /opt/opencrvs/infrastructure/clear-all-data.sh $REPLICAS $ENV $SUDO_PASSWORD" echo echo "Running migrations..." 
@@ -597,7 +606,7 @@ fi echo "Setting up Kibana config & alerts" while true; do - if ssh $SSH_USER@$SSH_HOST "ELASTICSEARCH_SUPERUSER_PASSWORD=$ELASTICSEARCH_SUPERUSER_PASSWORD HOST=kibana$HOST /opt/opencrvs/infrastructure/monitoring/kibana/setup-config.sh"; then + if ssh $SSH_USER@$SSH_HOST "ELASTICSEARCH_SUPERUSER_PASSWORD=$ELASTICSEARCH_SUPERUSER_PASSWORD HOST=kibana.$HOST /opt/opencrvs/infrastructure/monitoring/kibana/setup-config.sh"; then break fi sleep 5 diff --git a/infrastructure/docker-compose.deploy.yml b/infrastructure/docker-compose.deploy.yml index c43a0ee6..c52efafe 100644 --- a/infrastructure/docker-compose.deploy.yml +++ b/infrastructure/docker-compose.deploy.yml @@ -320,6 +320,9 @@ services: - /opt/opencrvs/infrastructure/mc-config:/root/.mc networks: - overlay_net + deploy: + labels: + - 'traefik.enable=false' logging: driver: gelf options: diff --git a/infrastructure/logrotate.conf b/infrastructure/logrotate.conf index b70cf46c..1e1d7f9c 100644 --- a/infrastructure/logrotate.conf +++ b/infrastructure/logrotate.conf @@ -47,35 +47,35 @@ include /etc/logrotate.d /var/log/opencrvs-backup.log { missingok monthly - create 0660 root utmp + create 0660 {{SSH_USER}} utmp rotate 1 } /var/log/setup-deploy-config.log { missingok monthly - create 0660 root utmp + create 0660 {{SSH_USER}} utmp rotate 1 } /var/log/rotate-secrets.log { missingok monthly - create 0660 root utmp + create 0660 {{SSH_USER}} utmp rotate 1 } /var/log/opencrvs-backup-service.log { missingok monthly - create 0660 root utmp + create 0660 {{SSH_USER}} utmp rotate 1 } /var/log/papertrail.log { missingok monthly - create 0660 root utmp + create 0660 {{SSH_USER}} utmp rotate 1 } diff --git a/infrastructure/server-setup/example-1.ini b/infrastructure/server-setup/example-1.ini index 3f2777aa..b2a25cff 100644 --- a/infrastructure/server-setup/example-1.ini +++ b/infrastructure/server-setup/example-1.ini @@ -8,11 +8,11 @@ ; Copyright (C) The OpenCRVS Authors located at 
https://github.com/opencrvs/opencrvs-core/blob/master/AUTHORS. [docker-manager-first] ; Uncomment the line below -; manager1 ansible_host="ENTER YOUR MANAGER HOST IP" +manager1 ansible_host="172.29.108.85" ansible_user=opencrvsusr ansible_ssh_private_key_file=~/.ssh/id_rsa ; Below you can assign 1 node to be the data node, use the node's HOSTNAME in these variables. ; These node will be used by databases to permanently store data. ; Used for Mongo replica sets [all:vars] ; Uncomment the line below -; data1_hostname=ENTER_HOSTNAME_1 +data1_hostname=upanzivm03 diff --git a/infrastructure/server-setup/playbook-1.yml b/infrastructure/server-setup/playbook-1.yml index 2ecf4ce7..1baa6489 100644 --- a/infrastructure/server-setup/playbook-1.yml +++ b/infrastructure/server-setup/playbook-1.yml @@ -19,6 +19,44 @@ file: path: /opt/opencrvs state: directory + + - name: Give read and write permissions to /opt/opencrvs + file: + path: /opt/opencrvs + owner: '{{ ansible_user }}' + group: '{{ ansible_user }}' + mode: 'u+rwX,g+rwX,o-rwx' + + - name: Give read and write permissions to logrotate.conf because we replace it on each deploy + file: + path: /etc/logrotate.conf + owner: '{{ ansible_user }}' + group: '{{ ansible_user }}' + mode: 'u+rwX,g+rwX,o-rwx' + + - name: Create deploy logfile + ansible.builtin.file: + path: /var/log/setup-deploy-config.log + owner: '{{ ansible_user }}' + group: '{{ ansible_user }}' + state: touch + mode: 'u+rwX,g+rwX,o-rwx' + + - name: Create secret logfile + ansible.builtin.file: + path: /var/log/rotate-secrets.log + owner: '{{ ansible_user }}' + group: '{{ ansible_user }}' + state: touch + mode: 'u+rwX,g+rwX,o-rwx' + + - name: Create backup logfile + ansible.builtin.file: + path: /var/log/opencrvs-backup.log + owner: '{{ ansible_user }}' + group: '{{ ansible_user }}' + state: touch + mode: 'u+rwX,g+rwX,o-rwx' - name: 'Check mandatory variables are defined' assert: @@ -36,6 +74,17 @@ 
elasticsearch_superuser_password={{elasticsearch_superuser_password}} disk_encryption_key={{ disk_encryption_key }} + - name: Ensure group "docker" exists + ansible.builtin.group: + name: docker + state: present + + - name: Add user to Docker group + user: + name: '{{ ansible_user }}' + groups: "docker, sudo" + append: yes + - name: 'Add docker repository key' apt_key: url: https://download.docker.com/linux/ubuntu/gpg diff --git a/infrastructure/setup-deploy-config.sh b/infrastructure/setup-deploy-config.sh index ef40e56a..33132e88 100755 --- a/infrastructure/setup-deploy-config.sh +++ b/infrastructure/setup-deploy-config.sh @@ -23,9 +23,6 @@ sed -i "s/{{hostname}}/$1/g" /opt/opencrvs/docker-compose.deploy.yml KIBANA_ENCRYPTION_KEY=`uuidgen` sed -i "s/{{KIBANA_ENCRYPTION_KEY}}/$KIBANA_ENCRYPTION_KEY/g" /opt/opencrvs/infrastructure/monitoring/kibana/kibana.yml -# Move metabase file -mv /opt/opencrvs/infrastructure/metabase.init.db.sql /data/metabase/metabase.init.db.sql - # Replace environment variables from all alert definition files for file in /opt/opencrvs/infrastructure/monitoring/elastalert/rules/*.yaml; do sed -i -e "s%{{HOST}}%$1%" $file diff --git a/infrastructure/test-restore-backup.sh b/infrastructure/test-restore-backup.sh deleted file mode 100755 index 1022aab7..00000000 --- a/infrastructure/test-restore-backup.sh +++ /dev/null @@ -1,115 +0,0 @@ -# This Source Code Form is subject to the terms of the Mozilla Public -# License, v. 2.0. If a copy of the MPL was not distributed with this -# file, You can obtain one at https://mozilla.org/MPL/2.0/. -# -# OpenCRVS is also distributed under the terms of the Civil Registration -# & Healthcare Disclaimer located at http://opencrvs.org/license. -# -# Copyright (C) The OpenCRVS Authors located at https://github.com/opencrvs/opencrvs-core/blob/master/AUTHORS. 
- -#------------------------------------------------------------------------------------------------------------------ -# THIS SCRIPT RUNS FROM A GITHUB ACTION TO TEST RESTORE A BACKUP ONTO A QA SERVER AS A REGULAR MONITORING EXERCISE -#------------------------------------------------------------------------------------------------------------------ - -# Uncomment if your SSH user is not root -# -#if [ -z "$SUDO_PASSWORD" ] ; then -# echo 'Error: Missing environment variable SUDO_PASSWORD.' -# exit 1 -#fi - -if [ -z "$SSH_USER" ] ; then - echo 'Error: Missing environment variable SSH_USER.' - exit 1 -fi - -if [ -z "$SSH_HOST" ] ; then - echo 'Error: Missing environment variable SSH_HOST.' - exit 1 -fi - -if [ -z "$BACKUP_HOST" ] ; then - echo 'Error: Missing environment variable BACKUP_HOST.' - exit 1 -fi - -if [ -z "$BACKUP_DIRECTORY" ] ; then - echo 'Error: Missing environment variable BACKUP_DIRECTORY.' - exit 1 -fi - -if [ -z "$RESTORE_DIRECTORY" ] ; then - echo 'Error: Missing environment variable RESTORE_DIRECTORY.' - exit 1 -fi - -if [ -z "$REPLICAS" ] ; then - echo 'Error: Missing environment variable REPLICAS.' - exit 1 -fi - -if [ -z "$QA_BACKUP_LABEL" ] ; then - echo 'Error: Missing environment variable QA_BACKUP_LABEL.' - exit 1 -fi - -if [[ $REVERTING == "no" && -z "$PROD_BACKUP_LABEL" ]] ; then - echo 'Error: Missing environment variable PROD_BACKUP_LABEL when restoring a production backup.' - exit 1 -fi - -if [ -z "$REVERTING" ] ; then - echo 'Error: Missing environment variable REVERTING.' 
- exit 1 -fi - -if [ $REVERTING == "no" ] ; then - # Backup QA environment first - ssh "$SSH_USER@$SSH_HOST" "echo $SUDO_PASSWORD | sudo -S bash /opt/opencrvs/infrastructure/emergency-backup-metadata.sh --ssh_user=$SSH_USER --ssh_host=$BACKUP_HOST --ssh_port=22 --production_ip=$SSH_HOST --remote_dir=$BACKUP_DIRECTORY/qa --replicas=$REPLICAS --label=$QA_BACKUP_LABEL" - LABEL="$PROD_BACKUP_LABEL" - REMOTE_DIR="$BACKUP_DIRECTORY/$LABEL" -else - LABEL="$QA_BACKUP_LABEL" - REMOTE_DIR="$BACKUP_DIRECTORY/qa/$LABEL" -fi - -# Copy production backup into restore folder -ssh "$SSH_USER@$SSH_HOST" "rm -rf $RESTORE_DIRECTORY/elasticsearch" -ssh "$SSH_USER@$SSH_HOST" "rm -rf $RESTORE_DIRECTORY/elasticsearch/indices" -ssh "$SSH_USER@$SSH_HOST" "rm -rf $RESTORE_DIRECTORY/influxdb" -ssh "$SSH_USER@$SSH_HOST" "rm -rf $RESTORE_DIRECTORY/mongo" -ssh "$SSH_USER@$SSH_HOST" "rm -rf $RESTORE_DIRECTORY/minio" -ssh "$SSH_USER@$SSH_HOST" "rm -rf $RESTORE_DIRECTORY/metabase" -ssh "$SSH_USER@$SSH_HOST" "rm -rf $RESTORE_DIRECTORY/vsexport" -ssh "$SSH_USER@$SSH_HOST" "rm -rf $RESTORE_DIRECTORY/metabase" - -ssh "$SSH_USER@$SSH_HOST" "rsync -av --delete --progress $SSH_USER@$BACKUP_HOST:$REMOTE_DIR/elasticsearch $RESTORE_DIRECTORY" -ssh "$SSH_USER@$SSH_HOST" "rsync -av --delete --progress $SSH_USER@$BACKUP_HOST:$REMOTE_DIR/mongo/hearth-dev-$LABEL.gz $RESTORE_DIRECTORY/mongo/" -ssh "$SSH_USER@$SSH_HOST" "rsync -av --delete --progress $SSH_USER@$BACKUP_HOST:$REMOTE_DIR/mongo/openhim-dev-$LABEL.gz $RESTORE_DIRECTORY/mongo/" -ssh "$SSH_USER@$SSH_HOST" "rsync -av --delete --progress $SSH_USER@$BACKUP_HOST:$REMOTE_DIR/mongo/user-mgnt-$LABEL.gz $RESTORE_DIRECTORY/mongo/" -ssh "$SSH_USER@$SSH_HOST" "rsync -av --delete --progress $SSH_USER@$BACKUP_HOST:$REMOTE_DIR/mongo/application-config-$LABEL.gz $RESTORE_DIRECTORY/mongo/" -ssh "$SSH_USER@$SSH_HOST" "rsync -av --delete --progress $SSH_USER@$BACKUP_HOST:$REMOTE_DIR/mongo/metrics-$LABEL.gz $RESTORE_DIRECTORY/mongo/" -ssh "$SSH_USER@$SSH_HOST" "rsync -av 
--delete --progress $SSH_USER@$BACKUP_HOST:$REMOTE_DIR/mongo/webhooks-$LABEL.gz $RESTORE_DIRECTORY/mongo/" -ssh "$SSH_USER@$SSH_HOST" "rsync -av --delete --progress $SSH_USER@$BACKUP_HOST:$REMOTE_DIR/mongo/performance-$LABEL.gz $RESTORE_DIRECTORY/mongo/" -ssh "$SSH_USER@$SSH_HOST" "rsync -av --delete --progress $SSH_USER@$BACKUP_HOST:$REMOTE_DIR/influxdb/$LABEL $RESTORE_DIRECTORY/influxdb" -ssh "$SSH_USER@$SSH_HOST" "rsync -av --delete --progress $SSH_USER@$BACKUP_HOST:$REMOTE_DIR/minio/ocrvs-$LABEL.tar.gz $RESTORE_DIRECTORY/minio/" -ssh "$SSH_USER@$SSH_HOST" "rsync -av --delete --progress $SSH_USER@$BACKUP_HOST:$REMOTE_DIR/metabase/ocrvs-$LABEL.tar.gz $RESTORE_DIRECTORY/metabase/" -ssh "$SSH_USER@$SSH_HOST" "rsync -av --delete --progress $SSH_USER@$BACKUP_HOST:$REMOTE_DIR/vsexport/ocrvs-$LABEL.tar.gz $RESTORE_DIRECTORY/vsexport/" - -# Restore -echo "Restoring" -# Uncomment if your SSH user is not root -# -# ssh "$SSH_USER@$SSH_HOST" "echo $SUDO_PASSWORD | sudo -S rm -rf /data/backups/elasticsearch && mv $RESTORE_DIRECTORY/elasticsearch /data/backups/" -# else -ssh "$SSH_USER@$SSH_HOST" "rm -rf /data/backups/elasticsearch && mv $RESTORE_DIRECTORY/elasticsearch /data/backups/" - -ssh "$SSH_USER@$SSH_HOST" "docker service update --force --update-parallelism 1 --update-delay 30s opencrvs_elasticsearch" -echo "Waiting 2 mins for elasticsearch to restart." 
-echo -sleep 120 -# Uncomment if your SSH user is not root -# -# ssh "$SSH_USER@$SSH_HOST" "echo $SUDO_PASSWORD | sudo -S bash /opt/opencrvs/infrastructure/emergency-restore-metadata.sh --label=$LABEL --replicas=$REPLICAS --backup-dir=$RESTORE_DIRECTORY" -# else -ssh "/opt/opencrvs/infrastructure/emergency-restore-metadata.sh --label=$LABEL --replicas=$REPLICAS --backup-dir=$RESTORE_DIRECTORY" \ No newline at end of file diff --git a/package.json b/package.json index cf583ff2..3e94ee4d 100644 --- a/package.json +++ b/package.json @@ -56,6 +56,7 @@ "jest-fetch-mock": "^3.0.3", "json2csv": "^4.3.0", "jsonwebtoken": "^9.0.0", + "libsodium-wrappers": "^0.7.11", "lint-staged": "^7.1.0", "niceware": "^2.0.2", "nodemon": "^2.0.22", diff --git a/yarn.lock b/yarn.lock index a9200f3e..184e0e82 100644 --- a/yarn.lock +++ b/yarn.lock @@ -7525,6 +7525,18 @@ levn@~0.3.0: prelude-ls "~1.1.2" type-check "~0.3.2" +libsodium-wrappers@^0.7.11: + version "0.7.13" + resolved "https://registry.yarnpkg.com/libsodium-wrappers/-/libsodium-wrappers-0.7.13.tgz#83299e06ee1466057ba0e64e532777d2929b90d3" + integrity sha512-kasvDsEi/r1fMzKouIDv7B8I6vNmknXwGiYodErGuESoFTohGSKZplFtVxZqHaoQ217AynyIFgnOVRitpHs0Qw== + dependencies: + libsodium "^0.7.13" + +libsodium@^0.7.13: + version "0.7.13" + resolved "https://registry.yarnpkg.com/libsodium/-/libsodium-0.7.13.tgz#230712ec0b7447c57b39489c48a4af01985fb393" + integrity sha512-mK8ju0fnrKXXfleL53vtp9xiPq5hKM0zbDQtcxQIsSmxNgSxqCj6R7Hl9PkrNe2j29T4yoDaF7DJLK9/i5iWUw== + lines-and-columns@^1.1.6: version "1.2.4" resolved "https://registry.yarnpkg.com/lines-and-columns/-/lines-and-columns-1.2.4.tgz#eca284f75d2965079309dc0ad9255abb2ebc1632"