Refactor restore scripts to make them more robust and create events
Signed-off-by: Nicolas Bigler <[email protected]>
TheBigLee committed Jul 26, 2023
1 parent 1de69dc commit 7aa39eb
Showing 3 changed files with 61 additions and 8 deletions.
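As the commit message says, the reworked cleanup script now posts Kubernetes Events against the VSHNRedis claim once the restore job finishes. A hedged way to check for those events from the claim namespace; "my-namespace" and "my-redis" are placeholder names, not values from this commit:

# Hypothetical check; "my-namespace" and "my-redis" are placeholders.
# Lists the events the cleanup job posts against the claim (RestoreFailed / RestoreSucceeded).
kubectl -n my-namespace get events \
  --field-selector involvedObject.kind=VSHNRedis,involvedObject.name=my-redis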
14 changes: 12 additions & 2 deletions pkg/comp-functions/functions/vshnredis/restore.go
@@ -105,7 +105,7 @@ func addPrepareRestoreJob(ctx context.Context, comp *vshnv1.VSHNRedis, iof *runt
Value: comp.ObjectMeta.Labels[claimNamespaceLabel],
},
{
Name: "CLAIM_NAME",
Name: "SOURCE_CLAIM_NAME",
Value: comp.Spec.Parameters.Restore.ClaimName,
},
{
@@ -247,6 +247,8 @@ func addCleanUpJob(ctx context.Context, comp *vshnv1.VSHNRedis, iof *runtime.Run
cleanupRestoreJobName := truncateObjectName(comp.Name + "-" + comp.Spec.Parameters.Restore.BackupName + "-cleanup-job")
restoreJobName := truncateObjectName(comp.Name + "-" + comp.Spec.Parameters.Restore.BackupName + "-restore-job")
restoreSecret := "statefulset-replicas-" + comp.Spec.Parameters.Restore.ClaimName + "-" + comp.Spec.Parameters.Restore.BackupName
claimNamespaceLabel := "crossplane.io/claim-namespace"
claimNameLabel := "crossplane.io/claim-name"

prepJob := &batchv1.Job{
ObjectMeta: metav1.ObjectMeta{
@@ -269,9 +271,17 @@ func addCleanUpJob(ctx context.Context, comp *vshnv1.VSHNRedis, iof *runtime.Run
Args: []string{cleanupRestoreScript},
Env: []corev1.EnvVar{
{
Name: "CLAIM_NAME",
Name: "CLAIM_NAMESPACE",
Value: comp.ObjectMeta.Labels[claimNamespaceLabel],
},
{
Name: "SOURCE_CLAIM_NAME",
Value: comp.Spec.Parameters.Restore.ClaimName,
},
{
Name: "DEST_CLAIM_NAME",
Value: comp.ObjectMeta.Labels[claimNameLabel],
},
{
Name: "RESTORE_JOB_NAME",
Value: restoreJobName,
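With this change the cleanup Job receives CLAIM_NAMESPACE, SOURCE_CLAIM_NAME and DEST_CLAIM_NAME instead of a single CLAIM_NAME. A minimal sketch of spot-checking the rendered Job's environment; the instance namespace and job name below are placeholders, not values from this commit:

# Hypothetical spot check; namespace and job name are placeholders.
# Prints every env var the cleanup container will receive.
kubectl -n vshn-redis-my-redis get job my-redis-mybackup-cleanup-job \
  -o jsonpath='{range .spec.template.spec.containers[0].env[*]}{.name}={.value}{"\n"}{end}'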
51 changes: 47 additions & 4 deletions pkg/comp-functions/functions/vshnredis/script/cleanupRestore.sh
@@ -4,18 +4,61 @@ set -euo pipefail

echo "Wait for restore to complete"

counter=0
until [ $counter -eq 300 ] || [[ $(kubectl -n "${TARGET_NAMESPACE}" get job "${RESTORE_JOB_NAME}" -o jsonpath='{.status.succeeded}' 2> /dev/null) -eq 1 ]];
until [[ $(kubectl -n "${TARGET_NAMESPACE}" get job "${RESTORE_JOB_NAME}" -o jsonpath='{.status.succeeded}' 2> /dev/null) -eq 1 ]] || [[ $(kubectl -n "${TARGET_NAMESPACE}" get job "${RESTORE_JOB_NAME}" -o jsonpath='{.status.failed}' 2> /dev/null) -eq 1 ]];
do
(( counter+=1 ))
sleep 1
done

if [[ $(kubectl -n "${TARGET_NAMESPACE}" get job "${RESTORE_JOB_NAME}" -o jsonpath='{.status.failed}' 2> /dev/null) -eq 1 ]]; then
kubectl apply -f - <<EOF
apiVersion: v1
kind: Event
metadata:
  name: "${DEST_CLAIM_NAME}-restore-failed"
  namespace: "${CLAIM_NAMESPACE}"
type: Warning
firstTimestamp: $(date --utc +%FT%TZ)
lastTimestamp: $(date --utc +%FT%TZ)
message: "Restore of ${DEST_CLAIM_NAME} failed"
involvedObject:
  apiVersion: vshn.appcat.vshn.io/v1
  kind: VSHNRedis
  name: "${DEST_CLAIM_NAME}"
  namespace: "${CLAIM_NAMESPACE}"
  uid: "$(kubectl -n ${CLAIM_NAMESPACE} get vshnredis.vshn.appcat.vshn.io ${DEST_CLAIM_NAME} -o jsonpath='{.metadata.uid}')"
reason: RestoreFailed
source:
  component: "${TARGET_NAMESPACE}/${RESTORE_JOB_NAME}/job.batch/v1"
EOF
fi

if [[ $(kubectl -n "${TARGET_NAMESPACE}" get job "${RESTORE_JOB_NAME}" -o jsonpath='{.status.succeeded}' 2> /dev/null) -eq 1 ]]; then
kubectl apply -f - <<EOF
apiVersion: v1
kind: Event
metadata:
  name: "${DEST_CLAIM_NAME}-restore-completed"
  namespace: "${CLAIM_NAMESPACE}"
type: Normal
firstTimestamp: $(date --utc +%FT%TZ)
lastTimestamp: $(date --utc +%FT%TZ)
message: "Restore of ${DEST_CLAIM_NAME} succeeded"
involvedObject:
  apiVersion: vshn.appcat.vshn.io/v1
  kind: VSHNRedis
  name: "${DEST_CLAIM_NAME}"
  namespace: "${CLAIM_NAMESPACE}"
  uid: "$(kubectl -n ${CLAIM_NAMESPACE} get vshnredis.vshn.appcat.vshn.io ${DEST_CLAIM_NAME} -o jsonpath='{.metadata.uid}')"
reason: RestoreSucceeded
source:
  component: "${TARGET_NAMESPACE}/${RESTORE_JOB_NAME}/job.batch/v1"
EOF
fi
echo "scaling up redis"

kubectl -n "${TARGET_NAMESPACE}" scale statefulset redis-master --replicas "${NUM_REPLICAS}"

echo "cleanup secret"

kubectl -n "${TARGET_NAMESPACE}" delete secret "restore-credentials-${BACKUP_NAME}"
kubectl delete secret "statefulset-replicas-${CLAIM_NAME}-${BACKUP_NAME}"
kubectl delete secret "statefulset-replicas-${SOURCE_CLAIM_NAME}-${BACKUP_NAME}"
4 changes: 2 additions & 2 deletions pkg/comp-functions/functions/vshnredis/script/prepRestore.sh
@@ -2,7 +2,7 @@

set -euo pipefail

source_namespace=$(kubectl -n "${CLAIM_NAMESPACE}" get vshnredis "${CLAIM_NAME}" -ojson | jq -r '.status.instanceNamespace')
source_namespace=$(kubectl -n "${CLAIM_NAMESPACE}" get vshnredis "${SOURCE_CLAIM_NAME}" -ojson | jq -r '.status.instanceNamespace')

echo "copy secret"

@@ -14,7 +14,7 @@ backup_path=$(kubectl -n "${source_namespace}" get snapshots.k8up.io "${BACKUP_N
backup_name=$(kubectl -n "${source_namespace}" get snapshots.k8up.io "${BACKUP_NAME}" -o jsonpath='{.spec.id}')
num_replicas=$(kubectl -n "${TARGET_NAMESPACE}" get statefulset redis-master -o jsonpath='{.spec.replicas}')
kubectl -n "${TARGET_NAMESPACE}" create secret generic "restore-credentials-${BACKUP_NAME}" --from-literal AWS_ACCESS_KEY_ID="${access_key}" --from-literal AWS_SECRET_ACCESS_KEY="${secret_key}" --from-literal RESTIC_PASSWORD="${restic_password}" --from-literal RESTIC_REPOSITORY="${restic_repository}" --from-literal BACKUP_PATH="${backup_path}" --from-literal BACKUP_NAME="${backup_name}"
kubectl create secret generic "statefulset-replicas-${CLAIM_NAME}-${BACKUP_NAME}" --from-literal NUM_REPLICAS="${num_replicas}"
kubectl create secret generic "statefulset-replicas-${SOURCE_CLAIM_NAME}-${BACKUP_NAME}" --from-literal NUM_REPLICAS="${num_replicas}"
echo "scaling down redis"

until kubectl -n "${TARGET_NAMESPACE}" get statefulset redis-master > /dev/null 2>&1
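prepRestore.sh stores the StatefulSet's original replica count in the statefulset-replicas-${SOURCE_CLAIM_NAME}-${BACKUP_NAME} secret; the cleanup job later scales redis-master back up to NUM_REPLICAS (presumably fed from that secret) and then deletes it. A hedged example of inspecting the stored value while a restore is in flight; the claim and backup names are placeholders:

# Hypothetical inspection, assuming SOURCE_CLAIM_NAME=my-redis and BACKUP_NAME=backup-1.
kubectl get secret statefulset-replicas-my-redis-backup-1 \
  -o jsonpath='{.data.NUM_REPLICAS}' | base64 -d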
