Skip to content

Commit

Permalink
adapt to latest disk-objectstore PR161
Browse files Browse the repository at this point in the history
  • Loading branch information
eimrek committed Dec 7, 2023
1 parent c15fda0 commit 92e2bd9
Show file tree
Hide file tree
Showing 3 changed files with 34 additions and 56 deletions.
23 changes: 5 additions & 18 deletions aiida/cmdline/commands/cmd_storage.py
Original file line number Diff line number Diff line change
Expand Up @@ -8,14 +8,15 @@
# For further information please visit http://www.aiida.net #
###########################################################################
"""`verdi storage` commands."""
import sys

import click
from click_spinner import spinner

from aiida.cmdline.commands.cmd_verdi import verdi
from aiida.cmdline.params import options
from aiida.cmdline.utils import decorators, echo
from aiida.common import exceptions
from aiida.storage.log import STORAGE_LOGGER


@verdi.group('storage')
Expand Down Expand Up @@ -203,29 +204,15 @@ def storage_backup(dest: str, keep: int, pg_dump_exe: str, rsync_exe: str):
NOTE: 'rsync' and other UNIX-specific commands are called, thus the command will not work on
non-UNIX environments.
"""
from disk_objectstore import backup_utils

from aiida.manage.manager import get_manager

manager = get_manager()
storage = manager.get_profile_storage()

try:
backup_utils_instance = backup_utils.BackupUtilities(dest, keep, rsync_exe, STORAGE_LOGGER)
except ValueError as exc:
click.echo(f'Error: {exc}')
return

success = backup_utils_instance.validate_inputs(additional_exes=[pg_dump_exe])
success = storage.backup(dest, keep, exes={'rsync': rsync_exe, 'pg_dump': pg_dump_exe})
if not success:
click.echo('Input validation failed.')
return

success = backup_utils_instance.backup_auto_folders(
lambda path, prev: storage.backup(backup_utils_instance, path, prev_backup=prev, pg_dump_exe=pg_dump_exe)
)
if not success:
click.echo('Error: backup failed.')
return
click.echo('Backup was not successful.')
sys.exit(1)

click.echo(f'Success! Profile backed up to {dest}')
19 changes: 4 additions & 15 deletions aiida/orm/implementation/storage_backend.py
Original file line number Diff line number Diff line change
Expand Up @@ -11,11 +11,8 @@
from __future__ import annotations

import abc
import pathlib
from typing import TYPE_CHECKING, Any, ContextManager, List, Optional, Sequence, TypeVar, Union

from disk_objectstore import backup_utils

if TYPE_CHECKING:
from aiida.manage.configuration.profile import Profile
from aiida.orm.autogroup import AutogroupManager
Expand Down Expand Up @@ -304,24 +301,16 @@ def maintain(self, full: bool = False, dry_run: bool = False, **kwargs) -> None:
@abc.abstractmethod
def backup(
self,
backup_utils_instance: backup_utils.BackupUtilities,
path: pathlib.Path,
prev_backup: Optional[pathlib.Path] = None,
pg_dump_exe: str = 'pg_dump',
dest: str,
keep: int,
exes: dict,
) -> bool:
"""Create a backup of the storage contents.
:param path:
Path to where the backup will be created. If 'remote' is specified, must be an absolute path,
otherwise can be relative.
:param prev_backup:
Path to the previous backup. Rsync calls will be hard-linked to this path, making the backup
incremental and efficient. If this is specified, the automatic folder management is not used.
:return:
True is successful and False if unsuccessful.
"""
raise NotImplementedError

def get_info(self, detailed: bool = False) -> dict:
"""Return general information on the storage.
Expand Down
48 changes: 25 additions & 23 deletions aiida/storage/psql_dos/backend.py
Original file line number Diff line number Diff line change
Expand Up @@ -451,14 +451,12 @@ def get_info(self, detailed: bool = False) -> dict:
results['repository'] = self.get_repository().get_info(detailed)
return results


def backup( # pylint: disable=too-many-locals, too-many-return-statements, too-many-branches, too-many-statements
def _backup(
self,
backup_utils_instance: backup_utils.BackupUtilities,
manager: backup_utils.BackupManager,
path: pathlib.Path,
prev_backup: Optional[pathlib.Path] = None,
pg_dump_exe: str = 'pg_dump',
) -> bool:
) -> None:
"""Create a backup of the postgres database and disk-objectstore to the provided path.
:param path:
Expand Down Expand Up @@ -488,13 +486,13 @@ def backup( # pylint: disable=too-many-locals, too-many-return-statements, too-m
try:
ProfileAccessManager(self._profile).request_access()
except LockedProfileError:
STORAGE_LOGGER.error('The profile is locked!')
return False
raise backup_utils.BackupError('The profile is locked!')

# step 1: first run the storage maintenance version that can safely be performed while aiida is running
self.maintain(full=False, compress=True)

# step 2: dump the PostgreSQL database into a temporary directory
pg_dump_exe = manager.exes['pg_dump']
with tempfile.TemporaryDirectory() as temp_dir_name:
psql_temp_loc = pathlib.Path(temp_dir_name) / 'db.psql'

Expand All @@ -508,36 +506,40 @@ def backup( # pylint: disable=too-many-locals, too-many-return-statements, too-m
try:
subprocess.run(cmd, check=True, env=env)
except subprocess.CalledProcessError as exc:
STORAGE_LOGGER.error(f'pg_dump: {exc}')
return False
raise backup_utils.BackupError(f'pg_dump: {exc}')

if psql_temp_loc.is_file():
STORAGE_LOGGER.info(f'Dumped the PostgreSQL database to {str(psql_temp_loc)}')
else:
STORAGE_LOGGER.error(f"'{str(psql_temp_loc)}' was not created.")
return False
raise backup_utils.BackupError(f"'{str(psql_temp_loc)}' was not created.")

# step 3: transfer the PostgreSQL database file
success = backup_utils_instance.call_rsync(
psql_temp_loc, path, link_dest=prev_backup, dest_trailing_slash=True
)
if not success:
return False
manager.call_rsync(psql_temp_loc, path, link_dest=prev_backup, dest_trailing_slash=True)

# step 4: back up the disk-objectstore
success = backup_utils_instance.backup_container(
container, path / 'container', prev_backup=prev_backup / 'container' if prev_backup else None
backup_utils.backup_container(
manager, container, path / 'container', prev_backup=prev_backup / 'container' if prev_backup else None
)
if not success:
return False

# step 5: back up aiida config.json file
try:
config = get_config()
success = backup_utils_instance.call_rsync(pathlib.Path(config.filepath), path)
if not success:
return False
manager.call_rsync(pathlib.Path(config.filepath), path)
except (exceptions.MissingConfigurationError, exceptions.ConfigurationError):
STORAGE_LOGGER.warning('aiida config.json not found!')

def backup(
    self,
    dest: str,
    keep: int,
    exes: dict,
) -> bool:
    """Back up this profile's storage to ``dest``.

    Delegates orchestration to ``disk_objectstore.backup_utils``: a
    ``BackupManager`` is constructed with the destination, the number of
    backups to keep, and the executables mapping, and the per-folder work
    is performed by :meth:`_backup` via ``backup_auto_folders``.

    :param dest: destination of the backup (passed to ``BackupManager``).
    :param keep: number of previous backups to keep (passed to ``BackupManager``).
    :param exes: mapping of executable names to paths (e.g. ``rsync``/``pg_dump``)
        — presumably validated by ``BackupManager``; confirm against its API.
    :return: ``True`` on success, ``False`` if a ``BackupError`` was raised.
    """
    try:
        manager = backup_utils.BackupManager(dest, STORAGE_LOGGER, exes=exes, keep=keep)
        run_backup = lambda path, prev: self._backup(manager, path, prev)  # noqa: E731
        manager.backup_auto_folders(run_backup)
    except backup_utils.BackupError as exc:
        STORAGE_LOGGER.error(f'Error: {exc}')
        return False
    return True

0 comments on commit 92e2bd9

Please sign in to comment.