
Remove the "dump_data_sources" command hook, as it doesn't really solve the use case and works differently than all the other command hooks ().

This commit is contained in:
Dan Helfman 2025-03-20 11:13:37 -07:00
parent 624a7de622
commit c2409d9968
22 changed files with 428 additions and 857 deletions


@@ -959,7 +959,6 @@ properties:
                 - repository
                 - configuration
                 - everything
-                - dump_data_sources
             description: |
                 Name for the point in borgmatic's execution that
                 the commands should be run before (required if
@@ -972,19 +971,7 @@ properties:
                   repositories in the current configuration file.
                 * "everything" runs before all configuration
                   files.
-                * "dump_data_sources" runs before each data
-                  source is dumped.
             example: action
-        hooks:
-            type: array
-            items:
-                type: string
-            description: |
-                List of names of other hooks that this command
-                hook applies to. Defaults to all hooks of the
-                relevant type. Only supported for the
-                "dump_data_sources" hook.
-            example: postgresql
         when:
             type: array
             items:
@@ -1013,9 +1000,7 @@ properties:
                 - borg
             description: |
                 List of actions for which the commands will be
-                run. Defaults to running for all actions. Ignored
-                for "dump_data_sources", which by its nature only
-                runs for "create".
+                run. Defaults to running for all actions.
             example: [create, prune, compact, check]
         run:
             type: array
@@ -1037,7 +1022,6 @@ properties:
                 - configuration
                 - everything
                 - error
-                - dump_data_sources
             description: |
                 Name for the point in borgmatic's execution that
                 the commands should be run after (required if
@@ -1051,19 +1035,7 @@ properties:
                 * "everything" runs after all configuration
                   files.
                 * "error" runs after an error occurs.
-                * "dump_data_sources" runs after each data
-                  source is dumped.
             example: action
-        hooks:
-            type: array
-            items:
-                type: string
-            description: |
-                List of names of other hooks that this command
-                hook applies to. Defaults to all hooks of the
-                relevant type. Only supported for the
-                "dump_data_sources" hook.
-            example: postgresql
         when:
             type: array
             items:
@@ -1093,9 +1065,7 @@ properties:
             description: |
                 Only trigger the hook when borgmatic is run with
                 particular actions listed here. Defaults to
-                running for all actions. Ignored for
-                "dump_data_sources", which by its nature only runs
-                for "create".
+                running for all actions.
             example: [create, prune, compact, check]
         run:
             type: array
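
To make the schema change concrete, here's a minimal sketch (hypothetical code, not borgmatic's actual validator) of the command hook shape that remains valid after this commit:

```python
# Hypothetical check mirroring the updated schema: 'dump_data_sources' is no
# longer a valid trigger, and the 'hooks' option is gone entirely.
VALID_BEFORE_TRIGGERS = {'action', 'repository', 'configuration', 'everything'}

command_hook = {
    'before': 'action',           # must be one of VALID_BEFORE_TRIGGERS
    'when': ['create', 'prune'],  # still supported: limit to particular actions
    'run': ['echo "Starting a backup."'],
}

assert command_hook['before'] in VALID_BEFORE_TRIGGERS
assert 'hooks' not in command_hook
```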


@@ -55,11 +55,9 @@ def filter_hooks(command_hooks, before=None, after=None, hook_name=None, action_
     return tuple(
         hook_config
         for hook_config in command_hooks or ()
-        for config_hook_names in (hook_config.get('hooks'),)
         for config_action_names in (hook_config.get('when'),)
         if before is None or hook_config.get('before') == before
         if after is None or hook_config.get('after') == after
-        if hook_name is None or config_hook_names is None or hook_name in config_hook_names
         if action_names is None
         or config_action_names is None
         or set(config_action_names or ()).intersection(set(action_names))
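
For reference, the simplified filtering can be read in isolation as this sketch (a standalone reimplementation of the remaining logic; `filter_hooks_sketch` and the sample hooks are illustrative, not borgmatic's API):

```python
def filter_hooks_sketch(command_hooks, before=None, after=None, action_names=None):
    # Keep only hooks whose 'before'/'after' trigger matches and whose 'when'
    # list (if present) intersects the actions being run.
    return tuple(
        hook_config
        for hook_config in command_hooks or ()
        for config_action_names in (hook_config.get('when'),)
        if before is None or hook_config.get('before') == before
        if after is None or hook_config.get('after') == after
        if action_names is None
        or config_action_names is None
        or set(config_action_names).intersection(set(action_names))
    )


hooks = (
    {'before': 'action', 'when': ['create'], 'run': ['echo create']},
    {'before': 'action', 'when': ['check'], 'run': ['echo check']},
)
assert filter_hooks_sketch(hooks, before='action', action_names=['create']) == (hooks[0],)
```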


@@ -6,7 +6,6 @@ import os

 import borgmatic.borg.pattern
 import borgmatic.config.paths
-import borgmatic.hooks.command

 logger = logging.getLogger(__name__)

@@ -38,46 +37,39 @@ def dump_data_sources(
     if hook_config and hook_config.get('store_config_files') is False:
         return []

-    with borgmatic.hooks.command.Before_after_hooks(
-        command_hooks=config.get('commands'),
-        before_after='dump_data_sources',
-        umask=config.get('umask'),
-        dry_run=dry_run,
-        hook_name='bootstrap',
-    ):
-        borgmatic_manifest_path = os.path.join(
-            borgmatic_runtime_directory, 'bootstrap', 'manifest.json'
-        )
-
-        if dry_run:
-            return []
-
-        os.makedirs(os.path.dirname(borgmatic_manifest_path), exist_ok=True)
-
-        with open(borgmatic_manifest_path, 'w') as manifest_file:
-            json.dump(
-                {
-                    'borgmatic_version': importlib.metadata.version('borgmatic'),
-                    'config_paths': config_paths,
-                },
-                manifest_file,
-            )
-
-        patterns.extend(
-            borgmatic.borg.pattern.Pattern(
-                config_path, source=borgmatic.borg.pattern.Pattern_source.HOOK
-            )
-            for config_path in config_paths
-        )
-        patterns.append(
-            borgmatic.borg.pattern.Pattern(
-                os.path.join(borgmatic_runtime_directory, 'bootstrap'),
-                source=borgmatic.borg.pattern.Pattern_source.HOOK,
-            )
-        )
-
-        return []
+    borgmatic_manifest_path = os.path.join(
+        borgmatic_runtime_directory, 'bootstrap', 'manifest.json'
+    )
+
+    if dry_run:
+        return []
+
+    os.makedirs(os.path.dirname(borgmatic_manifest_path), exist_ok=True)
+
+    with open(borgmatic_manifest_path, 'w') as manifest_file:
+        json.dump(
+            {
+                'borgmatic_version': importlib.metadata.version('borgmatic'),
+                'config_paths': config_paths,
+            },
+            manifest_file,
+        )
+
+    patterns.extend(
+        borgmatic.borg.pattern.Pattern(
+            config_path, source=borgmatic.borg.pattern.Pattern_source.HOOK
+        )
+        for config_path in config_paths
+    )
+    patterns.append(
+        borgmatic.borg.pattern.Pattern(
+            os.path.join(borgmatic_runtime_directory, 'bootstrap'),
+            source=borgmatic.borg.pattern.Pattern_source.HOOK,
+        )
+    )
+
+    return []


 def remove_data_source_dumps(hook_config, config, borgmatic_runtime_directory, dry_run):
     '''
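
The manifest write that remains after this change reads roughly like the following sketch (a minimal standalone version; `write_manifest` is a hypothetical name, and `importlib.metadata.version('borgmatic')` assumes borgmatic is installed):

```python
import importlib.metadata
import json
import os


def write_manifest(runtime_directory, config_paths):
    # Write a bootstrap manifest recording the borgmatic version and the
    # configuration file paths used for this backup.
    manifest_path = os.path.join(runtime_directory, 'bootstrap', 'manifest.json')
    os.makedirs(os.path.dirname(manifest_path), exist_ok=True)

    with open(manifest_path, 'w') as manifest_file:
        json.dump(
            {
                'borgmatic_version': importlib.metadata.version('borgmatic'),
                'config_paths': config_paths,
            },
            manifest_file,
        )
```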


@@ -9,7 +9,6 @@ import subprocess

 import borgmatic.borg.pattern
 import borgmatic.config.paths
 import borgmatic.execute
-import borgmatic.hooks.command
 import borgmatic.hooks.data_source.snapshot

 logger = logging.getLogger(__name__)

@@ -250,48 +249,41 @@ def dump_data_sources(
     If this is a dry run, then don't actually snapshot anything.
     '''
-    with borgmatic.hooks.command.Before_after_hooks(
-        command_hooks=config.get('commands'),
-        before_after='dump_data_sources',
-        umask=config.get('umask'),
-        dry_run=dry_run,
-        hook_name='btrfs',
-    ):
-        dry_run_label = ' (dry run; not actually snapshotting anything)' if dry_run else ''
-        logger.info(f'Snapshotting Btrfs subvolumes{dry_run_label}')
+    dry_run_label = ' (dry run; not actually snapshotting anything)' if dry_run else ''
+    logger.info(f'Snapshotting Btrfs subvolumes{dry_run_label}')

     # Based on the configured patterns, determine Btrfs subvolumes to backup. Only consider those
     # patterns that came from actual user configuration (as opposed to, say, other hooks).
     btrfs_command = hook_config.get('btrfs_command', 'btrfs')
     findmnt_command = hook_config.get('findmnt_command', 'findmnt')
     subvolumes = get_subvolumes(btrfs_command, findmnt_command, patterns)

     if not subvolumes:
         logger.warning(f'No Btrfs subvolumes found to snapshot{dry_run_label}')

     # Snapshot each subvolume, rewriting patterns to use their snapshot paths.
     for subvolume in subvolumes:
         logger.debug(f'Creating Btrfs snapshot for {subvolume.path} subvolume')

         snapshot_path = make_snapshot_path(subvolume.path)

         if dry_run:
             continue

         snapshot_subvolume(btrfs_command, subvolume.path, snapshot_path)

         for pattern in subvolume.contained_patterns:
             snapshot_pattern = make_borg_snapshot_pattern(subvolume.path, pattern)

             # Attempt to update the pattern in place, since pattern order matters to Borg.
             try:
                 patterns[patterns.index(pattern)] = snapshot_pattern
             except ValueError:
                 patterns.append(snapshot_pattern)

         patterns.append(make_snapshot_exclude_pattern(subvolume.path))

     return []


 def delete_snapshot(btrfs_command, snapshot_path):  # pragma: no cover
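
The in-place pattern replacement used by all of these snapshot hooks can be isolated into a small sketch (hypothetical helper name; the paths are examples):

```python
def replace_or_append(patterns, original, replacement):
    # Replace the pattern in place when it's present (order matters to Borg);
    # otherwise append the replacement.
    try:
        patterns[patterns.index(original)] = replacement
    except ValueError:
        patterns.append(replacement)


paths = ['/mnt/subvol1', '/foo']
replace_or_append(paths, '/mnt/subvol1', '/mnt/subvol1/.borgmatic-snapshot-123')
assert paths == ['/mnt/subvol1/.borgmatic-snapshot-123', '/foo']
```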


@@ -10,7 +10,6 @@ import subprocess

 import borgmatic.borg.pattern
 import borgmatic.config.paths
 import borgmatic.execute
-import borgmatic.hooks.command
 import borgmatic.hooks.data_source.snapshot

 logger = logging.getLogger(__name__)

@@ -198,84 +197,77 @@ def dump_data_sources(
     If this is a dry run, then don't actually snapshot anything.
     '''
-    with borgmatic.hooks.command.Before_after_hooks(
-        command_hooks=config.get('commands'),
-        before_after='dump_data_sources',
-        umask=config.get('umask'),
-        dry_run=dry_run,
-        hook_name='lvm',
-    ):
-        dry_run_label = ' (dry run; not actually snapshotting anything)' if dry_run else ''
-        logger.info(f'Snapshotting LVM logical volumes{dry_run_label}')
+    dry_run_label = ' (dry run; not actually snapshotting anything)' if dry_run else ''
+    logger.info(f'Snapshotting LVM logical volumes{dry_run_label}')

     # List logical volumes to get their mount points, but only consider those patterns that came
     # from actual user configuration (as opposed to, say, other hooks).
     lsblk_command = hook_config.get('lsblk_command', 'lsblk')
     requested_logical_volumes = get_logical_volumes(lsblk_command, patterns)

     # Snapshot each logical volume, rewriting source directories to use the snapshot paths.
     snapshot_suffix = f'{BORGMATIC_SNAPSHOT_PREFIX}{os.getpid()}'
     normalized_runtime_directory = os.path.normpath(borgmatic_runtime_directory)

     if not requested_logical_volumes:
         logger.warning(f'No LVM logical volumes found to snapshot{dry_run_label}')

     for logical_volume in requested_logical_volumes:
         snapshot_name = f'{logical_volume.name}_{snapshot_suffix}'
         logger.debug(
             f'Creating LVM snapshot {snapshot_name} of {logical_volume.mount_point}{dry_run_label}'
         )

         if not dry_run:
             snapshot_logical_volume(
                 hook_config.get('lvcreate_command', 'lvcreate'),
                 snapshot_name,
                 logical_volume.device_path,
                 hook_config.get('snapshot_size', DEFAULT_SNAPSHOT_SIZE),
             )

         # Get the device path for the snapshot we just created.
         try:
             snapshot = get_snapshots(
                 hook_config.get('lvs_command', 'lvs'), snapshot_name=snapshot_name
             )[0]
         except IndexError:
             raise ValueError(f'Cannot find LVM snapshot {snapshot_name}')

         # Mount the snapshot into a particular named temporary directory so that the snapshot ends
         # up in the Borg archive at the "original" logical volume mount point path.
         snapshot_mount_path = os.path.join(
             normalized_runtime_directory,
             'lvm_snapshots',
             hashlib.shake_256(logical_volume.mount_point.encode('utf-8')).hexdigest(
                 MOUNT_POINT_HASH_LENGTH
             ),
             logical_volume.mount_point.lstrip(os.path.sep),
         )

         logger.debug(
             f'Mounting LVM snapshot {snapshot_name} at {snapshot_mount_path}{dry_run_label}'
         )

         if dry_run:
             continue

         mount_snapshot(
             hook_config.get('mount_command', 'mount'), snapshot.device_path, snapshot_mount_path
         )

         for pattern in logical_volume.contained_patterns:
             snapshot_pattern = make_borg_snapshot_pattern(
                 pattern, logical_volume, normalized_runtime_directory
             )

             # Attempt to update the pattern in place, since pattern order matters to Borg.
             try:
                 patterns[patterns.index(pattern)] = snapshot_pattern
             except ValueError:
                 patterns.append(snapshot_pattern)

     return []


 def unmount_snapshot(umount_command, snapshot_mount_path):  # pragma: no cover
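
The snapshot mount path construction above can be sketched standalone (the `MOUNT_POINT_HASH_LENGTH` value here is an assumption, and `make_snapshot_mount_path` is a hypothetical name):

```python
import hashlib
import os

MOUNT_POINT_HASH_LENGTH = 10  # assumed length; the real constant lives in the hook module


def make_snapshot_mount_path(runtime_directory, mount_point):
    # A short SHAKE-256 digest of the mount point keeps the path unique and
    # bounded, and lstrip(os.path.sep) turns the mount point into a relative
    # path so it can be joined under the runtime directory.
    return os.path.join(
        os.path.normpath(runtime_directory),
        'lvm_snapshots',
        hashlib.shake_256(mount_point.encode('utf-8')).hexdigest(MOUNT_POINT_HASH_LENGTH),
        mount_point.lstrip(os.path.sep),
    )


print(make_snapshot_mount_path('/run/user/1000/borgmatic', '/mnt/lvolume1'))
```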


@@ -6,7 +6,6 @@ import shlex

 import borgmatic.borg.pattern
 import borgmatic.config.paths
-import borgmatic.hooks.command
 import borgmatic.hooks.credential.parse
 from borgmatic.execute import (
     execute_command,

@@ -243,78 +242,71 @@ def dump_data_sources(
     Also append the the parent directory of the database dumps to the given patterns list, so the
     dumps actually get backed up.
     '''
-    with borgmatic.hooks.command.Before_after_hooks(
-        command_hooks=config.get('commands'),
-        before_after='dump_data_sources',
-        umask=config.get('umask'),
-        dry_run=dry_run,
-        hook_name='mariadb',
-    ):
-        dry_run_label = ' (dry run; not actually dumping anything)' if dry_run else ''
-        processes = []
+    dry_run_label = ' (dry run; not actually dumping anything)' if dry_run else ''
+    processes = []

     logger.info(f'Dumping MariaDB databases{dry_run_label}')

     for database in databases:
         dump_path = make_dump_path(borgmatic_runtime_directory)
         username = borgmatic.hooks.credential.parse.resolve_credential(
             database.get('username'), config
         )
         password = borgmatic.hooks.credential.parse.resolve_credential(
             database.get('password'), config
         )
         environment = dict(os.environ)
         dump_database_names = database_names_to_dump(
             database, config, username, password, environment, dry_run
         )

         if not dump_database_names:
             if dry_run:
                 continue

             raise ValueError('Cannot find any MariaDB databases to dump.')

         if database['name'] == 'all' and database.get('format'):
             for dump_name in dump_database_names:
                 renamed_database = copy.copy(database)
                 renamed_database['name'] = dump_name
                 processes.append(
                     execute_dump_command(
                         renamed_database,
                         config,
                         username,
                         password,
                         dump_path,
                         (dump_name,),
                         environment,
                         dry_run,
                         dry_run_label,
                     )
                 )
         else:
             processes.append(
                 execute_dump_command(
                     database,
                     config,
                     username,
                     password,
                     dump_path,
                     dump_database_names,
                     environment,
                     dry_run,
                     dry_run_label,
                 )
             )

     if not dry_run:
         patterns.append(
             borgmatic.borg.pattern.Pattern(
                 os.path.join(borgmatic_runtime_directory, 'mariadb_databases'),
                 source=borgmatic.borg.pattern.Pattern_source.HOOK,
             )
         )

     return [process for process in processes if process]


 def remove_data_source_dumps(
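
The "all databases with a format" branch above boils down to this sketch (the `expand_all_database` helper is hypothetical and illustrative only): when the configured name is 'all' and a dump format is set, each real database gets dumped separately under its own name via a shallow copy.

```python
import copy


def expand_all_database(database, dump_database_names):
    # Yield (database config, names to dump) pairs: one per database when the
    # 'all' entry has a format, otherwise a single pair covering all names.
    if database['name'] == 'all' and database.get('format'):
        for dump_name in dump_database_names:
            renamed_database = copy.copy(database)
            renamed_database['name'] = dump_name
            yield renamed_database, (dump_name,)
    else:
        yield database, tuple(dump_database_names)


config_entry = {'name': 'all', 'format': 'sql'}
assert [db['name'] for db, _ in expand_all_database(config_entry, ['foo', 'bar'])] == ['foo', 'bar']
```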


@@ -4,7 +4,6 @@ import shlex

 import borgmatic.borg.pattern
 import borgmatic.config.paths
-import borgmatic.hooks.command
 import borgmatic.hooks.credential.parse
 from borgmatic.execute import execute_command, execute_command_with_processes
 from borgmatic.hooks.data_source import dump

@@ -49,53 +48,46 @@ def dump_data_sources(
     Also append the the parent directory of the database dumps to the given patterns list, so the
     dumps actually get backed up.
     '''
-    with borgmatic.hooks.command.Before_after_hooks(
-        command_hooks=config.get('commands'),
-        before_after='dump_data_sources',
-        umask=config.get('umask'),
-        dry_run=dry_run,
-        hook_name='mongodb',
-    ):
-        dry_run_label = ' (dry run; not actually dumping anything)' if dry_run else ''
+    dry_run_label = ' (dry run; not actually dumping anything)' if dry_run else ''

     logger.info(f'Dumping MongoDB databases{dry_run_label}')

     processes = []
     for database in databases:
         name = database['name']
         dump_filename = dump.make_data_source_dump_filename(
             make_dump_path(borgmatic_runtime_directory),
             name,
             database.get('hostname'),
             database.get('port'),
         )
         dump_format = database.get('format', 'archive')

         logger.debug(
             f'Dumping MongoDB database {name} to {dump_filename}{dry_run_label}',
         )
         if dry_run:
             continue

         command = build_dump_command(database, config, dump_filename, dump_format)

         if dump_format == 'directory':
             dump.create_parent_directory_for_dump(dump_filename)
             execute_command(command, shell=True)
         else:
             dump.create_named_pipe_for_dump(dump_filename)
             processes.append(execute_command(command, shell=True, run_to_completion=False))

     if not dry_run:
         patterns.append(
             borgmatic.borg.pattern.Pattern(
                 os.path.join(borgmatic_runtime_directory, 'mongodb_databases'),
                 source=borgmatic.borg.pattern.Pattern_source.HOOK,
             )
         )

     return processes


 def make_password_config_file(password):
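
The directory-versus-streaming dispatch above, reduced to a hedged standalone sketch (a simplification of the hook's flow; `start_dump` is a hypothetical name, and paths are assumed absolute):

```python
import os
import subprocess


def start_dump(command, dump_filename, dump_format):
    # Directory-format dumps are written straight to disk and must finish
    # before the backup; single-file dumps stream through a named pipe that
    # Borg reads while the dump process is still running.
    os.makedirs(os.path.dirname(dump_filename), exist_ok=True)

    if dump_format == 'directory':
        subprocess.run(command, shell=True, check=True)  # runs to completion
        return None

    os.mkfifo(dump_filename, mode=0o600)  # the reader consumes the pipe later
    return subprocess.Popen(command, shell=True)  # left running
```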


@@ -5,7 +5,6 @@ import shlex

 import borgmatic.borg.pattern
 import borgmatic.config.paths
-import borgmatic.hooks.command
 import borgmatic.hooks.credential.parse
 import borgmatic.hooks.data_source.mariadb
 from borgmatic.execute import (

@@ -170,78 +169,71 @@ def dump_data_sources(
     Also append the the parent directory of the database dumps to the given patterns list, so the
     dumps actually get backed up.
     '''
-    with borgmatic.hooks.command.Before_after_hooks(
-        command_hooks=config.get('commands'),
-        before_after='dump_data_sources',
-        umask=config.get('umask'),
-        dry_run=dry_run,
-        hook_name='mysql',
-    ):
-        dry_run_label = ' (dry run; not actually dumping anything)' if dry_run else ''
-        processes = []
+    dry_run_label = ' (dry run; not actually dumping anything)' if dry_run else ''
+    processes = []

     logger.info(f'Dumping MySQL databases{dry_run_label}')

     for database in databases:
         dump_path = make_dump_path(borgmatic_runtime_directory)
         username = borgmatic.hooks.credential.parse.resolve_credential(
             database.get('username'), config
         )
         password = borgmatic.hooks.credential.parse.resolve_credential(
             database.get('password'), config
         )
         environment = dict(os.environ)
         dump_database_names = database_names_to_dump(
             database, config, username, password, environment, dry_run
         )

         if not dump_database_names:
             if dry_run:
                 continue

             raise ValueError('Cannot find any MySQL databases to dump.')

         if database['name'] == 'all' and database.get('format'):
             for dump_name in dump_database_names:
                 renamed_database = copy.copy(database)
                 renamed_database['name'] = dump_name
                 processes.append(
                     execute_dump_command(
                         renamed_database,
                         config,
                         username,
                         password,
                         dump_path,
                         (dump_name,),
                         environment,
                         dry_run,
                         dry_run_label,
                     )
                 )
         else:
             processes.append(
                 execute_dump_command(
                     database,
                     config,
                     username,
                     password,
                     dump_path,
                     dump_database_names,
                     environment,
                     dry_run,
                     dry_run_label,
                 )
             )

     if not dry_run:
         patterns.append(
             borgmatic.borg.pattern.Pattern(
                 os.path.join(borgmatic_runtime_directory, 'mysql_databases'),
                 source=borgmatic.borg.pattern.Pattern_source.HOOK,
             )
         )

     return [process for process in processes if process]


 def remove_data_source_dumps(


@@ -7,7 +7,6 @@ import shlex

 import borgmatic.borg.pattern
 import borgmatic.config.paths
-import borgmatic.hooks.command
 import borgmatic.hooks.credential.parse
 from borgmatic.execute import (
     execute_command,

@@ -142,127 +141,112 @@ def dump_data_sources(
     Raise ValueError if the databases to dump cannot be determined.
     '''
-    with borgmatic.hooks.command.Before_after_hooks(
-        command_hooks=config.get('commands'),
-        before_after='dump_data_sources',
-        umask=config.get('umask'),
-        dry_run=dry_run,
-        hook_name='postgresql',
-    ):
-        dry_run_label = ' (dry run; not actually dumping anything)' if dry_run else ''
-        processes = []
+    dry_run_label = ' (dry run; not actually dumping anything)' if dry_run else ''
+    processes = []

     logger.info(f'Dumping PostgreSQL databases{dry_run_label}')

     for database in databases:
         environment = make_environment(database, config)
         dump_path = make_dump_path(borgmatic_runtime_directory)
         dump_database_names = database_names_to_dump(database, config, environment, dry_run)

         if not dump_database_names:
             if dry_run:
                 continue

             raise ValueError('Cannot find any PostgreSQL databases to dump.')

         for database_name in dump_database_names:
             dump_format = database.get('format', None if database_name == 'all' else 'custom')
             compression = database.get('compression')
             default_dump_command = 'pg_dumpall' if database_name == 'all' else 'pg_dump'
             dump_command = tuple(
                 shlex.quote(part)
                 for part in shlex.split(database.get('pg_dump_command') or default_dump_command)
             )
             dump_filename = dump.make_data_source_dump_filename(
                 dump_path,
                 database_name,
                 database.get('hostname'),
                 database.get('port'),
             )

             if os.path.exists(dump_filename):
                 logger.warning(
                     f'Skipping duplicate dump of PostgreSQL database "{database_name}" to {dump_filename}'
                 )
                 continue

             command = (
                 dump_command
                 + (
                     '--no-password',
                     '--clean',
                     '--if-exists',
                 )
-                + (
-                    ('--host', shlex.quote(database['hostname']))
-                    if 'hostname' in database
-                    else ()
-                )
+                + (('--host', shlex.quote(database['hostname'])) if 'hostname' in database else ())
                 + (('--port', shlex.quote(str(database['port']))) if 'port' in database else ())
                 + (
                     (
                         '--username',
                         shlex.quote(
                             borgmatic.hooks.credential.parse.resolve_credential(
                                 database['username'], config
                             )
                         ),
                     )
                     if 'username' in database
                     else ()
                 )
                 + (('--no-owner',) if database.get('no_owner', False) else ())
                 + (('--format', shlex.quote(dump_format)) if dump_format else ())
-                + (
-                    ('--compress', shlex.quote(str(compression)))
-                    if compression is not None
-                    else ()
-                )
+                + (('--compress', shlex.quote(str(compression))) if compression is not None else ())
                 + (('--file', shlex.quote(dump_filename)) if dump_format == 'directory' else ())
                 + (
                     tuple(shlex.quote(option) for option in database['options'].split(' '))
                     if 'options' in database
                     else ()
                 )
                 + (() if database_name == 'all' else (shlex.quote(database_name),))
                 # Use shell redirection rather than the --file flag to sidestep synchronization issues
                 # when pg_dump/pg_dumpall tries to write to a named pipe. But for the directory dump
                 # format in a particular, a named destination is required, and redirection doesn't work.
                 + (('>', shlex.quote(dump_filename)) if dump_format != 'directory' else ())
             )

             logger.debug(
                 f'Dumping PostgreSQL database "{database_name}" to {dump_filename}{dry_run_label}'
             )
             if dry_run:
                 continue

             if dump_format == 'directory':
                 dump.create_parent_directory_for_dump(dump_filename)
                 execute_command(
                     command,
                     shell=True,
                     environment=environment,
                 )
             else:
                 dump.create_named_pipe_for_dump(dump_filename)
                 processes.append(
                     execute_command(
                         command,
                         shell=True,
                         environment=environment,
                         run_to_completion=False,
                     )
                 )

     if not dry_run:
         patterns.append(
             borgmatic.borg.pattern.Pattern(
                 os.path.join(borgmatic_runtime_directory, 'postgresql_databases'),
                 source=borgmatic.borg.pattern.Pattern_source.HOOK,
             )
         )

     return processes


 def remove_data_source_dumps(
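
The comment in the diff about shell redirection versus `--file` can be distilled into a small sketch (hypothetical helper; illustrative only): single-file dumps are redirected into the (usually named-pipe) dump file, while the directory format needs a real `--file` destination.

```python
import shlex


def destination_arguments(dump_format, dump_filename):
    # Directory dumps require a named destination; everything else is
    # redirected so pg_dump can write into a named pipe without stalling.
    if dump_format == 'directory':
        return ('--file', shlex.quote(dump_filename))

    return ('>', shlex.quote(dump_filename))


assert destination_arguments('custom', '/run/dump')[0] == '>'
assert destination_arguments('directory', '/run/dump')[0] == '--file'
```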


@@ -4,7 +4,6 @@ import shlex

 import borgmatic.borg.pattern
 import borgmatic.config.paths
-import borgmatic.hooks.command
 from borgmatic.execute import execute_command, execute_command_with_processes
 from borgmatic.hooks.data_source import dump

@@ -48,66 +47,58 @@ def dump_data_sources(
     Also append the the parent directory of the database dumps to the given patterns list, so the
     dumps actually get backed up.
     '''
-    with borgmatic.hooks.command.Before_after_hooks(
-        command_hooks=config.get('commands'),
-        before_after='dump_data_sources',
-        umask=config.get('umask'),
-        dry_run=dry_run,
-        hook_name='sqlite',
-    ):
-        dry_run_label = ' (dry run; not actually dumping anything)' if dry_run else ''
-        processes = []
+    dry_run_label = ' (dry run; not actually dumping anything)' if dry_run else ''
+    processes = []

     logger.info(f'Dumping SQLite databases{dry_run_label}')

     for database in databases:
         database_path = database['path']

         if database['name'] == 'all':
             logger.warning('The "all" database name has no meaning for SQLite databases')
         if not os.path.exists(database_path):
             logger.warning(
                 f'No SQLite database at {database_path}; an empty database will be created and dumped'
             )

         dump_path = make_dump_path(borgmatic_runtime_directory)
         dump_filename = dump.make_data_source_dump_filename(dump_path, database['name'])

         if os.path.exists(dump_filename):
             logger.warning(
                 f'Skipping duplicate dump of SQLite database at {database_path} to {dump_filename}'
             )
             continue

         sqlite_command = tuple(
-            shlex.quote(part)
-            for part in shlex.split(database.get('sqlite_command') or 'sqlite3')
+            shlex.quote(part) for part in shlex.split(database.get('sqlite_command') or 'sqlite3')
         )
         command = sqlite_command + (
             shlex.quote(database_path),
             '.dump',
             '>',
             shlex.quote(dump_filename),
         )

         logger.debug(
             f'Dumping SQLite database at {database_path} to {dump_filename}{dry_run_label}'
         )
         if dry_run:
             continue

         dump.create_named_pipe_for_dump(dump_filename)
         processes.append(execute_command(command, shell=True, run_to_completion=False))

     if not dry_run:
         patterns.append(
             borgmatic.borg.pattern.Pattern(
                 os.path.join(borgmatic_runtime_directory, 'sqlite_databases'),
                 source=borgmatic.borg.pattern.Pattern_source.HOOK,
             )
         )

     return processes


 def remove_data_source_dumps(
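
The SQLite command assembly above, in isolation (hypothetical helper name; quoting matters because the command runs with `shell=True`):

```python
import shlex


def build_sqlite_dump_command(database_path, dump_filename, sqlite_command='sqlite3'):
    # Shell-quote every part, since the joined command goes through a shell to
    # allow the '>' redirection into the dump file (typically a named pipe).
    return tuple(shlex.quote(part) for part in shlex.split(sqlite_command)) + (
        shlex.quote(database_path),
        '.dump',
        '>',
        shlex.quote(dump_filename),
    )


assert ' '.join(build_sqlite_dump_command('/db.sqlite', '/run/dump')) == 'sqlite3 /db.sqlite .dump > /run/dump'
```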


@@ -9,7 +9,6 @@ import subprocess

 import borgmatic.borg.pattern
 import borgmatic.config.paths
 import borgmatic.execute
-import borgmatic.hooks.command
 import borgmatic.hooks.data_source.snapshot

 logger = logging.getLogger(__name__)

@@ -244,71 +243,64 @@ def dump_data_sources(
     If this is a dry run, then don't actually snapshot anything.
     '''
-    with borgmatic.hooks.command.Before_after_hooks(
-        command_hooks=config.get('commands'),
-        before_after='dump_data_sources',
-        umask=config.get('umask'),
-        dry_run=dry_run,
-        hook_name='zfs',
-    ):
-        dry_run_label = ' (dry run; not actually snapshotting anything)' if dry_run else ''
-        logger.info(f'Snapshotting ZFS datasets{dry_run_label}')
+    dry_run_label = ' (dry run; not actually snapshotting anything)' if dry_run else ''
+    logger.info(f'Snapshotting ZFS datasets{dry_run_label}')

     # List ZFS datasets to get their mount points, but only consider those patterns that came from
     # actual user configuration (as opposed to, say, other hooks).
     zfs_command = hook_config.get('zfs_command', 'zfs')
     requested_datasets = get_datasets_to_backup(zfs_command, patterns)

     # Snapshot each dataset, rewriting patterns to use the snapshot paths.
     snapshot_name = f'{BORGMATIC_SNAPSHOT_PREFIX}{os.getpid()}'
     normalized_runtime_directory = os.path.normpath(borgmatic_runtime_directory)

     if not requested_datasets:
         logger.warning(f'No ZFS datasets found to snapshot{dry_run_label}')

     for dataset in requested_datasets:
         full_snapshot_name = f'{dataset.name}@{snapshot_name}'
         logger.debug(
             f'Creating ZFS snapshot {full_snapshot_name} of {dataset.mount_point}{dry_run_label}'
         )

         if not dry_run:
             snapshot_dataset(zfs_command, full_snapshot_name)

         # Mount the snapshot into a particular named temporary directory so that the snapshot ends
         # up in the Borg archive at the "original" dataset mount point path.
         snapshot_mount_path = os.path.join(
             normalized_runtime_directory,
             'zfs_snapshots',
             hashlib.shake_256(dataset.mount_point.encode('utf-8')).hexdigest(
                 MOUNT_POINT_HASH_LENGTH
             ),
             dataset.mount_point.lstrip(os.path.sep),
         )

         logger.debug(
             f'Mounting ZFS snapshot {full_snapshot_name} at {snapshot_mount_path}{dry_run_label}'
         )

         if dry_run:
             continue

         mount_snapshot(
             hook_config.get('mount_command', 'mount'), full_snapshot_name, snapshot_mount_path
         )

         for pattern in dataset.contained_patterns:
             snapshot_pattern = make_borg_snapshot_pattern(
                 pattern, dataset, normalized_runtime_directory
             )

             # Attempt to update the pattern in place, since pattern order matters to Borg.
             try:
                 patterns[patterns.index(pattern)] = snapshot_pattern
             except ValueError:
                 patterns.append(snapshot_pattern)

     return []


 def unmount_snapshot(umount_command, snapshot_mount_path):  # pragma: no cover
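
ZFS snapshot naming as used above, sketched standalone (the `BORGMATIC_SNAPSHOT_PREFIX` value here is an assumption about the module constant):

```python
import os

BORGMATIC_SNAPSHOT_PREFIX = 'borgmatic-'  # assumed value of the module constant


def full_snapshot_names(dataset_names):
    # One snapshot name per borgmatic run (prefix plus PID), applied to each
    # dataset in the usual dataset@snapshot form.
    snapshot_name = f'{BORGMATIC_SNAPSHOT_PREFIX}{os.getpid()}'
    return [f'{name}@{snapshot_name}' for name in dataset_names]


print(full_snapshot_names(['pool/dataset']))  # e.g. ['pool/dataset@borgmatic-1234']
```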


@@ -71,23 +71,6 @@ those two hooks. This allows you to perform cleanup steps that correspond to `be
 commands—even when something goes wrong. This is a departure from the way that the deprecated
 `after_*` hooks worked.

-There's also another command hook that works a little differently:
-
-```yaml
-commands:
-    - before: dump_data_sources
-      hooks: [postgresql]
-      run:
-          - echo "Right before the PostgreSQL database dump!"
-```
-
-This command hook has the following options:
-
- * `before` or `after`: Name for the point in borgmatic's execution that the commands should be run before or after:
-   * `dump_data_sources` runs before or after data sources are dumped (databases dumped or filesystems snapshotted) for each hook named in `hooks`.
- * `hooks`: Names of other hooks that this command hook applies to, e.g. `postgresql`, `mariadb`, `zfs`, `btrfs`, etc. Defaults to all hooks of the relevant type.
- * `run`: One or more shell commands or scripts to run when this command hook is triggered.

 ### Order of execution

@@ -102,9 +85,6 @@ borgmatic for the `create` and `prune` actions. Here's the order of execution:
 * Run `before: configuration` hooks (from the first configuration file).
 * Run `before: repository` hooks (for the first repository).
 * Run `before: action` hooks for `create`.
-* Run `before: dump_data_sources` hooks (e.g. for the PostgreSQL hook).
-* Actually dump data sources (e.g. PostgreSQL databases).
-* Run `after: dump_data_sources` hooks (e.g. for the PostgreSQL hook).
 * Actually run the `create` action (e.g. `borg create`).
 * Run `after: action` hooks for `create`.
 * Run `before: action` hooks for `prune`.


@@ -6,9 +6,6 @@ from borgmatic.hooks.data_source import bootstrap as module


 def test_dump_data_sources_creates_manifest_file():
-    flexmock(module.borgmatic.hooks.command).should_receive('Before_after_hooks').and_return(
-        flexmock()
-    )
     flexmock(module.os).should_receive('makedirs')
     flexmock(module.importlib.metadata).should_receive('version').and_return('1.0.0')

@@ -35,7 +32,6 @@ def test_dump_data_sources_creates_manifest_file():


 def test_dump_data_sources_with_store_config_files_false_does_not_create_manifest_file():
-    flexmock(module.borgmatic.hooks.command).should_receive('Before_after_hooks').never()
     flexmock(module.os).should_receive('makedirs').never()
     flexmock(module.json).should_receive('dump').never()
     hook_config = {'store_config_files': False}

@@ -51,9 +47,6 @@ def test_dump_data_sources_with_store_config_files_false_does_not_create_manifes


 def test_dump_data_sources_with_dry_run_does_not_create_manifest_file():
-    flexmock(module.borgmatic.hooks.command).should_receive('Before_after_hooks').and_return(
-        flexmock()
-    )
     flexmock(module.os).should_receive('makedirs').never()
     flexmock(module.json).should_receive('dump').never()


@@ -269,9 +269,6 @@ def test_make_borg_snapshot_pattern_includes_slashdot_hack_and_stripped_pattern_


 def test_dump_data_sources_snapshots_each_subvolume_and_updates_patterns():
-    flexmock(module.borgmatic.hooks.command).should_receive('Before_after_hooks').and_return(
-        flexmock()
-    )
     patterns = [Pattern('/foo'), Pattern('/mnt/subvol1')]
     config = {'btrfs': {}}
     flexmock(module).should_receive('get_subvolumes').and_return(

@@ -350,9 +347,6 @@ def test_dump_data_sources_snapshots_each_subvolume_and_updates_patterns():


 def test_dump_data_sources_uses_custom_btrfs_command_in_commands():
-    flexmock(module.borgmatic.hooks.command).should_receive('Before_after_hooks').and_return(
-        flexmock()
-    )
     patterns = [Pattern('/foo'), Pattern('/mnt/subvol1')]
     config = {'btrfs': {'btrfs_command': '/usr/local/bin/btrfs'}}
     flexmock(module).should_receive('get_subvolumes').and_return(

@@ -406,9 +400,6 @@ def test_dump_data_sources_uses_custom_btrfs_command_in_commands():


 def test_dump_data_sources_uses_custom_findmnt_command_in_commands():
-    flexmock(module.borgmatic.hooks.command).should_receive('Before_after_hooks').and_return(
-        flexmock()
-    )
     patterns = [Pattern('/foo'), Pattern('/mnt/subvol1')]
     config = {'btrfs': {'findmnt_command': '/usr/local/bin/findmnt'}}
     flexmock(module).should_receive('get_subvolumes').with_args(

@@ -464,9 +455,6 @@ def test_dump_data_sources_uses_custom_findmnt_command_in_commands():


 def test_dump_data_sources_with_dry_run_skips_snapshot_and_patterns_update():
-    flexmock(module.borgmatic.hooks.command).should_receive('Before_after_hooks').and_return(
-        flexmock()
-    )
     patterns = [Pattern('/foo'), Pattern('/mnt/subvol1')]
     config = {'btrfs': {}}
     flexmock(module).should_receive('get_subvolumes').and_return(

@@ -495,9 +483,6 @@ def test_dump_data_sources_with_dry_run_skips_snapshot_and_patterns_update():


 def test_dump_data_sources_without_matching_subvolumes_skips_snapshot_and_patterns_update():
-    flexmock(module.borgmatic.hooks.command).should_receive('Before_after_hooks').and_return(
-        flexmock()
-    )
     patterns = [Pattern('/foo'), Pattern('/mnt/subvol1')]
     config = {'btrfs': {}}
     flexmock(module).should_receive('get_subvolumes').and_return(())

@@ -522,9 +507,6 @@ def test_dump_data_sources_without_matching_subvolumes_skips_snapshot_and_patter


 def test_dump_data_sources_snapshots_adds_to_existing_exclude_patterns():
-    flexmock(module.borgmatic.hooks.command).should_receive('Before_after_hooks').and_return(
-        flexmock()
-    )
     patterns = [Pattern('/foo'), Pattern('/mnt/subvol1')]
     config = {'btrfs': {}, 'exclude_patterns': ['/bar']}
     flexmock(module).should_receive('get_subvolumes').and_return(


@@ -282,9 +282,6 @@ def test_make_borg_snapshot_pattern_includes_slashdot_hack_and_stripped_pattern_


 def test_dump_data_sources_snapshots_and_mounts_and_updates_patterns():
-    flexmock(module.borgmatic.hooks.command).should_receive('Before_after_hooks').and_return(
-        flexmock()
-    )
     config = {'lvm': {}}
     patterns = [Pattern('/mnt/lvolume1/subdir'), Pattern('/mnt/lvolume2')]
     logical_volumes = (

@@ -354,9 +351,6 @@ def test_dump_data_sources_snapshots_and_mounts_and_updates_patterns():


 def test_dump_data_sources_with_no_logical_volumes_skips_snapshots():
-    flexmock(module.borgmatic.hooks.command).should_receive('Before_after_hooks').and_return(
-        flexmock()
-    )
     config = {'lvm': {}}
     patterns = [Pattern('/mnt/lvolume1/subdir'), Pattern('/mnt/lvolume2')]
     flexmock(module).should_receive('get_logical_volumes').and_return(())

@@ -379,9 +373,6 @@ def test_dump_data_sources_with_no_logical_volumes_skips_snapshots():


 def test_dump_data_sources_uses_snapshot_size_for_snapshot():
-    flexmock(module.borgmatic.hooks.command).should_receive('Before_after_hooks').and_return(
-        flexmock()
-    )
     config = {'lvm': {'snapshot_size': '1000PB'}}
     patterns = [Pattern('/mnt/lvolume1/subdir'), Pattern('/mnt/lvolume2')]
     logical_volumes = (

@@ -457,9 +448,6 @@ def test_dump_data_sources_uses_snapshot_size_for_snapshot():


 def test_dump_data_sources_uses_custom_commands():
-    flexmock(module.borgmatic.hooks.command).should_receive('Before_after_hooks').and_return(
-        flexmock()
-    )
     config = {
         'lvm': {
             'lsblk_command': '/usr/local/bin/lsblk',

@@ -546,9 +534,6 @@ def test_dump_data_sources_uses_custom_commands():


 def test_dump_data_sources_with_dry_run_skips_snapshots_and_does_not_touch_patterns():
-    flexmock(module.borgmatic.hooks.command).should_receive('Before_after_hooks').and_return(
-        flexmock()
-    )
     config = {'lvm': {}}
     patterns = [Pattern('/mnt/lvolume1/subdir'), Pattern('/mnt/lvolume2')]
     flexmock(module).should_receive('get_logical_volumes').and_return(

@@ -600,9 +585,6 @@ def test_dump_data_sources_with_dry_run_skips_snapshots_and_does_not_touch_patte


 def test_dump_data_sources_ignores_mismatch_between_given_patterns_and_contained_patterns():
-    flexmock(module.borgmatic.hooks.command).should_receive('Before_after_hooks').and_return(
-        flexmock()
-    )
     config = {'lvm': {}}
     patterns = [Pattern('/hmm')]
     logical_volumes = (

@@ -673,9 +655,6 @@ def test_dump_data_sources_ignores_mismatch_between_given_patterns_and_contained


 def test_dump_data_sources_with_missing_snapshot_errors():
-    flexmock(module.borgmatic.hooks.command).should_receive('Before_after_hooks').and_return(
-        flexmock()
-    )
     config = {'lvm': {}}
     patterns = [Pattern('/mnt/lvolume1/subdir'), Pattern('/mnt/lvolume2')]
     flexmock(module).should_receive('get_logical_volumes').and_return(


@ -237,9 +237,6 @@ def test_use_streaming_false_for_no_databases():
def test_dump_data_sources_dumps_each_database(): def test_dump_data_sources_dumps_each_database():
flexmock(module.borgmatic.hooks.command).should_receive('Before_after_hooks').and_return(
flexmock()
)
databases = [{'name': 'foo'}, {'name': 'bar'}] databases = [{'name': 'foo'}, {'name': 'bar'}]
processes = [flexmock(), flexmock()] processes = [flexmock(), flexmock()]
flexmock(module).should_receive('make_dump_path').and_return('') flexmock(module).should_receive('make_dump_path').and_return('')
@ -281,9 +278,6 @@ def test_dump_data_sources_dumps_each_database():
def test_dump_data_sources_dumps_with_password(): def test_dump_data_sources_dumps_with_password():
flexmock(module.borgmatic.hooks.command).should_receive('Before_after_hooks').and_return(
flexmock()
)
database = {'name': 'foo', 'username': 'root', 'password': 'trustsome1'} database = {'name': 'foo', 'username': 'root', 'password': 'trustsome1'}
process = flexmock() process = flexmock()
flexmock(module).should_receive('make_dump_path').and_return('') flexmock(module).should_receive('make_dump_path').and_return('')
@ -318,9 +312,6 @@ def test_dump_data_sources_dumps_with_password():
def test_dump_data_sources_dumps_all_databases_at_once(): def test_dump_data_sources_dumps_all_databases_at_once():
flexmock(module.borgmatic.hooks.command).should_receive('Before_after_hooks').and_return(
flexmock()
)
databases = [{'name': 'all'}] databases = [{'name': 'all'}]
process = flexmock() process = flexmock()
flexmock(module).should_receive('make_dump_path').and_return('') flexmock(module).should_receive('make_dump_path').and_return('')
@ -352,9 +343,6 @@ def test_dump_data_sources_dumps_all_databases_at_once():
def test_dump_data_sources_dumps_all_databases_separately_when_format_configured(): def test_dump_data_sources_dumps_all_databases_separately_when_format_configured():
flexmock(module.borgmatic.hooks.command).should_receive('Before_after_hooks').and_return(
flexmock()
)
databases = [{'name': 'all', 'format': 'sql'}] databases = [{'name': 'all', 'format': 'sql'}]
processes = [flexmock(), flexmock()] processes = [flexmock(), flexmock()]
flexmock(module).should_receive('make_dump_path').and_return('') flexmock(module).should_receive('make_dump_path').and_return('')
@ -862,9 +850,6 @@ def test_execute_dump_command_with_dry_run_skips_mariadb_dump():
def test_dump_data_sources_errors_for_missing_all_databases(): def test_dump_data_sources_errors_for_missing_all_databases():
flexmock(module.borgmatic.hooks.command).should_receive('Before_after_hooks').and_return(
flexmock()
)
databases = [{'name': 'all'}] databases = [{'name': 'all'}]
flexmock(module).should_receive('make_dump_path').and_return('') flexmock(module).should_receive('make_dump_path').and_return('')
flexmock(module.os).should_receive('environ').and_return({'USER': 'root'}) flexmock(module.os).should_receive('environ').and_return({'USER': 'root'})
@@ -888,9 +873,6 @@ def test_dump_data_sources_errors_for_missing_all_databases():
 def test_dump_data_sources_does_not_error_for_missing_all_databases_with_dry_run():
-    flexmock(module.borgmatic.hooks.command).should_receive('Before_after_hooks').and_return(
-        flexmock()
-    )
     databases = [{'name': 'all'}]
     flexmock(module).should_receive('make_dump_path').and_return('')
     flexmock(module.os).should_receive('environ').and_return({'USER': 'root'})
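Every hunk in these test files deletes the same three-line stub. For readers unfamiliar with flexmock, here is a minimal, self-contained sketch of that stubbing pattern; the module and context-manager names below are hypothetical stand-ins, not borgmatic's actual code:

    from flexmock import flexmock


    class fake_hooks_module:
        class Before_after_hooks:
            # Hypothetical stand-in for a before/after command hook context manager.
            def __enter__(self):
                return self

            def __exit__(self, *exception_info):
                return False


    def test_runs_without_real_hooks():
        # should_receive() swaps the attribute for a mock expectation, so the
        # code under test gets an inert object instead of the real context manager.
        flexmock(fake_hooks_module).should_receive('Before_after_hooks').and_return(flexmock())

        assert fake_hooks_module.Before_after_hooks() is not None

With such a stub in place, a dump_data_sources test never executes real before/after commands; these stubs go away along with the hook trigger they were guarding against.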
@@ -24,9 +24,6 @@ def test_use_streaming_false_for_no_databases():
 def test_dump_data_sources_runs_mongodump_for_each_database():
-    flexmock(module.borgmatic.hooks.command).should_receive('Before_after_hooks').and_return(
-        flexmock()
-    )
     databases = [{'name': 'foo'}, {'name': 'bar'}]
     processes = [flexmock(), flexmock()]
     flexmock(module).should_receive('make_dump_path').and_return('')
@@ -56,9 +53,6 @@ def test_dump_data_sources_runs_mongodump_for_each_database():
 def test_dump_data_sources_with_dry_run_skips_mongodump():
-    flexmock(module.borgmatic.hooks.command).should_receive('Before_after_hooks').and_return(
-        flexmock()
-    )
     databases = [{'name': 'foo'}, {'name': 'bar'}]
     flexmock(module).should_receive('make_dump_path').and_return('')
     flexmock(module.dump).should_receive('make_data_source_dump_filename').and_return(
@@ -81,9 +75,6 @@ def test_dump_data_sources_with_dry_run_skips_mongodump():
 def test_dump_data_sources_runs_mongodump_with_hostname_and_port():
-    flexmock(module.borgmatic.hooks.command).should_receive('Before_after_hooks').and_return(
-        flexmock()
-    )
     databases = [{'name': 'foo', 'hostname': 'database.example.org', 'port': 5433}]
     process = flexmock()
     flexmock(module).should_receive('make_dump_path').and_return('')
@@ -120,9 +111,6 @@ def test_dump_data_sources_runs_mongodump_with_hostname_and_port():
 def test_dump_data_sources_runs_mongodump_with_username_and_password():
-    flexmock(module.borgmatic.hooks.command).should_receive('Before_after_hooks').and_return(
-        flexmock()
-    )
     databases = [
         {
             'name': 'foo',
@@ -174,9 +162,6 @@ def test_dump_data_sources_runs_mongodump_with_username_and_password():
 def test_dump_data_sources_runs_mongodump_with_directory_format():
-    flexmock(module.borgmatic.hooks.command).should_receive('Before_after_hooks').and_return(
-        flexmock()
-    )
     databases = [{'name': 'foo', 'format': 'directory'}]
     flexmock(module).should_receive('make_dump_path').and_return('')
     flexmock(module.dump).should_receive('make_data_source_dump_filename').and_return(
@@ -204,9 +189,6 @@ def test_dump_data_sources_runs_mongodump_with_directory_format():
 def test_dump_data_sources_runs_mongodump_with_options():
-    flexmock(module.borgmatic.hooks.command).should_receive('Before_after_hooks').and_return(
-        flexmock()
-    )
     databases = [{'name': 'foo', 'options': '--stuff=such'}]
     process = flexmock()
     flexmock(module).should_receive('make_dump_path').and_return('')
@@ -240,9 +222,6 @@ def test_dump_data_sources_runs_mongodump_with_options():
 def test_dump_data_sources_runs_mongodumpall_for_all_databases():
-    flexmock(module.borgmatic.hooks.command).should_receive('Before_after_hooks').and_return(
-        flexmock()
-    )
     databases = [{'name': 'all'}]
     process = flexmock()
     flexmock(module).should_receive('make_dump_path').and_return('')
@@ -134,9 +134,6 @@ def test_use_streaming_false_for_no_databases():
 def test_dump_data_sources_dumps_each_database():
-    flexmock(module.borgmatic.hooks.command).should_receive('Before_after_hooks').and_return(
-        flexmock()
-    )
     databases = [{'name': 'foo'}, {'name': 'bar'}]
     processes = [flexmock(), flexmock()]
     flexmock(module).should_receive('make_dump_path').and_return('')
@@ -175,9 +172,6 @@ def test_dump_data_sources_dumps_each_database():
 def test_dump_data_sources_dumps_with_password():
-    flexmock(module.borgmatic.hooks.command).should_receive('Before_after_hooks').and_return(
-        flexmock()
-    )
     database = {'name': 'foo', 'username': 'root', 'password': 'trustsome1'}
     process = flexmock()
     flexmock(module).should_receive('make_dump_path').and_return('')
@@ -212,9 +206,6 @@ def test_dump_data_sources_dumps_with_password():
 def test_dump_data_sources_dumps_all_databases_at_once():
-    flexmock(module.borgmatic.hooks.command).should_receive('Before_after_hooks').and_return(
-        flexmock()
-    )
     databases = [{'name': 'all'}]
     process = flexmock()
     flexmock(module).should_receive('make_dump_path').and_return('')
@@ -246,9 +237,6 @@ def test_dump_data_sources_dumps_all_databases_at_once():
 def test_dump_data_sources_dumps_all_databases_separately_when_format_configured():
-    flexmock(module.borgmatic.hooks.command).should_receive('Before_after_hooks').and_return(
-        flexmock()
-    )
     databases = [{'name': 'all', 'format': 'sql'}]
     processes = [flexmock(), flexmock()]
     flexmock(module).should_receive('make_dump_path').and_return('')
@@ -774,9 +762,6 @@ def test_execute_dump_command_with_dry_run_skips_mysqldump():
 def test_dump_data_sources_errors_for_missing_all_databases():
-    flexmock(module.borgmatic.hooks.command).should_receive('Before_after_hooks').and_return(
-        flexmock()
-    )
     databases = [{'name': 'all'}]
     flexmock(module).should_receive('make_dump_path').and_return('')
     flexmock(module.os).should_receive('environ').and_return({'USER': 'root'})
@@ -800,9 +785,6 @@ def test_dump_data_sources_errors_for_missing_all_databases():
 def test_dump_data_sources_does_not_error_for_missing_all_databases_with_dry_run():
-    flexmock(module.borgmatic.hooks.command).should_receive('Before_after_hooks').and_return(
-        flexmock()
-    )
     databases = [{'name': 'all'}]
     flexmock(module).should_receive('make_dump_path').and_return('')
     flexmock(module.os).should_receive('environ').and_return({'USER': 'root'})
@@ -236,9 +236,6 @@ def test_use_streaming_false_for_no_databases():
 def test_dump_data_sources_runs_pg_dump_for_each_database():
-    flexmock(module.borgmatic.hooks.command).should_receive('Before_after_hooks').and_return(
-        flexmock()
-    )
     databases = [{'name': 'foo'}, {'name': 'bar'}]
     processes = [flexmock(), flexmock()]
     flexmock(module).should_receive('make_environment').and_return({'PGSSLMODE': 'disable'})
@@ -287,9 +284,6 @@ def test_dump_data_sources_runs_pg_dump_for_each_database():
 def test_dump_data_sources_raises_when_no_database_names_to_dump():
-    flexmock(module.borgmatic.hooks.command).should_receive('Before_after_hooks').and_return(
-        flexmock()
-    )
     databases = [{'name': 'foo'}, {'name': 'bar'}]
     flexmock(module).should_receive('make_environment').and_return({'PGSSLMODE': 'disable'})
     flexmock(module).should_receive('make_dump_path').and_return('')
@@ -307,9 +301,6 @@ def test_dump_data_sources_raises_when_no_database_names_to_dump():
 def test_dump_data_sources_does_not_raise_when_no_database_names_to_dump():
-    flexmock(module.borgmatic.hooks.command).should_receive('Before_after_hooks').and_return(
-        flexmock()
-    )
     databases = [{'name': 'foo'}, {'name': 'bar'}]
     flexmock(module).should_receive('make_environment').and_return({'PGSSLMODE': 'disable'})
     flexmock(module).should_receive('make_dump_path').and_return('')
@@ -326,9 +317,6 @@ def test_dump_data_sources_does_not_raise_when_no_database_names_to_dump():
 def test_dump_data_sources_with_duplicate_dump_skips_pg_dump():
-    flexmock(module.borgmatic.hooks.command).should_receive('Before_after_hooks').and_return(
-        flexmock()
-    )
     databases = [{'name': 'foo'}, {'name': 'bar'}]
     flexmock(module).should_receive('make_environment').and_return({'PGSSLMODE': 'disable'})
     flexmock(module).should_receive('make_dump_path').and_return('')
@@ -356,9 +344,6 @@ def test_dump_data_sources_with_duplicate_dump_skips_pg_dump():
 def test_dump_data_sources_with_dry_run_skips_pg_dump():
-    flexmock(module.borgmatic.hooks.command).should_receive('Before_after_hooks').and_return(
-        flexmock()
-    )
     databases = [{'name': 'foo'}, {'name': 'bar'}]
     flexmock(module).should_receive('make_environment').and_return({'PGSSLMODE': 'disable'})
     flexmock(module).should_receive('make_dump_path').and_return('')
@@ -389,9 +374,6 @@ def test_dump_data_sources_with_dry_run_skips_pg_dump():
 def test_dump_data_sources_runs_pg_dump_with_hostname_and_port():
-    flexmock(module.borgmatic.hooks.command).should_receive('Before_after_hooks').and_return(
-        flexmock()
-    )
     databases = [{'name': 'foo', 'hostname': 'database.example.org', 'port': 5433}]
     process = flexmock()
     flexmock(module).should_receive('make_environment').and_return({'PGSSLMODE': 'disable'})
@@ -438,9 +420,6 @@ def test_dump_data_sources_runs_pg_dump_with_hostname_and_port():
 def test_dump_data_sources_runs_pg_dump_with_username_and_password():
-    flexmock(module.borgmatic.hooks.command).should_receive('Before_after_hooks').and_return(
-        flexmock()
-    )
     databases = [{'name': 'foo', 'username': 'postgres', 'password': 'trustsome1'}]
     process = flexmock()
     flexmock(module).should_receive('make_environment').and_return(
@@ -487,9 +466,6 @@ def test_dump_data_sources_runs_pg_dump_with_username_and_password():
 def test_dump_data_sources_with_username_injection_attack_gets_escaped():
-    flexmock(module.borgmatic.hooks.command).should_receive('Before_after_hooks').and_return(
-        flexmock()
-    )
     databases = [{'name': 'foo', 'username': 'postgres; naughty-command', 'password': 'trustsome1'}]
     process = flexmock()
     flexmock(module).should_receive('make_environment').and_return(
@@ -536,9 +512,6 @@ def test_dump_data_sources_with_username_injection_attack_gets_escaped():
 def test_dump_data_sources_runs_pg_dump_with_directory_format():
-    flexmock(module.borgmatic.hooks.command).should_receive('Before_after_hooks').and_return(
-        flexmock()
-    )
     databases = [{'name': 'foo', 'format': 'directory'}]
     flexmock(module).should_receive('make_environment').and_return({'PGSSLMODE': 'disable'})
     flexmock(module).should_receive('make_dump_path').and_return('')
@@ -583,9 +556,6 @@ def test_dump_data_sources_runs_pg_dump_with_directory_format():
 def test_dump_data_sources_runs_pg_dump_with_string_compression():
-    flexmock(module.borgmatic.hooks.command).should_receive('Before_after_hooks').and_return(
-        flexmock()
-    )
     databases = [{'name': 'foo', 'compression': 'winrar'}]
     processes = [flexmock()]
     flexmock(module).should_receive('make_environment').and_return({'PGSSLMODE': 'disable'})
@@ -633,9 +603,6 @@ def test_dump_data_sources_runs_pg_dump_with_string_compression():
 def test_dump_data_sources_runs_pg_dump_with_integer_compression():
-    flexmock(module.borgmatic.hooks.command).should_receive('Before_after_hooks').and_return(
-        flexmock()
-    )
     databases = [{'name': 'foo', 'compression': 0}]
     processes = [flexmock()]
     flexmock(module).should_receive('make_environment').and_return({'PGSSLMODE': 'disable'})
@@ -683,9 +650,6 @@ def test_dump_data_sources_runs_pg_dump_with_integer_compression():
 def test_dump_data_sources_runs_pg_dump_with_options():
-    flexmock(module.borgmatic.hooks.command).should_receive('Before_after_hooks').and_return(
-        flexmock()
-    )
     databases = [{'name': 'foo', 'options': '--stuff=such'}]
     process = flexmock()
     flexmock(module).should_receive('make_environment').and_return({'PGSSLMODE': 'disable'})
@@ -729,9 +693,6 @@ def test_dump_data_sources_runs_pg_dump_with_options():
 def test_dump_data_sources_runs_pg_dumpall_for_all_databases():
-    flexmock(module.borgmatic.hooks.command).should_receive('Before_after_hooks').and_return(
-        flexmock()
-    )
     databases = [{'name': 'all'}]
     process = flexmock()
     flexmock(module).should_receive('make_environment').and_return({'PGSSLMODE': 'disable'})
@@ -764,9 +725,6 @@ def test_dump_data_sources_runs_pg_dumpall_for_all_databases():
 def test_dump_data_sources_runs_non_default_pg_dump():
-    flexmock(module.borgmatic.hooks.command).should_receive('Before_after_hooks').and_return(
-        flexmock()
-    )
     databases = [{'name': 'foo', 'pg_dump_command': 'special_pg_dump --compress *'}]
     process = flexmock()
     flexmock(module).should_receive('make_environment').and_return({'PGSSLMODE': 'disable'})
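The PostgreSQL tests above repeatedly stub a make_environment helper to return {'PGSSLMODE': 'disable'}. As a rough illustration only (not borgmatic's implementation; the ssl_mode key here is invented for the example), such a helper maps database options onto standard libpq environment variables like PGPASSWORD and PGSSLMODE, which a pg_dump child process then inherits:

    import os


    def make_environment_sketch(database):
        # Copy the parent environment so PATH and similar variables survive.
        environment = dict(os.environ)

        # PGPASSWORD and PGSSLMODE are standard libpq environment variables.
        if 'password' in database:
            environment['PGPASSWORD'] = database['password']

        environment['PGSSLMODE'] = database.get('ssl_mode', 'disable')

        return environment


    environment = make_environment_sketch(
        {'name': 'foo', 'username': 'postgres', 'password': 'trustsome1'}
    )
    assert environment['PGPASSWORD'] == 'trustsome1'
    assert environment['PGSSLMODE'] == 'disable'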
@@ -17,9 +17,6 @@ def test_use_streaming_false_for_no_databases():
 def test_dump_data_sources_logs_and_skips_if_dump_already_exists():
-    flexmock(module.borgmatic.hooks.command).should_receive('Before_after_hooks').and_return(
-        flexmock()
-    )
     databases = [{'path': '/path/to/database', 'name': 'database'}]
     flexmock(module).should_receive('make_dump_path').and_return('/run/borgmatic')
@@ -44,9 +41,6 @@ def test_dump_data_sources_logs_and_skips_if_dump_already_exists():
 def test_dump_data_sources_dumps_each_database():
-    flexmock(module.borgmatic.hooks.command).should_receive('Before_after_hooks').and_return(
-        flexmock()
-    )
     databases = [
         {'path': '/path/to/database1', 'name': 'database1'},
         {'path': '/path/to/database2', 'name': 'database2'},
@@ -77,9 +71,6 @@ def test_dump_data_sources_dumps_each_database():
 def test_dump_data_sources_with_path_injection_attack_gets_escaped():
-    flexmock(module.borgmatic.hooks.command).should_receive('Before_after_hooks').and_return(
-        flexmock()
-    )
     databases = [
         {'path': '/path/to/database1; naughty-command', 'name': 'database1'},
     ]
@@ -117,9 +108,6 @@ def test_dump_data_sources_with_path_injection_attack_gets_escaped():
 def test_dump_data_sources_runs_non_default_sqlite_with_path_injection_attack_gets_escaped():
-    flexmock(module.borgmatic.hooks.command).should_receive('Before_after_hooks').and_return(
-        flexmock()
-    )
     databases = [
         {
             'path': '/path/to/database1; naughty-command',
@@ -162,9 +150,6 @@ def test_dump_data_sources_runs_non_default_sqlite_with_path_injection_attack_gets_escaped():
 def test_dump_data_sources_with_non_existent_path_warns_and_dumps_database():
-    flexmock(module.borgmatic.hooks.command).should_receive('Before_after_hooks').and_return(
-        flexmock()
-    )
     databases = [
         {'path': '/path/to/database1', 'name': 'database1'},
     ]
@@ -193,9 +178,6 @@ def test_dump_data_sources_with_non_existent_path_warns_and_dumps_database():
 def test_dump_data_sources_with_name_all_warns_and_dumps_all_databases():
-    flexmock(module.borgmatic.hooks.command).should_receive('Before_after_hooks').and_return(
-        flexmock()
-    )
     databases = [
         {'path': '/path/to/database1', 'name': 'all'},
     ]
@@ -226,9 +208,6 @@ def test_dump_data_sources_with_name_all_warns_and_dumps_all_databases():
 def test_dump_data_sources_does_not_dump_if_dry_run():
-    flexmock(module.borgmatic.hooks.command).should_receive('Before_after_hooks').and_return(
-        flexmock()
-    )
     databases = [{'path': '/path/to/database', 'name': 'database'}]
     flexmock(module).should_receive('make_dump_path').and_return('/run/borgmatic')
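Several tests above (the *_path_injection_attack_gets_escaped ones) feed a database path containing '; naughty-command' and assert that it cannot break out of the dump command line. Here is a minimal sketch of that defense using the standard library's shlex.quote; borgmatic's own escaping mechanism may differ, and the helper name is invented for the example:

    import shlex


    def build_sqlite_dump_command_sketch(database_path, dump_path):
        # shlex.quote() wraps each operand in single quotes, so shell
        # metacharacters like ';' stay inside the argument instead of
        # starting a second command.
        return 'sqlite3 {} .dump > {}'.format(
            shlex.quote(database_path), shlex.quote(dump_path)
        )


    command = build_sqlite_dump_command_sketch(
        '/path/to/database1; naughty-command', '/run/borgmatic/database1'
    )
    # The hostile path survives as a single, harmless argument.
    assert shlex.split(command)[1] == '/path/to/database1; naughty-command'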
@@ -296,9 +296,6 @@ def test_make_borg_snapshot_pattern_includes_slashdot_hack_and_stripped_pattern_
 def test_dump_data_sources_snapshots_and_mounts_and_updates_patterns():
-    flexmock(module.borgmatic.hooks.command).should_receive('Before_after_hooks').and_return(
-        flexmock()
-    )
     dataset = flexmock(
         name='dataset',
         mount_point='/mnt/dataset',
@@ -341,9 +338,6 @@ def test_dump_data_sources_snapshots_and_mounts_and_updates_patterns():
 def test_dump_data_sources_with_no_datasets_skips_snapshots():
-    flexmock(module.borgmatic.hooks.command).should_receive('Before_after_hooks').and_return(
-        flexmock()
-    )
     flexmock(module).should_receive('get_datasets_to_backup').and_return(())
     flexmock(module.os).should_receive('getpid').and_return(1234)
     flexmock(module).should_receive('snapshot_dataset').never()
@@ -366,9 +360,6 @@ def test_dump_data_sources_with_no_datasets_skips_snapshots():
 def test_dump_data_sources_uses_custom_commands():
-    flexmock(module.borgmatic.hooks.command).should_receive('Before_after_hooks').and_return(
-        flexmock()
-    )
     dataset = flexmock(
         name='dataset',
         mount_point='/mnt/dataset',
@@ -418,9 +409,6 @@ def test_dump_data_sources_uses_custom_commands():
 def test_dump_data_sources_with_dry_run_skips_commands_and_does_not_touch_patterns():
-    flexmock(module.borgmatic.hooks.command).should_receive('Before_after_hooks').and_return(
-        flexmock()
-    )
     flexmock(module).should_receive('get_datasets_to_backup').and_return(
         (flexmock(name='dataset', mount_point='/mnt/dataset'),)
     )
@@ -445,9 +433,6 @@ def test_dump_data_sources_with_dry_run_skips_commands_and_does_not_touch_patterns():
 def test_dump_data_sources_ignores_mismatch_between_given_patterns_and_contained_patterns():
-    flexmock(module.borgmatic.hooks.command).should_receive('Before_after_hooks').and_return(
-        flexmock()
-    )
     dataset = flexmock(
         name='dataset',
         mount_point='/mnt/dataset',
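The final hunk below removes the parametrized filter_hooks cases that exercised the "dump_data_sources" trigger and its per-hook 'hooks' filtering. For orientation, this sketch reproduces the action-name filtering semantics that the last removed case encodes (an illustration only, not borgmatic's actual filter_hooks): a hook runs when its trigger matches and its 'when' list, absent meaning all actions, intersects the requested action names.

    def filter_hooks_sketch(command_hooks, before=None, action_names=None):
        # Keep hooks whose trigger matches and whose 'when' list (absent
        # means "all actions") shares at least one requested action name.
        return tuple(
            hook_config
            for hook_config in command_hooks or ()
            if hook_config.get('before') == before
            and (
                action_names is None
                or hook_config.get('when') is None
                or set(hook_config['when']) & set(action_names)
            )
        )


    assert filter_hooks_sketch(
        (
            {'before': 'action', 'when': ['create'], 'run': ['foo']},
            {'before': 'action', 'when': ['prune'], 'run': ['bar']},
            {'before': 'action', 'when': ['compact'], 'run': ['baz']},
        ),
        before='action',
        action_names=['create', 'compact', 'extract'],
    ) == (
        {'before': 'action', 'when': ['create'], 'run': ['foo']},
        {'before': 'action', 'when': ['compact'], 'run': ['baz']},
    )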
@@ -133,121 +133,6 @@ def test_make_environment_with_pyinstaller_and_LD_LIBRARY_PATH_ORIG_copies_it_in
                 },
             ),
         ),
-        (
-            (
-                {
-                    'before': 'dump_data_sources',
-                    'hooks': ['postgresql'],
-                    'run': ['foo'],
-                },
-                {
-                    'before': 'dump_data_sources',
-                    'hooks': ['lvm'],
-                    'run': ['bar'],
-                },
-                {
-                    'after': 'dump_data_sources',
-                    'hooks': ['lvm'],
-                    'run': ['baz'],
-                },
-            ),
-            {
-                'before': 'dump_data_sources',
-                'hook_name': 'lvm',
-            },
-            (
-                {
-                    'before': 'dump_data_sources',
-                    'hooks': ['lvm'],
-                    'run': ['bar'],
-                },
-            ),
-        ),
-        (
-            (
-                {
-                    'before': 'dump_data_sources',
-                    'run': ['foo'],
-                },
-                {
-                    'before': 'dump_data_sources',
-                    'run': ['bar'],
-                },
-                {
-                    'after': 'dump_data_sources',
-                    'run': ['baz'],
-                },
-            ),
-            {
-                'before': 'dump_data_sources',
-                'hook_name': 'lvm',
-            },
-            (
-                {
-                    'before': 'dump_data_sources',
-                    'run': ['foo'],
-                },
-                {
-                    'before': 'dump_data_sources',
-                    'run': ['bar'],
-                },
-            ),
-        ),
-        (
-            (
-                {
-                    'before': 'dump_data_sources',
-                    'hooks': ['postgresql', 'zfs', 'lvm'],
-                    'run': ['foo'],
-                },
-            ),
-            {
-                'before': 'dump_data_sources',
-                'hook_name': 'lvm',
-            },
-            (
-                {
-                    'before': 'dump_data_sources',
-                    'hooks': ['postgresql', 'zfs', 'lvm'],
-                    'run': ['foo'],
-                },
-            ),
-        ),
-        (
-            (
-                {
-                    'before': 'action',
-                    'when': ['create'],
-                    'run': ['foo'],
-                },
-                {
-                    'before': 'action',
-                    'when': ['prune'],
-                    'run': ['bar'],
-                },
-                {
-                    'before': 'action',
-                    'when': ['compact'],
-                    'run': ['baz'],
-                },
-            ),
-            {
-                'before': 'action',
-                'action_names': ['create', 'compact', 'extract'],
-            },
-            (
-                {
-                    'before': 'action',
-                    'when': ['create'],
-                    'run': ['foo'],
-                },
-                {
-                    'before': 'action',
-                    'when': ['compact'],
-                    'run': ['baz'],
-                },
-            ),
-        ),
         (
             (
                 {