
Log the repository path or label on every relevant log message, not just some logs (#635).

Dan Helfman 2025-01-25 14:01:25 -08:00
parent 1232ba8045
commit cc7e01be68
53 changed files with 403 additions and 558 deletions

NEWS
View file

@ -1,3 +1,6 @@
1.9.9.dev0
* #635: Log the repository path or label on every relevant log message, not just some logs.
1.9.8
* #979: Fix root patterns so they don't have an invalid "sh:" prefix before getting passed to Borg.
* Expand the recent contributors documentation section to include ticket submitters—not just code contributors.
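
The mechanism behind this change: instead of formatting a repository label or configuration filename into each message, borgmatic now sets a process-wide prefix via set_log_prefix() and lets the logging machinery attach it. The actual borgmatic.logger implementation isn't shown in this commit; a minimal sketch of the idea, assuming a module-level variable and a logging.Filter attached to a handler, might look like this:

import logging

_log_prefix = None

def set_log_prefix(prefix):
    # Set a global prefix (or None) applied to all subsequent log records.
    global _log_prefix
    _log_prefix = prefix

class Log_prefix_filter(logging.Filter):
    def filter(self, record):
        # Prepend the prefix to the record's format string so every record
        # emitted through this handler carries the prefixed message.
        if _log_prefix:
            record.msg = f'{_log_prefix}: {record.msg}'
        return True

handler = logging.StreamHandler()
handler.addFilter(Log_prefix_filter())
logging.getLogger().addHandler(handler)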

View file

@ -23,7 +23,7 @@ def run_borg(
repository, borg_arguments.repository
):
logger.info(
f'{repository.get("label", repository["path"])}: Running arbitrary Borg command'
'Running arbitrary Borg command'
)
archive_name = borgmatic.borg.repo_list.resolve_archive_name(
repository['path'],

View file

@ -22,7 +22,7 @@ def run_break_lock(
repository, break_lock_arguments.repository
):
logger.info(
f'{repository.get("label", repository["path"])}: Breaking repository and cache locks'
'Breaking repository and cache locks'
)
borgmatic.borg.break_lock.break_lock(
repository['path'],

View file

@ -25,7 +25,7 @@ def run_change_passphrase(
)
):
logger.info(
f'{repository.get("label", repository["path"])}: Changing repository passphrase'
'Changing repository passphrase'
)
borgmatic.borg.change_passphrase.change_passphrase(
repository['path'],

View file

@ -363,7 +363,6 @@ def collect_spot_check_source_paths(
borgmatic.hooks.dispatch.call_hooks(
'use_streaming',
config,
repository['path'],
borgmatic.hooks.dispatch.Hook_type.DATA_SOURCE,
).values()
)
@ -468,15 +467,14 @@ def compare_spot_check_hashes(
global_arguments,
local_path,
remote_path,
log_prefix,
source_paths,
):
'''
Given a repository configuration dict, the name of the latest archive, a configuration dict, the
local Borg version, global arguments as an argparse.Namespace instance, the local Borg path, the
remote Borg path, a log label, and spot check source paths, compare the hashes for a sampling of
the source paths with hashes from corresponding paths in the given archive. Return a sequence of
the paths that fail that hash comparison.
remote Borg path, and spot check source paths, compare the hashes for a sampling of the source
paths with hashes from corresponding paths in the given archive. Return a sequence of the paths
that fail that hash comparison.
'''
# Based on the configured sample percentage, come up with a list of random sample files from the
# source directories.
@ -492,7 +490,7 @@ def compare_spot_check_hashes(
if os.path.exists(os.path.join(working_directory or '', source_path))
}
logger.debug(
f'{log_prefix}: Sampling {sample_count} source paths (~{spot_check_config["data_sample_percentage"]}%) for spot check'
f'Sampling {sample_count} source paths (~{spot_check_config["data_sample_percentage"]}%) for spot check'
)
source_sample_paths_iterator = iter(source_sample_paths)
@ -580,8 +578,7 @@ def spot_check(
disk to those stored in the latest archive. If any differences are beyond configured tolerances,
then the check fails.
'''
log_prefix = f'{repository.get("label", repository["path"])}'
logger.debug(f'{log_prefix}: Running spot check')
logger.debug('Running spot check')
try:
spot_check_config = next(
@ -604,7 +601,7 @@ def spot_check(
remote_path,
borgmatic_runtime_directory,
)
logger.debug(f'{log_prefix}: {len(source_paths)} total source paths for spot check')
logger.debug(f'{len(source_paths)} total source paths for spot check')
archive = borgmatic.borg.repo_list.resolve_archive_name(
repository['path'],
@ -615,7 +612,7 @@ def spot_check(
local_path,
remote_path,
)
logger.debug(f'{log_prefix}: Using archive {archive} for spot check')
logger.debug(f'Using archive {archive} for spot check')
archive_paths = collect_spot_check_archive_paths(
repository,
@ -627,11 +624,11 @@ def spot_check(
remote_path,
borgmatic_runtime_directory,
)
logger.debug(f'{log_prefix}: {len(archive_paths)} total archive paths for spot check')
logger.debug(f'{len(archive_paths)} total archive paths for spot check')
if len(source_paths) == 0:
logger.debug(
f'{log_prefix}: Paths in latest archive but not source paths: {", ".join(set(archive_paths)) or "none"}'
f'Paths in latest archive but not source paths: {", ".join(set(archive_paths)) or "none"}'
)
raise ValueError(
'Spot check failed: There are no source paths to compare against the archive'
@ -644,10 +641,10 @@ def spot_check(
if count_delta_percentage > spot_check_config['count_tolerance_percentage']:
rootless_source_paths = set(path.lstrip(os.path.sep) for path in source_paths)
logger.debug(
f'{log_prefix}: Paths in source paths but not latest archive: {", ".join(rootless_source_paths - set(archive_paths)) or "none"}'
f'Paths in source paths but not latest archive: {", ".join(rootless_source_paths - set(archive_paths)) or "none"}'
)
logger.debug(
f'{log_prefix}: Paths in latest archive but not source paths: {", ".join(set(archive_paths) - rootless_source_paths) or "none"}'
f'Paths in latest archive but not source paths: {", ".join(set(archive_paths) - rootless_source_paths) or "none"}'
)
raise ValueError(
f'Spot check failed: {count_delta_percentage:.2f}% file count delta between source paths and latest archive (tolerance is {spot_check_config["count_tolerance_percentage"]}%)'
@ -661,25 +658,24 @@ def spot_check(
global_arguments,
local_path,
remote_path,
log_prefix,
source_paths,
)
# Error if the percentage of failing hashes exceeds the configured tolerance percentage.
logger.debug(f'{log_prefix}: {len(failing_paths)} non-matching spot check hashes')
logger.debug(f'{len(failing_paths)} non-matching spot check hashes')
data_tolerance_percentage = spot_check_config['data_tolerance_percentage']
failing_percentage = (len(failing_paths) / len(source_paths)) * 100
if failing_percentage > data_tolerance_percentage:
logger.debug(
f'{log_prefix}: Source paths with data not matching the latest archive: {", ".join(failing_paths)}'
f'Source paths with data not matching the latest archive: {", ".join(failing_paths)}'
)
raise ValueError(
f'Spot check failed: {failing_percentage:.2f}% of source paths with data not matching the latest archive (tolerance is {data_tolerance_percentage}%)'
)
logger.info(
f'{log_prefix}: Spot check passed with a {count_delta_percentage:.2f}% file count delta and a {failing_percentage:.2f}% file data delta'
f'Spot check passed with a {count_delta_percentage:.2f}% file count delta and a {failing_percentage:.2f}% file data delta'
)
@ -713,8 +709,7 @@ def run_check(
**hook_context,
)
log_prefix = repository.get('label', repository['path'])
logger.info(f'{log_prefix}: Running consistency checks')
logger.info('Running consistency checks')
repository_id = borgmatic.borg.check.get_repository_id(
repository['path'],
@ -768,7 +763,7 @@ def run_check(
if 'spot' in checks:
with borgmatic.config.paths.Runtime_directory(
config, log_prefix
config
) as borgmatic_runtime_directory:
spot_check(
repository,

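As a worked example of the count tolerance logic above, assuming the delta is the absolute difference in path counts relative to the source path count (the names mirror the diff; the numbers are made up):

# Hypothetical spot check count math; illustrative values only.
source_paths = ('/etc/a', '/etc/b', '/etc/c', '/etc/d')
archive_paths = ('etc/a', 'etc/b', 'etc/c')

count_delta_percentage = abs(len(source_paths) - len(archive_paths)) / len(source_paths) * 100
print(count_delta_percentage)  # 25.0: fails a count_tolerance_percentage of 10, passes 30.
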
View file

@ -38,7 +38,7 @@ def run_compact(
)
if borgmatic.borg.feature.available(borgmatic.borg.feature.Feature.COMPACT, local_borg_version):
logger.info(
f'{repository.get("label", repository["path"])}: Compacting segments{dry_run_label}'
f'Compacting segments{dry_run_label}'
)
borgmatic.borg.compact.compact_segments(
global_arguments.dry_run,
@ -54,8 +54,9 @@ def run_compact(
)
else: # pragma: nocover
logger.info(
f'{repository.get("label", repository["path"])}: Skipping compact (only available/needed in Borg 1.2+)'
'Skipping compact (only available/needed in Borg 1.2+)'
)
borgmatic.hooks.command.execute_hook(
config.get('after_compact'),
config.get('umask'),

View file

@ -45,7 +45,6 @@ def get_config_paths(archive_name, bootstrap_arguments, global_arguments, local_
# still want to support reading the manifest from previously created archives as well.
with borgmatic.config.paths.Runtime_directory(
{'user_runtime_directory': bootstrap_arguments.user_runtime_directory},
bootstrap_arguments.repository,
) as borgmatic_runtime_directory:
for base_directory in (
'borgmatic',

View file

@ -283,17 +283,15 @@ def run_create(
**hook_context,
)
log_prefix = repository.get('label', repository['path'])
logger.info(f'{log_prefix}: Creating archive{dry_run_label}')
logger.info(f'Creating archive{dry_run_label}')
working_directory = borgmatic.config.paths.get_working_directory(config)
with borgmatic.config.paths.Runtime_directory(
config, log_prefix
config
) as borgmatic_runtime_directory:
borgmatic.hooks.dispatch.call_hooks_even_if_unconfigured(
'remove_data_source_dumps',
config,
repository['path'],
borgmatic.hooks.dispatch.Hook_type.DATA_SOURCE,
borgmatic_runtime_directory,
global_arguments.dry_run,
@ -302,7 +300,6 @@ def run_create(
active_dumps = borgmatic.hooks.dispatch.call_hooks(
'dump_data_sources',
config,
repository['path'],
borgmatic.hooks.dispatch.Hook_type.DATA_SOURCE,
config_paths,
borgmatic_runtime_directory,
@ -339,7 +336,6 @@ def run_create(
borgmatic.hooks.dispatch.call_hooks_even_if_unconfigured(
'remove_data_source_dumps',
config,
config_filename,
borgmatic.hooks.dispatch.Hook_type.DATA_SOURCE,
borgmatic_runtime_directory,
global_arguments.dry_run,

View file

@ -23,7 +23,7 @@ def run_delete(
if delete_arguments.repository is None or borgmatic.config.validate.repositories_match(
repository, delete_arguments.repository
):
logger.answer(f'{repository.get("label", repository["path"])}: Deleting archives')
logger.answer('Deleting archives')
archive_name = (
borgmatic.borg.repo_list.resolve_archive_name(

View file

@ -21,7 +21,7 @@ def run_export_key(
if export_arguments.repository is None or borgmatic.config.validate.repositories_match(
repository, export_arguments.repository
):
logger.info(f'{repository.get("label", repository["path"])}: Exporting repository key')
logger.info('Exporting repository key')
borgmatic.borg.export_key.export_key(
repository['path'],
config,

View file

@ -23,7 +23,7 @@ def run_export_tar(
repository, export_tar_arguments.repository
):
logger.info(
f'{repository["path"]}: Exporting archive {export_tar_arguments.archive} as tar file'
f'Exporting archive {export_tar_arguments.archive} as tar file'
)
borgmatic.borg.export_tar.export_tar_archive(
global_arguments.dry_run,

View file

@ -34,7 +34,7 @@ def run_extract(
repository, extract_arguments.repository
):
logger.info(
f'{repository.get("label", repository["path"])}: Extracting archive {extract_arguments.archive}'
f'Extracting archive {extract_arguments.archive}'
)
borgmatic.borg.extract.extract_archive(
global_arguments.dry_run,

View file

@ -28,7 +28,7 @@ def run_info(
):
if not info_arguments.json:
logger.answer(
f'{repository.get("label", repository["path"])}: Displaying archive summary information'
'Displaying archive summary information'
)
archive_name = borgmatic.borg.repo_list.resolve_archive_name(
repository['path'],

View file

@ -27,9 +27,9 @@ def run_list(
):
if not list_arguments.json:
if list_arguments.find_paths: # pragma: no cover
logger.answer(f'{repository.get("label", repository["path"])}: Searching archives')
logger.answer('Searching archives')
elif not list_arguments.archive: # pragma: no cover
logger.answer(f'{repository.get("label", repository["path"])}: Listing archives')
logger.answer('Listing archives')
archive_name = borgmatic.borg.repo_list.resolve_archive_name(
repository['path'],

View file

@ -24,10 +24,10 @@ def run_mount(
):
if mount_arguments.archive:
logger.info(
f'{repository.get("label", repository["path"])}: Mounting archive {mount_arguments.archive}'
f'Mounting archive {mount_arguments.archive}'
)
else: # pragma: nocover
logger.info(f'{repository.get("label", repository["path"])}: Mounting repository')
logger.info('Mounting repository')
borgmatic.borg.mount.mount_archive(
repository['path'],

View file

@ -35,7 +35,7 @@ def run_prune(
global_arguments.dry_run,
**hook_context,
)
logger.info(f'{repository.get("label", repository["path"])}: Pruning archives{dry_run_label}')
logger.info(f'Pruning archives{dry_run_label}')
borgmatic.borg.prune.prune_archives(
global_arguments.dry_run,
repository['path'],

View file

@ -23,7 +23,7 @@ def run_repo_create(
):
return
logger.info(f'{repository.get("label", repository["path"])}: Creating repository')
logger.info('Creating repository')
borgmatic.borg.repo_create.create_repository(
global_arguments.dry_run,
repository['path'],

View file

@ -21,7 +21,7 @@ def run_repo_delete(
repository, repo_delete_arguments.repository
):
logger.answer(
f'{repository.get("label", repository["path"])}: Deleting repository'
'Deleting repository'
+ (' cache' if repo_delete_arguments.cache_only else '')
)

View file

@ -26,7 +26,7 @@ def run_repo_info(
):
if not repo_info_arguments.json:
logger.answer(
f'{repository.get("label", repository["path"])}: Displaying repository summary information'
'Displaying repository summary information'
)
json_output = borgmatic.borg.repo_info.display_repository_info(

View file

@ -25,7 +25,7 @@ def run_repo_list(
repository, repo_list_arguments.repository
):
if not repo_list_arguments.json:
logger.answer(f'{repository.get("label", repository["path"])}: Listing repository')
logger.answer('Listing repository')
json_output = borgmatic.borg.repo_list.list_repository(
repository['path'],

View file

@ -71,10 +71,10 @@ def render_dump_metadata(dump):
return metadata
def get_configured_data_source(config, restore_dump, log_prefix):
def get_configured_data_source(config, restore_dump):
'''
Search in the given configuration dict for dumps corresponding to the given dump to restore. If
there are multiple matches, error. Log using the given log prefix.
there are multiple matches, error.
Return the found data source as a data source configuration dict or None if not found.
'''
@ -91,7 +91,6 @@ def get_configured_data_source(config, restore_dump, log_prefix):
borgmatic.hooks.dispatch.call_hook(
function_name='get_default_port',
config=config,
log_prefix=log_prefix,
hook_name=hook_name,
),
)
@ -174,13 +173,12 @@ def restore_single_dump(
)
logger.info(
f'{repository.get("label", repository["path"])}: Restoring data source {dump_metadata}'
f'Restoring data source {dump_metadata}'
)
dump_patterns = borgmatic.hooks.dispatch.call_hooks(
'make_data_source_dump_patterns',
config,
repository['path'],
borgmatic.hooks.dispatch.Hook_type.DATA_SOURCE,
borgmatic_runtime_directory,
data_source['name'],
@ -227,7 +225,6 @@ def restore_single_dump(
borgmatic.hooks.dispatch.call_hook(
function_name='restore_data_source_dump',
config=config,
log_prefix=repository['path'],
hook_name=hook_name,
data_source=data_source,
dry_run=global_arguments.dry_run,
@ -319,7 +316,7 @@ def collect_dumps_from_archive(
break
else:
logger.warning(
f'{repository}: Ignoring invalid data source dump path "{dump_path}" in archive {archive}'
f'Ignoring invalid data source dump path "{dump_path}" in archive {archive}'
)
return dumps_from_archive
@ -444,16 +441,14 @@ def run_restore(
):
return
log_prefix = repository.get('label', repository['path'])
logger.info(f'{log_prefix}: Restoring data sources from archive {restore_arguments.archive}')
logger.info(f'Restoring data sources from archive {restore_arguments.archive}')
with borgmatic.config.paths.Runtime_directory(
config, log_prefix
config
) as borgmatic_runtime_directory:
borgmatic.hooks.dispatch.call_hooks_even_if_unconfigured(
'remove_data_source_dumps',
config,
repository['path'],
borgmatic.hooks.dispatch.Hook_type.DATA_SOURCE,
borgmatic_runtime_directory,
global_arguments.dry_run,
@ -494,7 +489,6 @@ def run_restore(
found_data_source = get_configured_data_source(
config,
restore_dump,
log_prefix=repository['path'],
)
# For a dump that wasn't found via an exact match in the configuration, try to fallback
@ -503,7 +497,6 @@ def run_restore(
found_data_source = get_configured_data_source(
config,
Dump(restore_dump.hook_name, 'all', restore_dump.hostname, restore_dump.port),
log_prefix=repository['path'],
)
if not found_data_source:
@ -531,7 +524,6 @@ def run_restore(
borgmatic.hooks.dispatch.call_hooks_even_if_unconfigured(
'remove_data_source_dumps',
config,
repository['path'],
borgmatic.hooks.dispatch.Hook_type.DATA_SOURCE,
borgmatic_runtime_directory,
global_arguments.dry_run,

View file

@ -18,7 +18,7 @@ def run_transfer(
Run the "transfer" action for the given repository.
'''
logger.info(
f'{repository.get("label", repository["path"])}: Transferring archives to repository'
'Transferring archives to repository'
)
borgmatic.borg.transfer.transfer_archives(
global_arguments.dry_run,

View file

@ -20,14 +20,12 @@ from borgmatic.execute import (
logger = logging.getLogger(__name__)
def write_patterns_file(patterns, borgmatic_runtime_directory, log_prefix, patterns_file=None):
def write_patterns_file(patterns, borgmatic_runtime_directory, patterns_file=None):
'''
Given a sequence of patterns as borgmatic.borg.pattern.Pattern instances, write them to a named
temporary file in the given borgmatic runtime directory and return the file object so it can
continue to exist on disk as long as the caller needs it.
Use the given log prefix in any logging.
If an optional open pattern file is given, append to it instead of making a new temporary file.
Return None if no patterns are provided.
'''
@ -43,7 +41,7 @@ def write_patterns_file(patterns, borgmatic_runtime_directory, log_prefix, patte
f'{pattern.type.value} {pattern.style.value}{":" if pattern.style.value else ""}{pattern.path}'
for pattern in patterns
)
logger.debug(f'{log_prefix}: Writing patterns to {patterns_file.name}:\n{patterns_output}')
logger.debug(f'Writing patterns to {patterns_file.name}:\n{patterns_output}')
patterns_file.write(patterns_output)
patterns_file.flush()
@ -217,9 +215,7 @@ def make_base_create_command(
if config.get('source_directories_must_exist', False):
check_all_root_patterns_exist(patterns)
patterns_file = write_patterns_file(
patterns, borgmatic_runtime_directory, log_prefix=repository_path
)
patterns_file = write_patterns_file(patterns, borgmatic_runtime_directory)
checkpoint_interval = config.get('checkpoint_interval', None)
checkpoint_volume = config.get('checkpoint_volume', None)
chunker_params = config.get('chunker_params', None)
@ -334,7 +330,6 @@ def make_base_create_command(
for special_file_path in special_file_paths
),
borgmatic_runtime_directory,
log_prefix=repository_path,
patterns_file=patterns_file,
)

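For context on write_patterns_file()'s behavior, here's a self-contained sketch under simplified assumptions (the real borgmatic.borg.pattern.Pattern carries type and style enums, and the real function also supports appending to an existing open file):

import collections
import tempfile

Pattern = collections.namedtuple('Pattern', ('type', 'path'))  # simplified stand-in

def write_patterns_file(patterns, borgmatic_runtime_directory):
    if not patterns:
        return None
    # Returning the NamedTemporaryFile keeps it alive (and on disk) for as
    # long as the caller holds a reference to it.
    patterns_file = tempfile.NamedTemporaryFile('w', dir=borgmatic_runtime_directory)
    patterns_file.write('\n'.join(f'{pattern.type} {pattern.path}' for pattern in patterns))
    patterns_file.flush()
    return patterns_file
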
View file

@ -39,7 +39,7 @@ from borgmatic.commands.arguments import parse_arguments
from borgmatic.config import checks, collect, validate
from borgmatic.hooks import command, dispatch
from borgmatic.hooks.monitoring import monitor
from borgmatic.logger import DISABLED, add_custom_log_levels, configure_logging, should_do_markup
from borgmatic.logger import DISABLED, add_custom_log_levels, configure_logging, should_do_markup, set_log_prefix
from borgmatic.signals import configure_signals
from borgmatic.verbosity import verbosity_to_log_level
@ -86,12 +86,12 @@ def run_configuration(config_filename, config, config_paths, arguments):
if skip_actions:
logger.debug(
f"{config_filename}: Skipping {'/'.join(skip_actions)} action{'s' if len(skip_actions) > 1 else ''} due to configured skip_actions"
f"Skipping {'/'.join(skip_actions)} action{'s' if len(skip_actions) > 1 else ''} due to configured skip_actions"
)
try:
local_borg_version = borg_version.local_borg_version(config, local_path)
logger.debug(f'{config_filename}: Borg {local_borg_version}')
logger.debug(f'Borg {local_borg_version}')
except (OSError, CalledProcessError, ValueError) as error:
yield from log_error_records(f'{config_filename}: Error getting local Borg version', error)
return
@ -101,7 +101,6 @@ def run_configuration(config_filename, config, config_paths, arguments):
dispatch.call_hooks(
'initialize_monitor',
config,
config_filename,
dispatch.Hook_type.MONITORING,
monitoring_log_level,
global_arguments.dry_run,
@ -110,14 +109,13 @@ def run_configuration(config_filename, config, config_paths, arguments):
dispatch.call_hooks(
'ping_monitor',
config,
config_filename,
dispatch.Hook_type.MONITORING,
monitor.State.START,
monitoring_log_level,
global_arguments.dry_run,
)
except (OSError, CalledProcessError) as error:
if command.considered_soft_failure(config_filename, error):
if command.considered_soft_failure(error):
return
encountered_error = error
@ -130,55 +128,59 @@ def run_configuration(config_filename, config, config_paths, arguments):
(repo, 0),
)
while not repo_queue.empty():
repository, retry_num = repo_queue.get()
logger.debug(
f'{repository.get("label", repository["path"])}: Running actions for repository'
)
timeout = retry_num * retry_wait
if timeout:
logger.warning(
f'{repository.get("label", repository["path"])}: Sleeping {timeout}s before next retry'
try:
while not repo_queue.empty():
repository, retry_num = repo_queue.get()
set_log_prefix(repository.get('label', repository['path']))
logger.debug(
'Running actions for repository'
)
time.sleep(timeout)
try:
yield from run_actions(
arguments=arguments,
config_filename=config_filename,
config=config,
config_paths=config_paths,
local_path=local_path,
remote_path=remote_path,
local_borg_version=local_borg_version,
repository=repository,
)
except (OSError, CalledProcessError, ValueError) as error:
if retry_num < retries:
repo_queue.put(
(repository, retry_num + 1),
)
tuple( # Consume the generator so as to trigger logging.
log_error_records(
f'{repository.get("label", repository["path"])}: Error running actions for repository',
error,
levelno=logging.WARNING,
log_command_error_output=True,
)
)
timeout = retry_num * retry_wait
if timeout:
logger.warning(
f'{repository.get("label", repository["path"])}: Retrying... attempt {retry_num + 1}/{retries}'
f'Sleeping {timeout}s before next retry'
)
continue
time.sleep(timeout)
try:
yield from run_actions(
arguments=arguments,
config_filename=config_filename,
config=config,
config_paths=config_paths,
local_path=local_path,
remote_path=remote_path,
local_borg_version=local_borg_version,
repository=repository,
)
except (OSError, CalledProcessError, ValueError) as error:
if retry_num < retries:
repo_queue.put(
(repository, retry_num + 1),
)
tuple( # Consume the generator so as to trigger logging.
log_error_records(
f'{repository.get("label", repository["path"])}: Error running actions for repository',
error,
levelno=logging.WARNING,
log_command_error_output=True,
)
)
logger.warning(
f'Retrying... attempt {retry_num + 1}/{retries}'
)
continue
if command.considered_soft_failure(config_filename, error):
continue
if command.considered_soft_failure(error):
continue
yield from log_error_records(
f'{repository.get("label", repository["path"])}: Error running actions for repository',
error,
)
encountered_error = error
error_repository = repository['path']
yield from log_error_records(
'Error running actions for repository',
error,
)
encountered_error = error
error_repository = repository['path']
finally:
set_log_prefix(config_filename)
try:
if monitoring_hooks_are_activated:
@ -186,14 +188,13 @@ def run_configuration(config_filename, config, config_paths, arguments):
dispatch.call_hooks(
'ping_monitor',
config,
config_filename,
dispatch.Hook_type.MONITORING,
monitor.State.LOG,
monitoring_log_level,
global_arguments.dry_run,
)
except (OSError, CalledProcessError) as error:
if not command.considered_soft_failure(config_filename, error):
if not command.considered_soft_failure(error):
encountered_error = error
yield from log_error_records(f'{repository["path"]}: Error pinging monitor', error)
@ -203,7 +204,6 @@ def run_configuration(config_filename, config, config_paths, arguments):
dispatch.call_hooks(
'ping_monitor',
config,
config_filename,
dispatch.Hook_type.MONITORING,
monitor.State.FINISH,
monitoring_log_level,
@ -212,13 +212,12 @@ def run_configuration(config_filename, config, config_paths, arguments):
dispatch.call_hooks(
'destroy_monitor',
config,
config_filename,
dispatch.Hook_type.MONITORING,
monitoring_log_level,
global_arguments.dry_run,
)
except (OSError, CalledProcessError) as error:
if command.considered_soft_failure(config_filename, error):
if command.considered_soft_failure(error):
return
encountered_error = error
@ -239,7 +238,6 @@ def run_configuration(config_filename, config, config_paths, arguments):
dispatch.call_hooks(
'ping_monitor',
config,
config_filename,
dispatch.Hook_type.MONITORING,
monitor.State.FAIL,
monitoring_log_level,
@ -248,13 +246,12 @@ def run_configuration(config_filename, config, config_paths, arguments):
dispatch.call_hooks(
'destroy_monitor',
config,
config_filename,
dispatch.Hook_type.MONITORING,
monitoring_log_level,
global_arguments.dry_run,
)
except (OSError, CalledProcessError) as error:
if command.considered_soft_failure(config_filename, error):
if command.considered_soft_failure(error):
return
yield from log_error_records(f'{config_filename}: Error running on-error hook', error)
@ -819,23 +816,28 @@ def collect_configuration_run_summary_logs(configs, config_paths, arguments):
# Execute the actions corresponding to each configuration file.
json_results = []
for config_filename, config in configs.items():
results = list(run_configuration(config_filename, config, config_paths, arguments))
error_logs = tuple(result for result in results if isinstance(result, logging.LogRecord))
if error_logs:
yield from log_error_records(f'{config_filename}: An error occurred')
yield from error_logs
else:
yield logging.makeLogRecord(
dict(
levelno=logging.INFO,
levelname='INFO',
msg=f'{config_filename}: Successfully ran configuration file',
try:
for config_filename, config in configs.items():
set_log_prefix(config_filename)
results = list(run_configuration(config_filename, config, config_paths, arguments))
error_logs = tuple(result for result in results if isinstance(result, logging.LogRecord))
if error_logs:
yield from log_error_records('An error occurred')
yield from error_logs
else:
yield logging.makeLogRecord(
dict(
levelno=logging.INFO,
levelname='INFO',
msg='Successfully ran configuration file',
)
)
)
if results:
json_results.extend(results)
if results:
json_results.extend(results)
finally:
set_log_prefix(None)
if 'umount' in arguments:
logger.info(f"Unmounting mount point {arguments['umount'].mount_point}")
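
The explicit try/finally around each repository above restores the prefix once a repository's actions finish. The same pattern could be wrapped in a context manager; a hypothetical convenience sketch, not part of this commit:

import contextlib

from borgmatic.logger import set_log_prefix

@contextlib.contextmanager
def log_prefix(prefix, previous_prefix=None):
    # Set the prefix for the duration of the block, then restore the
    # caller-supplied previous prefix (None clears it).
    set_log_prefix(prefix)
    try:
        yield
    finally:
        set_log_prefix(previous_prefix)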

View file

@ -76,14 +76,13 @@ class Runtime_directory:
automatically gets cleaned up as necessary.
'''
def __init__(self, config, log_prefix):
def __init__(self, config):
'''
Given a configuration dict and a log prefix, determine the borgmatic runtime directory,
creating a secure, temporary directory within it if necessary. Defaults to
$XDG_RUNTIME_DIR/./borgmatic or $RUNTIME_DIRECTORY/./borgmatic or
$TMPDIR/borgmatic-[random]/./borgmatic or $TEMP/borgmatic-[random]/./borgmatic or
/tmp/borgmatic-[random]/./borgmatic where "[random]" is a randomly generated string intended
to avoid path collisions.
Given a configuration dict, determine the borgmatic runtime directory, creating a secure,
temporary directory within it if necessary. Defaults to $XDG_RUNTIME_DIR/./borgmatic or
$RUNTIME_DIRECTORY/./borgmatic or $TMPDIR/borgmatic-[random]/./borgmatic or
$TEMP/borgmatic-[random]/./borgmatic or /tmp/borgmatic-[random]/./borgmatic where "[random]"
is a randomly generated string intended to avoid path collisions.
If XDG_RUNTIME_DIR or RUNTIME_DIRECTORY is set and already ends in "/borgmatic", then don't
tack on a second "/borgmatic" path component.
@ -127,7 +126,7 @@ class Runtime_directory:
)
os.makedirs(self.runtime_path, mode=0o700, exist_ok=True)
logger.debug(f'{log_prefix}: Using runtime directory {os.path.normpath(self.runtime_path)}')
logger.debug(f'Using runtime directory {os.path.normpath(self.runtime_path)}')
def __enter__(self):
'''

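With the log prefix argument gone, constructing the runtime directory takes only the configuration dict; for example (user_runtime_directory as documented above, path illustrative):

import borgmatic.config.paths

config = {'user_runtime_directory': '/tmp/borgmatic-example'}

with borgmatic.config.paths.Runtime_directory(config) as borgmatic_runtime_directory:
    print(borgmatic_runtime_directory)  # e.g. /tmp/borgmatic-example/./borgmatic
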
View file

@ -12,7 +12,7 @@ logger = logging.getLogger(__name__)
SOFT_FAIL_EXIT_CODE = 75
def interpolate_context(config_filename, hook_description, command, context):
def interpolate_context(hook_description, command, context):
'''
Given a config filename, a hook description, a single hook command, and a dict of context
names/values, interpolate the values by "{name}" into the command and return the result.
@ -22,7 +22,7 @@ def interpolate_context(config_filename, hook_description, command, context):
for unsupported_variable in re.findall(r'{\w+}', command):
logger.warning(
f"{config_filename}: Variable '{unsupported_variable}' is not supported in {hook_description} hook"
f"Variable '{unsupported_variable}' is not supported in {hook_description} hook"
)
return command
@ -54,26 +54,26 @@ def execute_hook(commands, umask, config_filename, description, dry_run, **conte
Raise subprocess.CalledProcessError if an error occurs in a hook.
'''
if not commands:
logger.debug(f'{config_filename}: No commands to run for {description} hook')
logger.debug(f'No commands to run for {description} hook')
return
dry_run_label = ' (dry run; not actually running hooks)' if dry_run else ''
context['configuration_filename'] = config_filename
commands = [
interpolate_context(config_filename, description, command, context) for command in commands
interpolate_context(description, command, context) for command in commands
]
if len(commands) == 1:
logger.info(f'{config_filename}: Running command for {description} hook{dry_run_label}')
logger.info(f'Running command for {description} hook{dry_run_label}')
else:
logger.info(
f'{config_filename}: Running {len(commands)} commands for {description} hook{dry_run_label}',
f'Running {len(commands)} commands for {description} hook{dry_run_label}',
)
if umask:
parsed_umask = int(str(umask), 8)
logger.debug(f'{config_filename}: Set hook umask to {oct(parsed_umask)}')
logger.debug(f'Set hook umask to {oct(parsed_umask)}')
original_umask = os.umask(parsed_umask)
else:
original_umask = None
@ -94,7 +94,7 @@ def execute_hook(commands, umask, config_filename, description, dry_run, **conte
os.umask(original_umask)
def considered_soft_failure(config_filename, error):
def considered_soft_failure(error):
'''
Given a configuration filename and an exception object, return whether the exception object
represents a subprocess.CalledProcessError with a return code of SOFT_FAIL_EXIT_CODE. If so,
@ -106,7 +106,7 @@ def considered_soft_failure(config_filename, error):
if exit_code == SOFT_FAIL_EXIT_CODE:
logger.info(
f'{config_filename}: Command hook exited with soft failure exit code ({SOFT_FAIL_EXIT_CODE}); skipping remaining repository actions',
f'Command hook exited with soft failure exit code ({SOFT_FAIL_EXIT_CODE}); skipping remaining repository actions',
)
return True

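To make the soft failure convention concrete: a hook that exits with SOFT_FAIL_EXIT_CODE (75) causes borgmatic to skip the remaining actions for that repository rather than report an error. A minimal demonstration of the check, with an illustrative error object:

import subprocess

SOFT_FAIL_EXIT_CODE = 75

error = subprocess.CalledProcessError(SOFT_FAIL_EXIT_CODE, 'before_backup hook')
exit_code = getattr(error, 'returncode', None)
print(exit_code == SOFT_FAIL_EXIT_CODE)  # True: treated as a soft failure, not an error.
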
View file

@ -10,7 +10,7 @@ import borgmatic.config.paths
logger = logging.getLogger(__name__)
def use_streaming(hook_config, config, log_prefix): # pragma: no cover
def use_streaming(hook_config, config): # pragma: no cover
'''
Return whether dump streaming is used for this hook. (Spoiler: It isn't.)
'''
@ -20,18 +20,17 @@ def use_streaming(hook_config, config, log_prefix): # pragma: no cover
def dump_data_sources(
hook_config,
config,
log_prefix,
config_paths,
borgmatic_runtime_directory,
patterns,
dry_run,
):
'''
Given a bootstrap configuration dict, a configuration dict, a log prefix, the borgmatic
configuration file paths, the borgmatic runtime directory, the configured patterns, and whether
this is a dry run, create a borgmatic manifest file to store the paths of the configuration
files used to create the archive. But skip this if the bootstrap store_config_files option is
False or if this is a dry run.
Given a bootstrap configuration dict, a configuration dict, the borgmatic configuration file
paths, the borgmatic runtime directory, the configured patterns, and whether this is a dry run,
create a borgmatic manifest file to store the paths of the configuration files used to create
the archive. But skip this if the bootstrap store_config_files option is False or if this is a
dry run.
Return an empty sequence, since there are no ongoing dump processes from this hook.
'''
@ -64,11 +63,11 @@ def dump_data_sources(
return []
def remove_data_source_dumps(hook_config, config, log_prefix, borgmatic_runtime_directory, dry_run):
def remove_data_source_dumps(hook_config, config, borgmatic_runtime_directory, dry_run):
'''
Given a bootstrap configuration dict, a configuration dict, a log prefix, the borgmatic runtime
directory, and whether this is a dry run, then remove the manifest file created above. If this
is a dry run, then don't actually remove anything.
Given a bootstrap configuration dict, a configuration dict, the borgmatic runtime directory, and
whether this is a dry run, then remove the manifest file created above. If this is a dry run,
then don't actually remove anything.
'''
dry_run_label = ' (dry run; not actually removing anything)' if dry_run else ''
@ -79,13 +78,13 @@ def remove_data_source_dumps(hook_config, config, log_prefix, borgmatic_runtime_
'bootstrap',
)
logger.debug(
f'{log_prefix}: Looking for bootstrap manifest files to remove in {manifest_glob}{dry_run_label}'
f'Looking for bootstrap manifest files to remove in {manifest_glob}{dry_run_label}'
)
for manifest_directory in glob.glob(manifest_glob):
manifest_file_path = os.path.join(manifest_directory, 'manifest.json')
logger.debug(
f'{log_prefix}: Removing bootstrap manifest at {manifest_file_path}{dry_run_label}'
f'Removing bootstrap manifest at {manifest_file_path}{dry_run_label}'
)
if dry_run:
@ -103,7 +102,7 @@ def remove_data_source_dumps(hook_config, config, log_prefix, borgmatic_runtime_
def make_data_source_dump_patterns(
hook_config, config, log_prefix, borgmatic_runtime_directory, name=None
hook_config, config, borgmatic_runtime_directory, name=None
): # pragma: no cover
'''
Restores are implemented via the separate, purpose-specific "bootstrap" action rather than the
@ -115,7 +114,6 @@ def make_data_source_dump_patterns(
def restore_data_source_dump(
hook_config,
config,
log_prefix,
data_source,
dry_run,
extract_process,

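Every data source hook (bootstrap, Btrfs, LVM, MariaDB, MongoDB, MySQL) now shares this log_prefix-free interface. A hypothetical stand-in showing the post-change shape of dump_data_sources() (the body is illustrative, not borgmatic's):

import logging

logger = logging.getLogger(__name__)

def dump_data_sources(hook_config, config, config_paths, borgmatic_runtime_directory, patterns, dry_run):
    # Logging relies on the globally set prefix; no prefix argument is threaded through.
    logger.info('Dumping data sources' + (' (dry run; not actually dumping anything)' if dry_run else ''))
    return []  # No ongoing dump processes in this sketch.
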
View file

@ -14,7 +14,7 @@ import borgmatic.hooks.data_source.snapshot
logger = logging.getLogger(__name__)
def use_streaming(hook_config, config, log_prefix): # pragma: no cover
def use_streaming(hook_config, config): # pragma: no cover
'''
Return whether dump streaming is used for this hook. (Spoiler: It isn't.)
'''
@ -211,26 +211,24 @@ def snapshot_subvolume(btrfs_command, subvolume_path, snapshot_path): # pragma:
def dump_data_sources(
hook_config,
config,
log_prefix,
config_paths,
borgmatic_runtime_directory,
patterns,
dry_run,
):
'''
Given a Btrfs configuration dict, a configuration dict, a log prefix, the borgmatic
configuration file paths, the borgmatic runtime directory, the configured patterns, and whether
this is a dry run, auto-detect and snapshot any Btrfs subvolume mount points listed in the given
patterns. Also update those patterns, replacing subvolume mount points with corresponding
snapshot directories so they get stored in the Borg archive instead. Use the log prefix in any
log entries.
Given a Btrfs configuration dict, a configuration dict, the borgmatic configuration file paths,
the borgmatic runtime directory, the configured patterns, and whether this is a dry run,
auto-detect and snapshot any Btrfs subvolume mount points listed in the given patterns. Also
update those patterns, replacing subvolume mount points with corresponding snapshot directories
so they get stored in the Borg archive instead.
Return an empty sequence, since there are no ongoing dump processes from this hook.
If this is a dry run, then don't actually snapshot anything.
'''
dry_run_label = ' (dry run; not actually snapshotting anything)' if dry_run else ''
logger.info(f'{log_prefix}: Snapshotting Btrfs subvolumes{dry_run_label}')
logger.info(f'Snapshotting Btrfs subvolumes{dry_run_label}')
# Based on the configured patterns, determine Btrfs subvolumes to backup.
btrfs_command = hook_config.get('btrfs_command', 'btrfs')
@ -238,11 +236,11 @@ def dump_data_sources(
subvolumes = get_subvolumes(btrfs_command, findmnt_command, patterns)
if not subvolumes:
logger.warning(f'{log_prefix}: No Btrfs subvolumes found to snapshot{dry_run_label}')
logger.warning(f'No Btrfs subvolumes found to snapshot{dry_run_label}')
# Snapshot each subvolume, rewriting patterns to use their snapshot paths.
for subvolume in subvolumes:
logger.debug(f'{log_prefix}: Creating Btrfs snapshot for {subvolume.path} subvolume')
logger.debug(f'Creating Btrfs snapshot for {subvolume.path} subvolume')
snapshot_path = make_snapshot_path(subvolume.path)
@ -280,12 +278,11 @@ def delete_snapshot(btrfs_command, snapshot_path): # pragma: no cover
)
def remove_data_source_dumps(hook_config, config, log_prefix, borgmatic_runtime_directory, dry_run):
def remove_data_source_dumps(hook_config, config, borgmatic_runtime_directory, dry_run):
'''
Given a Btrfs configuration dict, a configuration dict, a log prefix, the borgmatic runtime
directory, and whether this is a dry run, delete any Btrfs snapshots created by borgmatic. Use
the log prefix in any log entries. If this is a dry run or Btrfs isn't configured in borgmatic's
configuration, then don't actually remove anything.
Given a Btrfs configuration dict, a configuration dict, the borgmatic runtime directory, and
whether this is a dry run, delete any Btrfs snapshots created by borgmatic. If this is a dry run
or Btrfs isn't configured in borgmatic's configuration, then don't actually remove anything.
'''
if hook_config is None:
return
@ -298,10 +295,10 @@ def remove_data_source_dumps(hook_config, config, log_prefix, borgmatic_runtime_
try:
all_subvolumes = get_subvolumes(btrfs_command, findmnt_command)
except FileNotFoundError as error:
logger.debug(f'{log_prefix}: Could not find "{error.filename}" command')
logger.debug(f'Could not find "{error.filename}" command')
return
except subprocess.CalledProcessError as error:
logger.debug(f'{log_prefix}: {error}')
logger.debug(error)
return
# Reversing the sorted subvolumes ensures that we remove longer mount point paths of child
@ -313,14 +310,14 @@ def remove_data_source_dumps(hook_config, config, log_prefix, borgmatic_runtime_
)
logger.debug(
f'{log_prefix}: Looking for snapshots to remove in {subvolume_snapshots_glob}{dry_run_label}'
f'Looking for snapshots to remove in {subvolume_snapshots_glob}{dry_run_label}'
)
for snapshot_path in glob.glob(subvolume_snapshots_glob):
if not os.path.isdir(snapshot_path):
continue
logger.debug(f'{log_prefix}: Deleting Btrfs snapshot {snapshot_path}{dry_run_label}')
logger.debug(f'Deleting Btrfs snapshot {snapshot_path}{dry_run_label}')
if dry_run:
continue
@ -328,10 +325,10 @@ def remove_data_source_dumps(hook_config, config, log_prefix, borgmatic_runtime_
try:
delete_snapshot(btrfs_command, snapshot_path)
except FileNotFoundError:
logger.debug(f'{log_prefix}: Could not find "{btrfs_command}" command')
logger.debug(f'Could not find "{btrfs_command}" command')
return
except subprocess.CalledProcessError as error:
logger.debug(f'{log_prefix}: {error}')
logger.debug(error)
return
# Strip off the subvolume path from the end of the snapshot path and then delete the
@ -340,7 +337,7 @@ def remove_data_source_dumps(hook_config, config, log_prefix, borgmatic_runtime_
def make_data_source_dump_patterns(
hook_config, config, log_prefix, borgmatic_runtime_directory, name=None
hook_config, config, borgmatic_runtime_directory, name=None
): # pragma: no cover
'''
Restores aren't implemented, because stored files can be extracted directly with "extract".
@ -351,7 +348,6 @@ def make_data_source_dump_patterns(
def restore_data_source_dump(
hook_config,
config,
log_prefix,
data_source,
dry_run,
extract_process,

View file

@ -46,14 +46,14 @@ def create_named_pipe_for_dump(dump_path):
os.mkfifo(dump_path, mode=0o600)
def remove_data_source_dumps(dump_path, data_source_type_name, log_prefix, dry_run):
def remove_data_source_dumps(dump_path, data_source_type_name, dry_run):
'''
Remove all data source dumps in the given dump directory path (including the directory itself).
If this is a dry run, then don't actually remove anything.
'''
dry_run_label = ' (dry run; not actually removing anything)' if dry_run else ''
logger.debug(f'{log_prefix}: Removing {data_source_type_name} data source dumps{dry_run_label}')
logger.debug(f'Removing {data_source_type_name} data source dumps{dry_run_label}')
if dry_run:
return

View file

@ -14,7 +14,7 @@ import borgmatic.hooks.data_source.snapshot
logger = logging.getLogger(__name__)
def use_streaming(hook_config, config, log_prefix): # pragma: no cover
def use_streaming(hook_config, config): # pragma: no cover
'''
Return whether dump streaming is used for this hook. (Spoiler: It isn't.)
'''
@ -161,26 +161,24 @@ DEFAULT_SNAPSHOT_SIZE = '10%ORIGIN'
def dump_data_sources(
hook_config,
config,
log_prefix,
config_paths,
borgmatic_runtime_directory,
patterns,
dry_run,
):
'''
Given an LVM configuration dict, a configuration dict, a log prefix, the borgmatic configuration
file paths, the borgmatic runtime directory, the configured patterns, and whether this is a dry
run, auto-detect and snapshot any LVM logical volume mount points listed in the given patterns.
Also update those patterns, replacing logical volume mount points with corresponding snapshot
directories so they get stored in the Borg archive instead. Use the log prefix in any log
entries.
Given an LVM configuration dict, a configuration dict, the borgmatic configuration file paths,
the borgmatic runtime directory, the configured patterns, and whether this is a dry run,
auto-detect and snapshot any LVM logical volume mount points listed in the given patterns. Also
update those patterns, replacing logical volume mount points with corresponding snapshot
directories so they get stored in the Borg archive instead.
Return an empty sequence, since there are no ongoing dump processes from this hook.
If this is a dry run, then don't actually snapshot anything.
'''
dry_run_label = ' (dry run; not actually snapshotting anything)' if dry_run else ''
logger.info(f'{log_prefix}: Snapshotting LVM logical volumes{dry_run_label}')
logger.info(f'Snapshotting LVM logical volumes{dry_run_label}')
# List logical volumes to get their mount points.
lsblk_command = hook_config.get('lsblk_command', 'lsblk')
@ -191,12 +189,12 @@ def dump_data_sources(
normalized_runtime_directory = os.path.normpath(borgmatic_runtime_directory)
if not requested_logical_volumes:
logger.warning(f'{log_prefix}: No LVM logical volumes found to snapshot{dry_run_label}')
logger.warning(f'No LVM logical volumes found to snapshot{dry_run_label}')
for logical_volume in requested_logical_volumes:
snapshot_name = f'{logical_volume.name}_{snapshot_suffix}'
logger.debug(
f'{log_prefix}: Creating LVM snapshot {snapshot_name} of {logical_volume.mount_point}{dry_run_label}'
f'Creating LVM snapshot {snapshot_name} of {logical_volume.mount_point}{dry_run_label}'
)
if not dry_run:
@ -224,7 +222,7 @@ def dump_data_sources(
)
logger.debug(
f'{log_prefix}: Mounting LVM snapshot {snapshot_name} at {snapshot_mount_path}{dry_run_label}'
f'Mounting LVM snapshot {snapshot_name} at {snapshot_mount_path}{dry_run_label}'
)
if dry_run:
@ -312,12 +310,12 @@ def get_snapshots(lvs_command, snapshot_name=None):
raise ValueError(f'Invalid {lvs_command} output: Missing key "{error}"')
def remove_data_source_dumps(hook_config, config, log_prefix, borgmatic_runtime_directory, dry_run):
def remove_data_source_dumps(hook_config, config, borgmatic_runtime_directory, dry_run):
'''
Given an LVM configuration dict, a configuration dict, a log prefix, the borgmatic runtime
directory, and whether this is a dry run, unmount and delete any LVM snapshots created by
borgmatic. Use the log prefix in any log entries. If this is a dry run or LVM isn't configured
in borgmatic's configuration, then don't actually remove anything.
Given an LVM configuration dict, a configuration dict, the borgmatic runtime directory, and
whether this is a dry run, unmount and delete any LVM snapshots created by borgmatic. If this is
a dry run or LVM isn't configured in borgmatic's configuration, then don't actually remove
anything.
'''
if hook_config is None:
return
@ -328,10 +326,10 @@ def remove_data_source_dumps(hook_config, config, log_prefix, borgmatic_runtime_
try:
logical_volumes = get_logical_volumes(hook_config.get('lsblk_command', 'lsblk'))
except FileNotFoundError as error:
logger.debug(f'{log_prefix}: Could not find "{error.filename}" command')
logger.debug(f'Could not find "{error.filename}" command')
return
except subprocess.CalledProcessError as error:
logger.debug(f'{log_prefix}: {error}')
logger.debug(error)
return
snapshots_glob = os.path.join(
@ -341,7 +339,7 @@ def remove_data_source_dumps(hook_config, config, log_prefix, borgmatic_runtime_
'lvm_snapshots',
)
logger.debug(
f'{log_prefix}: Looking for snapshots to remove in {snapshots_glob}{dry_run_label}'
f'Looking for snapshots to remove in {snapshots_glob}{dry_run_label}'
)
umount_command = hook_config.get('umount_command', 'umount')
@ -367,7 +365,7 @@ def remove_data_source_dumps(hook_config, config, log_prefix, borgmatic_runtime_
continue
logger.debug(
f'{log_prefix}: Unmounting LVM snapshot at {snapshot_mount_path}{dry_run_label}'
f'Unmounting LVM snapshot at {snapshot_mount_path}{dry_run_label}'
)
if dry_run:
@ -376,10 +374,10 @@ def remove_data_source_dumps(hook_config, config, log_prefix, borgmatic_runtime_
try:
unmount_snapshot(umount_command, snapshot_mount_path)
except FileNotFoundError:
logger.debug(f'{log_prefix}: Could not find "{umount_command}" command')
logger.debug(f'Could not find "{umount_command}" command')
return
except subprocess.CalledProcessError as error:
logger.debug(f'{log_prefix}: {error}')
logger.debug(error)
return
if not dry_run:
@ -391,10 +389,10 @@ def remove_data_source_dumps(hook_config, config, log_prefix, borgmatic_runtime_
try:
snapshots = get_snapshots(hook_config.get('lvs_command', 'lvs'))
except FileNotFoundError as error:
logger.debug(f'{log_prefix}: Could not find "{error.filename}" command')
logger.debug(f'Could not find "{error.filename}" command')
return
except subprocess.CalledProcessError as error:
logger.debug(f'{log_prefix}: {error}')
logger.debug(error)
return
for snapshot in snapshots:
@ -402,14 +400,14 @@ def remove_data_source_dumps(hook_config, config, log_prefix, borgmatic_runtime_
if not snapshot.name.split('_')[-1].startswith(BORGMATIC_SNAPSHOT_PREFIX):
continue
logger.debug(f'{log_prefix}: Deleting LVM snapshot {snapshot.name}{dry_run_label}')
logger.debug(f'Deleting LVM snapshot {snapshot.name}{dry_run_label}')
if not dry_run:
remove_snapshot(lvremove_command, snapshot.device_path)
def make_data_source_dump_patterns(
hook_config, config, log_prefix, borgmatic_runtime_directory, name=None
hook_config, config, borgmatic_runtime_directory, name=None
): # pragma: no cover
'''
Restores aren't implemented, because stored files can be extracted directly with "extract".
@ -420,7 +418,6 @@ def make_data_source_dump_patterns(
def restore_data_source_dump(
hook_config,
config,
log_prefix,
data_source,
dry_run,
extract_process,

View file

@ -25,7 +25,7 @@ def make_dump_path(base_directory): # pragma: no cover
SYSTEM_DATABASE_NAMES = ('information_schema', 'mysql', 'performance_schema', 'sys')
def database_names_to_dump(database, extra_environment, log_prefix, dry_run):
def database_names_to_dump(database, extra_environment, dry_run):
'''
Given a requested database config, return the corresponding sequence of database names to dump.
In the case of "all", query for the names of databases on the configured host and return them,
@ -49,7 +49,7 @@ def database_names_to_dump(database, extra_environment, log_prefix, dry_run):
+ ('--skip-column-names', '--batch')
+ ('--execute', 'show schemas')
)
logger.debug(f'{log_prefix}: Querying for "all" MariaDB databases to dump')
logger.debug('Querying for "all" MariaDB databases to dump')
show_output = execute_command_and_capture_output(
show_command, extra_environment=extra_environment
)
@ -62,12 +62,11 @@ def database_names_to_dump(database, extra_environment, log_prefix, dry_run):
def execute_dump_command(
database, log_prefix, dump_path, database_names, extra_environment, dry_run, dry_run_label
database, dump_path, database_names, extra_environment, dry_run, dry_run_label
):
'''
Kick off a dump for the given MariaDB database (provided as a configuration dict) to a named
pipe constructed from the given dump path and database name. Use the given log prefix in any
log entries.
pipe constructed from the given dump path and database name.
Return a subprocess.Popen instance for the dump process ready to spew to a named pipe. But if
this is a dry run, then don't actually dump anything and return None.
@ -82,7 +81,7 @@ def execute_dump_command(
if os.path.exists(dump_filename):
logger.warning(
f'{log_prefix}: Skipping duplicate dump of MariaDB database "{database_name}" to {dump_filename}'
f'Skipping duplicate dump of MariaDB database "{database_name}" to {dump_filename}'
)
return None
@ -104,7 +103,7 @@ def execute_dump_command(
)
logger.debug(
f'{log_prefix}: Dumping MariaDB database "{database_name}" to {dump_filename}{dry_run_label}'
f'Dumping MariaDB database "{database_name}" to {dump_filename}{dry_run_label}'
)
if dry_run:
return None
@ -118,14 +117,14 @@ def execute_dump_command(
)
def get_default_port(databases, config, log_prefix): # pragma: no cover
def get_default_port(databases, config): # pragma: no cover
return 3306
def use_streaming(databases, config, log_prefix):
def use_streaming(databases, config):
'''
Given a sequence of MariaDB database configuration dicts, a configuration dict (ignored), and a
log prefix (ignored), return whether streaming will be using during dumps.
Given a sequence of MariaDB database configuration dicts and a configuration dict (ignored), return
whether streaming will be used during dumps.
'''
return any(databases)
@ -133,7 +132,6 @@ def use_streaming(databases, config, log_prefix):
def dump_data_sources(
databases,
config,
log_prefix,
config_paths,
borgmatic_runtime_directory,
patterns,
@ -142,8 +140,7 @@ def dump_data_sources(
'''
Dump the given MariaDB databases to a named pipe. The databases are supplied as a sequence of
dicts, one dict describing each database as per the configuration schema. Use the given
borgmatic runtime directory to construct the destination path and the given log prefix in any
log entries.
borgmatic runtime directory to construct the destination path.
Return a sequence of subprocess.Popen instances for the dump processes ready to spew to a named
pipe. But if this is a dry run, then don't actually dump anything and return an empty sequence.
@ -153,13 +150,13 @@ def dump_data_sources(
dry_run_label = ' (dry run; not actually dumping anything)' if dry_run else ''
processes = []
logger.info(f'{log_prefix}: Dumping MariaDB databases{dry_run_label}')
logger.info(f'Dumping MariaDB databases{dry_run_label}')
for database in databases:
dump_path = make_dump_path(borgmatic_runtime_directory)
extra_environment = {'MYSQL_PWD': database['password']} if 'password' in database else None
dump_database_names = database_names_to_dump(
database, extra_environment, log_prefix, dry_run
database, extra_environment, dry_run
)
if not dump_database_names:
@ -175,7 +172,6 @@ def dump_data_sources(
processes.append(
execute_dump_command(
renamed_database,
log_prefix,
dump_path,
(dump_name,),
extra_environment,
@ -187,7 +183,6 @@ def dump_data_sources(
processes.append(
execute_dump_command(
database,
log_prefix,
dump_path,
dump_database_names,
extra_environment,
@ -207,25 +202,25 @@ def dump_data_sources(
def remove_data_source_dumps(
databases, config, log_prefix, borgmatic_runtime_directory, dry_run
databases, config, borgmatic_runtime_directory, dry_run
): # pragma: no cover
'''
Remove all database dump files for this hook regardless of the given databases. Use the
borgmatic_runtime_directory to construct the destination path and the log prefix in any log
entries. If this is a dry run, then don't actually remove anything.
borgmatic_runtime_directory to construct the destination path. If this is a dry run, then don't
actually remove anything.
'''
dump.remove_data_source_dumps(
make_dump_path(borgmatic_runtime_directory), 'MariaDB', log_prefix, dry_run
make_dump_path(borgmatic_runtime_directory), 'MariaDB', dry_run
)
def make_data_source_dump_patterns(
databases, config, log_prefix, borgmatic_runtime_directory, name=None
databases, config, borgmatic_runtime_directory, name=None
): # pragma: no cover
'''
Given a sequence of configurations dicts, a configuration dict, a prefix to log with, the
borgmatic runtime directory, and a database name to match, return the corresponding glob
patterns to match the database dump in an archive.
Given a sequence of configuration dicts, a configuration dict, the borgmatic runtime directory,
and a database name to match, return the corresponding glob patterns to match the database dump
in an archive.
'''
borgmatic_source_directory = borgmatic.config.paths.get_borgmatic_source_directory(config)
@ -243,7 +238,6 @@ def make_data_source_dump_patterns(
def restore_data_source_dump(
hook_config,
config,
log_prefix,
data_source,
dry_run,
extract_process,
@ -252,9 +246,9 @@ def restore_data_source_dump(
):
'''
Restore a database from the given extract stream. The database is supplied as a data source
configuration dict, but the given hook configuration is ignored. The given log prefix is used
for any log entries. If this is a dry run, then don't actually restore anything. Trigger the
given active extract process (an instance of subprocess.Popen) to produce output to consume.
configuration dict, but the given hook configuration is ignored. If this is a dry run, then
don't actually restore anything. Trigger the given active extract process (an instance of
subprocess.Popen) to produce output to consume.
'''
dry_run_label = ' (dry run; not actually restoring anything)' if dry_run else ''
hostname = connection_params['hostname'] or data_source.get(
@ -288,7 +282,7 @@ def restore_data_source_dump(
)
extra_environment = {'MYSQL_PWD': password} if password else None
logger.debug(f"{log_prefix}: Restoring MariaDB database {data_source['name']}{dry_run_label}")
logger.debug(f"Restoring MariaDB database {data_source['name']}{dry_run_label}")
if dry_run:
return

View file

@ -17,14 +17,14 @@ def make_dump_path(base_directory): # pragma: no cover
return dump.make_data_source_dump_path(base_directory, 'mongodb_databases')
def get_default_port(databases, config, log_prefix): # pragma: no cover
def get_default_port(databases, config): # pragma: no cover
return 27017
def use_streaming(databases, config, log_prefix):
def use_streaming(databases, config):
'''
Given a sequence of MongoDB database configuration dicts, a configuration dict (ignored), and a
log prefix (ignored), return whether streaming will be using during dumps.
Given a sequence of MongoDB database configuration dicts and a configuration dict (ignored), return
whether streaming will be used during dumps.
'''
return any(database.get('format') != 'directory' for database in databases)
@ -32,7 +32,6 @@ def use_streaming(databases, config, log_prefix):
def dump_data_sources(
databases,
config,
log_prefix,
config_paths,
borgmatic_runtime_directory,
patterns,
@ -41,8 +40,7 @@ def dump_data_sources(
'''
Dump the given MongoDB databases to a named pipe. The databases are supplied as a sequence of
dicts, one dict describing each database as per the configuration schema. Use the borgmatic
runtime directory to construct the destination path (used for the directory format and the given
log prefix in any log entries.
runtime directory to construct the destination path (used for the directory format).
Return a sequence of subprocess.Popen instances for the dump processes ready to spew to a named
pipe. But if this is a dry run, then don't actually dump anything and return an empty sequence.
@ -51,7 +49,7 @@ def dump_data_sources(
'''
dry_run_label = ' (dry run; not actually dumping anything)' if dry_run else ''
logger.info(f'{log_prefix}: Dumping MongoDB databases{dry_run_label}')
logger.info(f'Dumping MongoDB databases{dry_run_label}')
processes = []
for database in databases:
@ -65,7 +63,7 @@ def dump_data_sources(
dump_format = database.get('format', 'archive')
logger.debug(
f'{log_prefix}: Dumping MongoDB database {name} to {dump_filename}{dry_run_label}',
f'Dumping MongoDB database {name} to {dump_filename}{dry_run_label}',
)
if dry_run:
continue
@ -118,25 +116,25 @@ def build_dump_command(database, dump_filename, dump_format):
def remove_data_source_dumps(
databases, config, log_prefix, borgmatic_runtime_directory, dry_run
databases, config, borgmatic_runtime_directory, dry_run
): # pragma: no cover
'''
Remove all database dump files for this hook regardless of the given databases. Use the
borgmatic_runtime_directory to construct the destination path and the log prefix in any log
entries. If this is a dry run, then don't actually remove anything.
borgmatic_runtime_directory to construct the destination path. If this is a dry run, then don't
actually remove anything.
'''
dump.remove_data_source_dumps(
make_dump_path(borgmatic_runtime_directory), 'MongoDB', log_prefix, dry_run
make_dump_path(borgmatic_runtime_directory), 'MongoDB', dry_run
)
def make_data_source_dump_patterns(
databases, config, log_prefix, borgmatic_runtime_directory, name=None
databases, config, borgmatic_runtime_directory, name=None
): # pragma: no cover
'''
Given a sequence of configurations dicts, a configuration dict, a prefix to log with, the
borgmatic runtime directory, and a database name to match, return the corresponding glob
patterns to match the database dump in an archive.
Given a sequence of configuration dicts, a configuration dict, the borgmatic runtime directory,
and a database name to match, return the corresponding glob patterns to match the database dump
in an archive.
'''
borgmatic_source_directory = borgmatic.config.paths.get_borgmatic_source_directory(config)
@ -154,7 +152,6 @@ def make_data_source_dump_patterns(
def restore_data_source_dump(
hook_config,
config,
log_prefix,
data_source,
dry_run,
extract_process,
@ -164,9 +161,9 @@ def restore_data_source_dump(
'''
Restore a database from the given extract stream. The database is supplied as a data source
configuration dict, but the given hook configuration is ignored. The given configuration dict is
used to construct the destination path, and the given log prefix is used for any log entries. If
this is a dry run, then don't actually restore anything. Trigger the given active extract
process (an instance of subprocess.Popen) to produce output to consume.
used to construct the destination path. If this is a dry run, then don't actually restore
anything. Trigger the given active extract process (an instance of subprocess.Popen) to produce
output to consume.
If the extract process is None, then restore the dump from the filesystem rather than from an
extract stream.
@ -181,7 +178,7 @@ def restore_data_source_dump(
extract_process, data_source, dump_filename, connection_params
)
logger.debug(f"{log_prefix}: Restoring MongoDB database {data_source['name']}{dry_run_label}")
logger.debug(f"Restoring MongoDB database {data_source['name']}{dry_run_label}")
if dry_run:
return
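
The "ready to spew to a named pipe" contract in the docstrings above is the heart of borgmatic's streaming dumps: each hook starts a dump subprocess that writes into a FIFO, and Borg later reads that FIFO as if it were an ordinary file while creating the archive. A minimal sketch of the pattern, with hypothetical paths and a hand-rolled command rather than borgmatic's actual helpers:

import os
import subprocess

# Hypothetical dump destination inside the borgmatic runtime directory.
dump_filename = '/run/borgmatic/mongodb_databases/localhost/test'

os.makedirs(os.path.dirname(dump_filename), exist_ok=True)
os.mkfifo(dump_filename, mode=0o600)  # A named pipe instead of a regular file.

# Run the dump behind a shell redirection so that the child process (not this
# one) blocks on opening the pipe until a reader, namely Borg, shows up.
process = subprocess.Popen(
    f'mongodump --db test --archive > {dump_filename}', shell=True
)

# The hook returns [process]; the caller waits on it while Borg drains the pipe.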

View file

@ -25,7 +25,7 @@ def make_dump_path(base_directory): # pragma: no cover
SYSTEM_DATABASE_NAMES = ('information_schema', 'mysql', 'performance_schema', 'sys')
def database_names_to_dump(database, extra_environment, log_prefix, dry_run):
def database_names_to_dump(database, extra_environment, dry_run):
'''
Given a requested database config, return the corresponding sequence of database names to dump.
In the case of "all", query for the names of databases on the configured host and return them,
@ -49,7 +49,7 @@ def database_names_to_dump(database, extra_environment, log_prefix, dry_run):
+ ('--skip-column-names', '--batch')
+ ('--execute', 'show schemas')
)
logger.debug(f'{log_prefix}: Querying for "all" MySQL databases to dump')
logger.debug(f'Querying for "all" MySQL databases to dump')
show_output = execute_command_and_capture_output(
show_command, extra_environment=extra_environment
)
@ -62,12 +62,11 @@ def database_names_to_dump(database, extra_environment, log_prefix, dry_run):
def execute_dump_command(
database, log_prefix, dump_path, database_names, extra_environment, dry_run, dry_run_label
database, dump_path, database_names, extra_environment, dry_run, dry_run_label
):
'''
Kick off a dump for the given MySQL/MariaDB database (provided as a configuration dict) to a
named pipe constructed from the given dump path and database name. Use the given log prefix in
any log entries.
named pipe constructed from the given dump path and database name.
Return a subprocess.Popen instance for the dump process ready to spew to a named pipe. But if
this is a dry run, then don't actually dump anything and return None.
@ -82,7 +81,7 @@ def execute_dump_command(
if os.path.exists(dump_filename):
logger.warning(
f'{log_prefix}: Skipping duplicate dump of MySQL database "{database_name}" to {dump_filename}'
f'Skipping duplicate dump of MySQL database "{database_name}" to {dump_filename}'
)
return None
@ -103,7 +102,7 @@ def execute_dump_command(
)
logger.debug(
f'{log_prefix}: Dumping MySQL database "{database_name}" to {dump_filename}{dry_run_label}'
f'Dumping MySQL database "{database_name}" to {dump_filename}{dry_run_label}'
)
if dry_run:
return None
@ -117,14 +116,14 @@ def execute_dump_command(
)
def get_default_port(databases, config, log_prefix): # pragma: no cover
def get_default_port(databases, config): # pragma: no cover
return 3306
def use_streaming(databases, config, log_prefix):
def use_streaming(databases, config):
'''
Given a sequence of MySQL database configuration dicts, a configuration dict (ignored), and a
log prefix (ignored), return whether streaming will be using during dumps.
Given a sequence of MySQL database configuration dicts and a configuration dict (ignored),
return whether streaming will be used during dumps.
'''
return any(databases)
@ -132,7 +131,6 @@ def use_streaming(databases, config, log_prefix):
def dump_data_sources(
databases,
config,
log_prefix,
config_paths,
borgmatic_runtime_directory,
patterns,
@ -141,8 +139,7 @@ def dump_data_sources(
'''
Dump the given MySQL/MariaDB databases to a named pipe. The databases are supplied as a sequence
of dicts, one dict describing each database as per the configuration schema. Use the given
borgmatic runtime directory to construct the destination path and the given log prefix in any
log entries.
borgmatic runtime directory to construct the destination path.
Return a sequence of subprocess.Popen instances for the dump processes ready to spew to a named
pipe. But if this is a dry run, then don't actually dump anything and return an empty sequence.
@ -152,13 +149,13 @@ def dump_data_sources(
dry_run_label = ' (dry run; not actually dumping anything)' if dry_run else ''
processes = []
logger.info(f'{log_prefix}: Dumping MySQL databases{dry_run_label}')
logger.info(f'Dumping MySQL databases{dry_run_label}')
for database in databases:
dump_path = make_dump_path(borgmatic_runtime_directory)
extra_environment = {'MYSQL_PWD': database['password']} if 'password' in database else None
dump_database_names = database_names_to_dump(
database, extra_environment, log_prefix, dry_run
database, extra_environment, dry_run
)
if not dump_database_names:
@ -174,7 +171,6 @@ def dump_data_sources(
processes.append(
execute_dump_command(
renamed_database,
log_prefix,
dump_path,
(dump_name,),
extra_environment,
@ -186,7 +182,6 @@ def dump_data_sources(
processes.append(
execute_dump_command(
database,
log_prefix,
dump_path,
dump_database_names,
extra_environment,
@ -206,25 +201,25 @@ def dump_data_sources(
def remove_data_source_dumps(
databases, config, log_prefix, borgmatic_runtime_directory, dry_run
databases, config, borgmatic_runtime_directory, dry_run
): # pragma: no cover
'''
Remove all database dump files for this hook regardless of the given databases. Use the
borgmatic runtime directory to construct the destination path and the log prefix in any log
entries. If this is a dry run, then don't actually remove anything.
borgmatic runtime directory to construct the destination path. If this is a dry run, then don't
actually remove anything.
'''
dump.remove_data_source_dumps(
make_dump_path(borgmatic_runtime_directory), 'MySQL', log_prefix, dry_run
make_dump_path(borgmatic_runtime_directory), 'MySQL', dry_run
)
def make_data_source_dump_patterns(
databases, config, log_prefix, borgmatic_runtime_directory, name=None
databases, config, borgmatic_runtime_directory, name=None
): # pragma: no cover
'''
Given a sequence of configurations dicts, a configuration dict, a prefix to log with, the
borgmatic runtime directory, and a database name to match, return the corresponding glob
patterns to match the database dump in an archive.
Given a sequence of configuration dicts, a configuration dict, the borgmatic runtime directory,
and a database name to match, return the corresponding glob patterns to match the database dump
in an archive.
'''
borgmatic_source_directory = borgmatic.config.paths.get_borgmatic_source_directory(config)
@ -242,7 +237,6 @@ def make_data_source_dump_patterns(
def restore_data_source_dump(
hook_config,
config,
log_prefix,
data_source,
dry_run,
extract_process,
@ -251,9 +245,9 @@ def restore_data_source_dump(
):
'''
Restore a database from the given extract stream. The database is supplied as a data source
configuration dict, but the given hook configuration is ignored. The given log prefix is used
for any log entries. If this is a dry run, then don't actually restore anything. Trigger the
given active extract process (an instance of subprocess.Popen) to produce output to consume.
configuration dict, but the given hook configuration is ignored. If this is a dry run, then
don't actually restore anything. Trigger the given active extract process (an instance of
subprocess.Popen) to produce output to consume.
'''
dry_run_label = ' (dry run; not actually restoring anything)' if dry_run else ''
hostname = connection_params['hostname'] or data_source.get(
@ -287,7 +281,7 @@ def restore_data_source_dump(
)
extra_environment = {'MYSQL_PWD': password} if password else None
logger.debug(f"{log_prefix}: Restoring MySQL database {data_source['name']}{dry_run_label}")
logger.debug(f"Restoring MySQL database {data_source['name']}{dry_run_label}")
if dry_run:
return
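
The restore path is the mirror image of that dump protocol: the already-running extract process's stdout is piped straight into the database client's stdin, so the dump streams from the archive into the server without touching disk. Roughly, under hypothetical names:

import subprocess

# Stand-in for the extract_process argument: borg extract --stdout, already running.
extract_process = subprocess.Popen(
    ('borg', 'extract', '--stdout', 'repo::archive', 'borgmatic/mysql_databases/localhost/foo'),
    stdout=subprocess.PIPE,
)

# Feed the extracted dump into the restore client as it streams out.
restore_process = subprocess.Popen(('mysql', '--batch'), stdin=extract_process.stdout)
restore_process.wait()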

View file

@ -58,7 +58,7 @@ def make_extra_environment(database, restore_connection_params=None):
EXCLUDED_DATABASE_NAMES = ('template0', 'template1')
def database_names_to_dump(database, extra_environment, log_prefix, dry_run):
def database_names_to_dump(database, extra_environment, dry_run):
'''
Given a requested database config, return the corresponding sequence of database names to dump.
In the case of "all" when a database format is given, query for the names of databases on the
@ -85,7 +85,7 @@ def database_names_to_dump(database, extra_environment, log_prefix, dry_run):
+ (('--username', database['username']) if 'username' in database else ())
+ (tuple(database['list_options'].split(' ')) if 'list_options' in database else ())
)
logger.debug(f'{log_prefix}: Querying for "all" PostgreSQL databases to dump')
logger.debug(f'Querying for "all" PostgreSQL databases to dump')
list_output = execute_command_and_capture_output(
list_command, extra_environment=extra_environment
)
@ -97,14 +97,14 @@ def database_names_to_dump(database, extra_environment, log_prefix, dry_run):
)
def get_default_port(databases, config, log_prefix): # pragma: no cover
def get_default_port(databases, config): # pragma: no cover
return 5432
def use_streaming(databases, config, log_prefix):
def use_streaming(databases, config):
'''
Given a sequence of PostgreSQL database configuration dicts, a configuration dict (ignored), and
a log prefix (ignored), return whether streaming will be using during dumps.
Given a sequence of PostgreSQL database configuration dicts and a configuration dict (ignored),
return whether streaming will be used during dumps.
'''
return any(database.get('format') != 'directory' for database in databases)
@ -112,7 +112,6 @@ def use_streaming(databases, config, log_prefix):
def dump_data_sources(
databases,
config,
log_prefix,
config_paths,
borgmatic_runtime_directory,
patterns,
@ -121,8 +120,7 @@ def dump_data_sources(
'''
Dump the given PostgreSQL databases to a named pipe. The databases are supplied as a sequence of
dicts, one dict describing each database as per the configuration schema. Use the given
borgmatic runtime directory to construct the destination path and the given log prefix in any
log entries.
borgmatic runtime directory to construct the destination path.
Return a sequence of subprocess.Popen instances for the dump processes ready to spew to a named
pipe. But if this is a dry run, then don't actually dump anything and return an empty sequence.
@ -134,13 +132,13 @@ def dump_data_sources(
dry_run_label = ' (dry run; not actually dumping anything)' if dry_run else ''
processes = []
logger.info(f'{log_prefix}: Dumping PostgreSQL databases{dry_run_label}')
logger.info(f'Dumping PostgreSQL databases{dry_run_label}')
for database in databases:
extra_environment = make_extra_environment(database)
dump_path = make_dump_path(borgmatic_runtime_directory)
dump_database_names = database_names_to_dump(
database, extra_environment, log_prefix, dry_run
database, extra_environment, dry_run
)
if not dump_database_names:
@ -164,7 +162,7 @@ def dump_data_sources(
)
if os.path.exists(dump_filename):
logger.warning(
f'{log_prefix}: Skipping duplicate dump of PostgreSQL database "{database_name}" to {dump_filename}'
f'Skipping duplicate dump of PostgreSQL database "{database_name}" to {dump_filename}'
)
continue
@ -198,7 +196,7 @@ def dump_data_sources(
)
logger.debug(
f'{log_prefix}: Dumping PostgreSQL database "{database_name}" to {dump_filename}{dry_run_label}'
f'Dumping PostgreSQL database "{database_name}" to {dump_filename}{dry_run_label}'
)
if dry_run:
continue
@ -232,25 +230,25 @@ def dump_data_sources(
def remove_data_source_dumps(
databases, config, log_prefix, borgmatic_runtime_directory, dry_run
databases, config, borgmatic_runtime_directory, dry_run
): # pragma: no cover
'''
Remove all database dump files for this hook regardless of the given databases. Use the
borgmatic runtime directory to construct the destination path and the log prefix in any log
entries. If this is a dry run, then don't actually remove anything.
borgmatic runtime directory to construct the destination path. If this is a dry run, then don't
actually remove anything.
'''
dump.remove_data_source_dumps(
make_dump_path(borgmatic_runtime_directory), 'PostgreSQL', log_prefix, dry_run
make_dump_path(borgmatic_runtime_directory), 'PostgreSQL', dry_run
)
def make_data_source_dump_patterns(
databases, config, log_prefix, borgmatic_runtime_directory, name=None
databases, config, borgmatic_runtime_directory, name=None
): # pragma: no cover
'''
Given a sequence of configurations dicts, a configuration dict, a prefix to log with, the
borgmatic runtime directory, and a database name to match, return the corresponding glob
patterns to match the database dump in an archive.
Given a sequence of configuration dicts, a configuration dict, the borgmatic runtime directory,
and a database name to match, return the corresponding glob patterns to match the database dump
in an archive.
'''
borgmatic_source_directory = borgmatic.config.paths.get_borgmatic_source_directory(config)
@ -268,7 +266,6 @@ def make_data_source_dump_patterns(
def restore_data_source_dump(
hook_config,
config,
log_prefix,
data_source,
dry_run,
extract_process,
@ -278,10 +275,9 @@ def restore_data_source_dump(
'''
Restore a database from the given extract stream. The database is supplied as a data source
configuration dict, but the given hook configuration is ignored. The given borgmatic runtime
directory is used to construct the destination path (used for the directory format), and the
given log prefix is used for any log entries. If this is a dry run, then don't actually restore
anything. Trigger the given active extract process (an instance of subprocess.Popen) to produce
output to consume.
directory is used to construct the destination path (used for the directory format). If this is
a dry run, then don't actually restore anything. Trigger the given active extract process (an
instance of subprocess.Popen) to produce output to consume.
If the extract process is None, then restore the dump from the filesystem rather than from an
extract stream.
@ -355,7 +351,7 @@ def restore_data_source_dump(
)
logger.debug(
f"{log_prefix}: Restoring PostgreSQL database {data_source['name']}{dry_run_label}"
f"Restoring PostgreSQL database {data_source['name']}{dry_run_label}"
)
if dry_run:
return

View file

@ -17,14 +17,14 @@ def make_dump_path(base_directory): # pragma: no cover
return dump.make_data_source_dump_path(base_directory, 'sqlite_databases')
def get_default_port(databases, config, log_prefix): # pragma: no cover
def get_default_port(databases, config): # pragma: no cover
return None # SQLite doesn't use a port.
def use_streaming(databases, config, log_prefix):
def use_streaming(databases, config):
'''
Given a sequence of SQLite database configuration dicts, a configuration dict (ignored), and a
log prefix (ignored), return whether streaming will be using during dumps.
Given a sequence of SQLite database configuration dicts and a configuration dict (ignored),
return whether streaming will be used during dumps.
'''
return any(databases)
@ -32,7 +32,6 @@ def use_streaming(databases, config, log_prefix):
def dump_data_sources(
databases,
config,
log_prefix,
config_paths,
borgmatic_runtime_directory,
patterns,
@ -41,7 +40,7 @@ def dump_data_sources(
'''
Dump the given SQLite databases to a named pipe. The databases are supplied as a sequence of
configuration dicts, as per the configuration schema. Use the given borgmatic runtime directory
to construct the destination path and the given log prefix in any log entries.
to construct the destination path.
Return a sequence of subprocess.Popen instances for the dump processes ready to spew to a named
pipe. But if this is a dry run, then don't actually dump anything and return an empty sequence.
@ -51,7 +50,7 @@ def dump_data_sources(
dry_run_label = ' (dry run; not actually dumping anything)' if dry_run else ''
processes = []
logger.info(f'{log_prefix}: Dumping SQLite databases{dry_run_label}')
logger.info(f'Dumping SQLite databases{dry_run_label}')
for database in databases:
database_path = database['path']
@ -60,7 +59,7 @@ def dump_data_sources(
logger.warning('The "all" database name has no meaning for SQLite databases')
if not os.path.exists(database_path):
logger.warning(
f'{log_prefix}: No SQLite database at {database_path}; an empty database will be created and dumped'
f'No SQLite database at {database_path}; an empty database will be created and dumped'
)
dump_path = make_dump_path(borgmatic_runtime_directory)
@ -68,7 +67,7 @@ def dump_data_sources(
if os.path.exists(dump_filename):
logger.warning(
f'{log_prefix}: Skipping duplicate dump of SQLite database at {database_path} to {dump_filename}'
f'Skipping duplicate dump of SQLite database at {database_path} to {dump_filename}'
)
continue
@ -80,7 +79,7 @@ def dump_data_sources(
shlex.quote(dump_filename),
)
logger.debug(
f'{log_prefix}: Dumping SQLite database at {database_path} to {dump_filename}{dry_run_label}'
f'Dumping SQLite database at {database_path} to {dump_filename}{dry_run_label}'
)
if dry_run:
continue
@ -99,25 +98,25 @@ def dump_data_sources(
def remove_data_source_dumps(
databases, config, log_prefix, borgmatic_runtime_directory, dry_run
databases, config, borgmatic_runtime_directory, dry_run
): # pragma: no cover
'''
Remove all database dump files for this hook regardless of the given databases. Use the
borgmatic runtime directory to construct the destination path and the log prefix in any log
entries. If this is a dry run, then don't actually remove anything.
borgmatic runtime directory to construct the destination path. If this is a dry run, then don't
actually remove anything.
'''
dump.remove_data_source_dumps(
make_dump_path(borgmatic_runtime_directory), 'SQLite', log_prefix, dry_run
make_dump_path(borgmatic_runtime_directory), 'SQLite', dry_run
)
def make_data_source_dump_patterns(
databases, config, log_prefix, borgmatic_runtime_directory, name=None
databases, config, borgmatic_runtime_directory, name=None
): # pragma: no cover
'''
Given a sequence of configurations dicts, a configuration dict, a prefix to log with, the
borgmatic runtime directory, and a database name to match, return the corresponding glob
patterns to match the database dump in an archive.
Given a sequence of configuration dicts, a configuration dict, the borgmatic runtime directory,
and a database name to match, return the corresponding glob patterns to match the database dump
in an archive.
'''
borgmatic_source_directory = borgmatic.config.paths.get_borgmatic_source_directory(config)
@ -135,7 +134,6 @@ def make_data_source_dump_patterns(
def restore_data_source_dump(
hook_config,
config,
log_prefix,
data_source,
dry_run,
extract_process,
@ -144,22 +142,22 @@ def restore_data_source_dump(
):
'''
Restore a database from the given extract stream. The database is supplied as a data source
configuration dict, but the given hook configuration is ignored. The given log prefix is used
for any log entries. If this is a dry run, then don't actually restore anything. Trigger the
given active extract process (an instance of subprocess.Popen) to produce output to consume.
configuration dict, but the given hook configuration is ignored. If this is a dry run, then
don't actually restore anything. Trigger the given active extract process (an instance of
subprocess.Popen) to produce output to consume.
'''
dry_run_label = ' (dry run; not actually restoring anything)' if dry_run else ''
database_path = connection_params['restore_path'] or data_source.get(
'restore_path', data_source.get('path')
)
logger.debug(f'{log_prefix}: Restoring SQLite database at {database_path}{dry_run_label}')
logger.debug(f'Restoring SQLite database at {database_path}{dry_run_label}')
if dry_run:
return
try:
os.remove(database_path)
logger.warning(f'{log_prefix}: Removed existing SQLite database at {database_path}')
logger.warning(f'Removed existing SQLite database at {database_path}')
except FileNotFoundError: # pragma: no cover
pass
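
For SQLite, the dump command assembled a few hunks up is simply the sqlite3 CLI's .dump facility with its output redirected into the destination pipe; there is no separate dump utility. Approximately, with hypothetical paths:

import shlex
import subprocess

database_path = '/var/lib/app/data.sqlite'  # Hypothetical source database.
dump_filename = '/run/borgmatic/sqlite_databases/localhost/data'

# Emit the whole database as SQL text into the named pipe.
subprocess.Popen(
    f'sqlite3 {shlex.quote(database_path)} .dump > {shlex.quote(dump_filename)}',
    shell=True,
)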

View file

@ -13,7 +13,7 @@ import borgmatic.hooks.data_source.snapshot
logger = logging.getLogger(__name__)
def use_streaming(hook_config, config, log_prefix): # pragma: no cover
def use_streaming(hook_config, config): # pragma: no cover
'''
Return whether dump streaming is used for this hook. (Spoiler: It isn't.)
'''
@ -189,26 +189,25 @@ def make_borg_snapshot_pattern(pattern, normalized_runtime_directory):
def dump_data_sources(
hook_config,
config,
log_prefix,
config_paths,
borgmatic_runtime_directory,
patterns,
dry_run,
):
'''
Given a ZFS configuration dict, a configuration dict, a log prefix, the borgmatic configuration
file paths, the borgmatic runtime directory, the configured patterns, and whether this is a dry
run, auto-detect and snapshot any ZFS dataset mount points listed in the given patterns and any
Given a ZFS configuration dict, a configuration dict, the borgmatic configuration file paths,
the borgmatic runtime directory, the configured patterns, and whether this is a dry run,
auto-detect and snapshot any ZFS dataset mount points listed in the given patterns and any
dataset with a borgmatic-specific user property. Also update those patterns, replacing dataset
mount points with corresponding snapshot directories so they get stored in the Borg archive
instead. Use the log prefix in any log entries.
instead.
Return an empty sequence, since there are no ongoing dump processes from this hook.
If this is a dry run, then don't actually snapshot anything.
'''
dry_run_label = ' (dry run; not actually snapshotting anything)' if dry_run else ''
logger.info(f'{log_prefix}: Snapshotting ZFS datasets{dry_run_label}')
logger.info(f'Snapshotting ZFS datasets{dry_run_label}')
# List ZFS datasets to get their mount points.
zfs_command = hook_config.get('zfs_command', 'zfs')
@ -219,12 +218,12 @@ def dump_data_sources(
normalized_runtime_directory = os.path.normpath(borgmatic_runtime_directory)
if not requested_datasets:
logger.warning(f'{log_prefix}: No ZFS datasets found to snapshot{dry_run_label}')
logger.warning(f'No ZFS datasets found to snapshot{dry_run_label}')
for dataset in requested_datasets:
full_snapshot_name = f'{dataset.name}@{snapshot_name}'
logger.debug(
f'{log_prefix}: Creating ZFS snapshot {full_snapshot_name} of {dataset.mount_point}{dry_run_label}'
f'Creating ZFS snapshot {full_snapshot_name} of {dataset.mount_point}{dry_run_label}'
)
if not dry_run:
@ -239,7 +238,7 @@ def dump_data_sources(
)
logger.debug(
f'{log_prefix}: Mounting ZFS snapshot {full_snapshot_name} at {snapshot_mount_path}{dry_run_label}'
f'Mounting ZFS snapshot {full_snapshot_name} at {snapshot_mount_path}{dry_run_label}'
)
if dry_run:
@ -306,12 +305,12 @@ def get_all_snapshots(zfs_command):
return tuple(line.rstrip() for line in list_output.splitlines())
def remove_data_source_dumps(hook_config, config, log_prefix, borgmatic_runtime_directory, dry_run):
def remove_data_source_dumps(hook_config, config, borgmatic_runtime_directory, dry_run):
'''
Given a ZFS configuration dict, a configuration dict, a log prefix, the borgmatic runtime
directory, and whether this is a dry run, unmount and destroy any ZFS snapshots created by
borgmatic. Use the log prefix in any log entries. If this is a dry run or ZFS isn't configured
in borgmatic's configuration, then don't actually remove anything.
Given a ZFS configuration dict, a configuration dict, the borgmatic runtime directory, and
whether this is a dry run, unmount and destroy any ZFS snapshots created by borgmatic. If this
is a dry run or ZFS isn't configured in borgmatic's configuration, then don't actually remove
anything.
'''
if hook_config is None:
return
@ -324,10 +323,10 @@ def remove_data_source_dumps(hook_config, config, log_prefix, borgmatic_runtime_
try:
dataset_mount_points = get_all_dataset_mount_points(zfs_command)
except FileNotFoundError:
logger.debug(f'{log_prefix}: Could not find "{zfs_command}" command')
logger.debug(f'Could not find "{zfs_command}" command')
return
except subprocess.CalledProcessError as error:
logger.debug(f'{log_prefix}: {error}')
logger.debug(error)
return
snapshots_glob = os.path.join(
@ -337,7 +336,7 @@ def remove_data_source_dumps(hook_config, config, log_prefix, borgmatic_runtime_
'zfs_snapshots',
)
logger.debug(
f'{log_prefix}: Looking for snapshots to remove in {snapshots_glob}{dry_run_label}'
f'Looking for snapshots to remove in {snapshots_glob}{dry_run_label}'
)
umount_command = hook_config.get('umount_command', 'umount')
@ -364,17 +363,17 @@ def remove_data_source_dumps(hook_config, config, log_prefix, borgmatic_runtime_
continue
logger.debug(
f'{log_prefix}: Unmounting ZFS snapshot at {snapshot_mount_path}{dry_run_label}'
f'Unmounting ZFS snapshot at {snapshot_mount_path}{dry_run_label}'
)
if not dry_run:
try:
unmount_snapshot(umount_command, snapshot_mount_path)
except FileNotFoundError:
logger.debug(f'{log_prefix}: Could not find "{umount_command}" command')
logger.debug(f'Could not find "{umount_command}" command')
return
except subprocess.CalledProcessError as error:
logger.debug(f'{log_prefix}: {error}')
logger.debug(error)
return
if not dry_run:
@ -388,14 +387,14 @@ def remove_data_source_dumps(hook_config, config, log_prefix, borgmatic_runtime_
if not full_snapshot_name.split('@')[-1].startswith(BORGMATIC_SNAPSHOT_PREFIX):
continue
logger.debug(f'{log_prefix}: Destroying ZFS snapshot {full_snapshot_name}{dry_run_label}')
logger.debug(f'Destroying ZFS snapshot {full_snapshot_name}{dry_run_label}')
if not dry_run:
destroy_snapshot(zfs_command, full_snapshot_name)
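
Taken together, the ZFS hook's snapshot lifecycle amounts to a handful of zfs and umount invocations. A condensed sketch of what a single dataset goes through, with illustrative names (borgmatic derives the real snapshot name from a borgmatic-prefixed identifier, as the BORGMATIC_SNAPSHOT_PREFIX check above shows):

import os
import subprocess

full_snapshot_name = 'pool/data@borgmatic-1234'  # Illustrative name.
snapshot_mount_path = '/run/borgmatic/zfs_snapshots/pool/data'

# During dump_data_sources(): snapshot the dataset, then mount it for Borg to read.
subprocess.check_call(('zfs', 'snapshot', full_snapshot_name))
os.makedirs(snapshot_mount_path, exist_ok=True)
subprocess.check_call(('mount', '-t', 'zfs', '-o', 'ro', full_snapshot_name, snapshot_mount_path))

# During remove_data_source_dumps(): unmount and destroy, as in the code above.
subprocess.check_call(('umount', snapshot_mount_path))
subprocess.check_call(('zfs', 'destroy', full_snapshot_name))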
def make_data_source_dump_patterns(
hook_config, config, log_prefix, borgmatic_runtime_directory, name=None
hook_config, config, borgmatic_runtime_directory, name=None
): # pragma: no cover
'''
Restores aren't implemented, because stored files can be extracted directly with "extract".
@ -406,7 +405,6 @@ def make_data_source_dump_patterns(
def restore_data_source_dump(
hook_config,
config,
log_prefix,
data_source,
dry_run,
extract_process,

View file

@ -21,12 +21,12 @@ def get_submodule_names(parent_module): # pragma: no cover
return tuple(module_info.name for module_info in pkgutil.iter_modules(parent_module.__path__))
def call_hook(function_name, config, log_prefix, hook_name, *args, **kwargs):
def call_hook(function_name, config, hook_name, *args, **kwargs):
'''
Given a configuration dict and a prefix to use in log entries, call the requested function of
the Python module corresponding to the given hook name. Supply that call with the configuration
for this hook (if any), the log prefix, and any given args and kwargs. Return the return value
of that call or None if the module in question is not a hook.
Given a configuration dict, call the requested function of the Python module corresponding to
the given hook name. Supply that call with the configuration for this hook (if any) and any
given args and kwargs. Return the return value of that call or None if the module in question is
not a hook.
Raise ValueError if the hook name is unknown.
Raise AttributeError if the function name is not found in the module.
@ -54,17 +54,16 @@ def call_hook(function_name, config, log_prefix, hook_name, *args, **kwargs):
else:
raise ValueError(f'Unknown hook name: {hook_name}')
logger.debug(f'{log_prefix}: Calling {hook_name} hook function {function_name}')
logger.debug(f'Calling {hook_name} hook function {function_name}')
return getattr(module, function_name)(hook_config, config, log_prefix, *args, **kwargs)
return getattr(module, function_name)(hook_config, config, *args, **kwargs)
def call_hooks(function_name, config, log_prefix, hook_type, *args, **kwargs):
def call_hooks(function_name, config, hook_type, *args, **kwargs):
'''
Given a configuration dict and a prefix to use in log entries, call the requested function of
the Python module corresponding to each hook of the given hook type (either "data_source" or
"monitoring"). Supply each call with the configuration for that hook, the log prefix, and any
given args and kwargs.
Given a configuration dict, call the requested function of the Python module corresponding to
each hook of the given hook type (either "data_source" or "monitoring"). Supply each call with
the configuration for that hook and any given args and kwargs.
Collect any return values into a dict from module name to return value. Note that the module
name is the name of the hook module itself, which might be different from the hook configuration
@ -78,7 +77,7 @@ def call_hooks(function_name, config, log_prefix, hook_type, *args, **kwargs):
Raise anything else that a called function raises. An error stops calls to subsequent functions.
'''
return {
hook_name: call_hook(function_name, config, log_prefix, hook_name, *args, **kwargs)
hook_name: call_hook(function_name, config, hook_name, *args, **kwargs)
for hook_name in get_submodule_names(
importlib.import_module(f'borgmatic.hooks.{hook_type.value}')
)
@ -86,18 +85,18 @@ def call_hooks(function_name, config, log_prefix, hook_type, *args, **kwargs):
}
def call_hooks_even_if_unconfigured(function_name, config, log_prefix, hook_type, *args, **kwargs):
def call_hooks_even_if_unconfigured(function_name, config, hook_type, *args, **kwargs):
'''
Given a configuration dict and a prefix to use in log entries, call the requested function of
the Python module corresponding to each hook of the given hook type (either "data_source" or
"monitoring"). Supply each call with the configuration for that hook, the log prefix, and any
given args and kwargs. Collect any return values into a dict from hook name to return value.
Given a configuration dict, call the requested function of the Python module corresponding to
each hook of the given hook type (either "data_source" or "monitoring"). Supply each call with
the configuration for that hook and any given args and kwargs. Collect any return values into a
dict from hook name to return value.
Raise AttributeError if the function name is not found in the module.
Raise anything else that a called function raises. An error stops calls to subsequent functions.
'''
return {
hook_name: call_hook(function_name, config, log_prefix, hook_name, *args, **kwargs)
hook_name: call_hook(function_name, config, hook_name, *args, **kwargs)
for hook_name in get_submodule_names(
importlib.import_module(f'borgmatic.hooks.{hook_type.value}')
)
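
With the log prefix gone from dispatch, every call site sheds an argument and the hook modules shed a parameter. A sketch of a caller after this change, using a stand-in configuration dict:

import borgmatic.hooks.dispatch

config = {'postgresql_databases': [{'name': 'users'}]}  # Stand-in configuration.

# Calls use_streaming() on every data source hook module and collects the
# results into a dict of module name to return value.
streaming = borgmatic.hooks.dispatch.call_hooks(
    'use_streaming',
    config,
    borgmatic.hooks.dispatch.Hook_type.DATA_SOURCE,
)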

View file

@ -88,8 +88,8 @@ class Multi_stream_handler(logging.Handler):
class Console_no_color_formatter(logging.Formatter):
def format(self, record): # pragma: no cover
return record.msg
def __init__(self, *args, **kwargs):
super(Console_no_color_formatter, self).__init__('{prefix}{message}', style='{', defaults={'prefix': ''}, *args, **kwargs)
class Color(enum.Enum):
@ -102,6 +102,9 @@ class Color(enum.Enum):
class Console_color_formatter(logging.Formatter):
def __init__(self, *args, **kwargs):
super(Console_color_formatter, self).__init__('{prefix}{message}', style='{', defaults={'prefix': ''}, *args, **kwargs)
def format(self, record):
add_custom_log_levels()
@ -118,7 +121,7 @@ class Console_color_formatter(logging.Formatter):
.value
)
return color_text(color, record.msg)
return color_text(color, super(Console_color_formatter, self).format(record))
def ansi_escape_code(color): # pragma: no cover
@ -179,6 +182,16 @@ def add_custom_log_levels(): # pragma: no cover
add_logging_level('DISABLED', DISABLED)
def set_log_prefix(prefix):
'''
Given a prefix string, set it onto the formatter defaults for every logging handler so that it
shows up in every subsequent logging message. For this to work, each logging formatter must be
initialized with "{prefix}" somewhere in its logging format.
'''
for handler in logging.getLogger().handlers:
handler.formatter._style._defaults = {'prefix': f'{prefix}: ' if prefix else ''}
def configure_logging(
console_log_level,
syslog_log_level=None,
@ -242,7 +255,7 @@ def configure_logging(
if syslog_path:
syslog_handler = logging.handlers.SysLogHandler(address=syslog_path)
syslog_handler.setFormatter(
logging.Formatter('borgmatic: {levelname} {message}', style='{') # noqa: FS003
logging.Formatter('borgmatic: {levelname} {prefix}{message}', style='{', defaults={'prefix': ''}) # noqa: FS003
)
syslog_handler.setLevel(syslog_log_level)
handlers.append(syslog_handler)
@ -251,7 +264,7 @@ def configure_logging(
file_handler = logging.handlers.WatchedFileHandler(log_file)
file_handler.setFormatter(
logging.Formatter(
log_file_format or '[{asctime}] {levelname}: {message}', style='{' # noqa: FS003
log_file_format or '[{asctime}] {levelname}: {prefix}{message}', style='{', defaults={'prefix': ''} # noqa: FS003
)
)
file_handler.setLevel(log_file_log_level)
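
The prefix plumbing above leans on standard library behavior: logging.Formatter has accepted a defaults mapping since Python 3.10, supplying values for format fields like {prefix} that log records don't carry themselves, and set_log_prefix() swaps that mapping out on live handlers. A self-contained sketch of the effect:

import logging

handler = logging.StreamHandler()
handler.setFormatter(
    logging.Formatter('{prefix}{message}', style='{', defaults={'prefix': ''})
)
root = logging.getLogger()
root.addHandler(handler)
root.setLevel(logging.INFO)

root.info('Creating archive')  # Prints: Creating archive

# What set_log_prefix() does in miniature: mutate the style's defaults in place.
handler.formatter._style._defaults = {'prefix': 'myrepo: '}

root.info('Creating archive')  # Prints: myrepo: Creating archive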

View file

@ -1,6 +1,6 @@
[project]
name = "borgmatic"
version = "1.9.8"
version = "1.9.9.dev0"
authors = [
{ name="Dan Helfman", email="witten@torsion.org" },
]

View file

@ -931,7 +931,6 @@ def test_compare_spot_check_hashes_returns_paths_having_failing_hashes():
global_arguments=flexmock(),
local_path=flexmock(),
remote_path=flexmock(),
log_prefix='repo',
source_paths=('/foo', '/bar', '/baz', '/quux'),
) == ('/bar',)
@ -972,7 +971,6 @@ def test_compare_spot_check_hashes_returns_relative_paths_having_failing_hashes(
global_arguments=flexmock(),
local_path=flexmock(),
remote_path=flexmock(),
log_prefix='repo',
source_paths=('foo', 'bar', 'baz', 'quux'),
) == ('bar',)
@ -1013,7 +1011,6 @@ def test_compare_spot_check_hashes_handles_data_sample_percentage_above_100():
global_arguments=flexmock(),
local_path=flexmock(),
remote_path=flexmock(),
log_prefix='repo',
source_paths=('/foo', '/bar'),
) == ('/foo', '/bar')
@ -1051,7 +1048,6 @@ def test_compare_spot_check_hashes_uses_xxh64sum_command_option():
global_arguments=flexmock(),
local_path=flexmock(),
remote_path=flexmock(),
log_prefix='repo',
source_paths=('/foo', '/bar', '/baz', '/quux'),
) == ('/bar',)
@ -1088,7 +1084,6 @@ def test_compare_spot_check_hashes_considers_path_missing_from_archive_as_not_ma
global_arguments=flexmock(),
local_path=flexmock(),
remote_path=flexmock(),
log_prefix='repo',
source_paths=('/foo', '/bar', '/baz', '/quux'),
) == ('/bar',)
@ -1124,7 +1119,6 @@ def test_compare_spot_check_hashes_considers_non_existent_path_as_not_matching()
global_arguments=flexmock(),
local_path=flexmock(),
remote_path=flexmock(),
log_prefix='repo',
source_paths=('/foo', '/bar', '/baz', '/quux'),
) == ('/bar',)
@ -1171,7 +1165,6 @@ def test_compare_spot_check_hashes_with_too_many_paths_feeds_them_to_commands_in
global_arguments=flexmock(),
local_path=flexmock(),
remote_path=flexmock(),
log_prefix='repo',
source_paths=('/foo', '/bar', '/baz', '/quux'),
) == ('/quux',)
@ -1214,7 +1207,6 @@ def test_compare_spot_check_hashes_uses_working_directory_to_access_source_paths
global_arguments=flexmock(),
local_path=flexmock(),
remote_path=flexmock(),
log_prefix='repo',
source_paths=('foo', 'bar', 'baz', 'quux'),
) == ('bar',)

View file

@ -194,7 +194,6 @@ def test_get_configured_data_source_matches_data_source_with_restore_dump():
'postgresql_databases': [{'name': 'foo'}, {'name': 'bar'}],
},
restore_dump=module.Dump('postgresql_databases', 'bar'),
log_prefix='test',
) == {'name': 'bar'}
@ -206,7 +205,6 @@ def test_get_configured_data_source_matches_nothing_when_nothing_configured():
module.get_configured_data_source(
config={},
restore_dump=module.Dump('postgresql_databases', 'quux'),
log_prefix='test',
)
is None
)
@ -222,7 +220,6 @@ def test_get_configured_data_source_matches_nothing_when_restore_dump_does_not_m
'postgresql_databases': [{'name': 'foo'}],
},
restore_dump=module.Dump('postgresql_databases', 'quux'),
log_prefix='test',
)
is None
)
@ -250,7 +247,6 @@ def test_get_configured_data_source_with_multiple_matching_data_sources_errors()
],
},
restore_dump=module.Dump('postgresql_databases', 'bar'),
log_prefix='test',
)
@ -291,7 +287,6 @@ def test_restore_single_dump_extracts_and_restores_single_file_dump():
flexmock(module.borgmatic.hooks.dispatch).should_receive('call_hook').with_args(
function_name='restore_data_source_dump',
config=object,
log_prefix=object,
hook_name=object,
data_source=object,
dry_run=object,
@ -334,7 +329,6 @@ def test_restore_single_dump_extracts_and_restores_directory_dump():
flexmock(module.borgmatic.hooks.dispatch).should_receive('call_hook').with_args(
function_name='restore_data_source_dump',
config=object,
log_prefix=object,
hook_name=object,
data_source=object,
dry_run=object,
@ -377,7 +371,6 @@ def test_restore_single_dump_with_directory_dump_error_cleans_up_temporary_direc
flexmock(module.borgmatic.hooks.dispatch).should_receive('call_hook').with_args(
function_name='restore_data_source_dump',
config=object,
log_prefix=object,
hook_name=object,
data_source=object,
dry_run=object,
@ -419,7 +412,6 @@ def test_restore_single_dump_with_directory_dump_and_dry_run_skips_directory_mov
flexmock(module.borgmatic.hooks.dispatch).should_receive('call_hook').with_args(
function_name='restore_data_source_dump',
config=object,
log_prefix=object,
hook_name=object,
data_source=object,
dry_run=object,
@ -1064,17 +1056,14 @@ def test_run_restore_restores_data_source_configured_with_all_name():
flexmock(module).should_receive('get_configured_data_source').with_args(
config=object,
restore_dump=module.Dump(hook_name='postgresql_databases', data_source_name='foo'),
log_prefix=object,
).and_return({'name': 'foo'})
flexmock(module).should_receive('get_configured_data_source').with_args(
config=object,
restore_dump=module.Dump(hook_name='postgresql_databases', data_source_name='bar'),
log_prefix=object,
).and_return(None)
flexmock(module).should_receive('get_configured_data_source').with_args(
config=object,
restore_dump=module.Dump(hook_name='postgresql_databases', data_source_name='all'),
log_prefix=object,
).and_return({'name': 'bar'})
flexmock(module).should_receive('restore_single_dump').with_args(
repository=object,
@ -1148,17 +1137,14 @@ def test_run_restore_skips_missing_data_source():
flexmock(module).should_receive('get_configured_data_source').with_args(
config=object,
restore_dump=module.Dump(hook_name='postgresql_databases', data_source_name='foo'),
log_prefix=object,
).and_return({'name': 'foo'})
flexmock(module).should_receive('get_configured_data_source').with_args(
config=object,
restore_dump=module.Dump(hook_name='postgresql_databases', data_source_name='bar'),
log_prefix=object,
).and_return(None)
flexmock(module).should_receive('get_configured_data_source').with_args(
config=object,
restore_dump=module.Dump(hook_name='postgresql_databases', data_source_name='all'),
log_prefix=object,
).and_return(None)
flexmock(module).should_receive('restore_single_dump').with_args(
repository=object,
@ -1232,12 +1218,10 @@ def test_run_restore_restores_data_sources_from_different_hooks():
flexmock(module).should_receive('get_configured_data_source').with_args(
config=object,
restore_dump=module.Dump(hook_name='postgresql_databases', data_source_name='foo'),
log_prefix=object,
).and_return({'name': 'foo'})
flexmock(module).should_receive('get_configured_data_source').with_args(
config=object,
restore_dump=module.Dump(hook_name='mysql_databases', data_source_name='foo'),
log_prefix=object,
).and_return({'name': 'bar'})
flexmock(module).should_receive('restore_single_dump').with_args(
repository=object,

View file

@ -17,13 +17,12 @@ def test_write_patterns_file_writes_pattern_lines():
module.write_patterns_file(
[Pattern('/foo'), Pattern('/foo/bar', Pattern_type.INCLUDE, Pattern_style.SHELL)],
borgmatic_runtime_directory='/run/user/0',
log_prefix='test.yaml',
)
def test_write_patterns_file_with_empty_exclude_patterns_does_not_raise():
module.write_patterns_file(
[], borgmatic_runtime_directory='/run/user/0', log_prefix='test.yaml'
[], borgmatic_runtime_directory='/run/user/0'
)
@ -36,7 +35,6 @@ def test_write_patterns_file_appends_to_existing():
module.write_patterns_file(
[Pattern('/foo'), Pattern('/foo/bar', Pattern_type.INCLUDE)],
borgmatic_runtime_directory='/run/user/0',
log_prefix='test.yaml',
patterns_file=patterns_file,
)

View file

@ -24,7 +24,6 @@ def test_dump_data_sources_creates_manifest_file():
module.dump_data_sources(
hook_config=None,
config={},
log_prefix='test',
config_paths=('test.yaml',),
borgmatic_runtime_directory='/run/borgmatic',
patterns=[],
@ -40,7 +39,6 @@ def test_dump_data_sources_with_store_config_files_false_does_not_create_manifes
module.dump_data_sources(
hook_config=hook_config,
config={'bootstrap': hook_config},
log_prefix='test',
config_paths=('test.yaml',),
borgmatic_runtime_directory='/run/borgmatic',
patterns=[],
@ -55,7 +53,6 @@ def test_dump_data_sources_with_dry_run_does_not_create_manifest_file():
module.dump_data_sources(
hook_config=None,
config={},
log_prefix='test',
config_paths=('test.yaml',),
borgmatic_runtime_directory='/run/borgmatic',
patterns=[],
@ -76,7 +73,6 @@ def test_remove_data_source_dumps_deletes_manifest_and_parent_directory():
module.remove_data_source_dumps(
hook_config=None,
config={},
log_prefix='test',
borgmatic_runtime_directory='/run/borgmatic',
dry_run=False,
)
@ -93,7 +89,6 @@ def test_remove_data_source_dumps_with_dry_run_bails():
module.remove_data_source_dumps(
hook_config=None,
config={},
log_prefix='test',
borgmatic_runtime_directory='/run/borgmatic',
dry_run=True,
)
@ -112,7 +107,6 @@ def test_remove_data_source_dumps_swallows_manifest_file_not_found_error():
module.remove_data_source_dumps(
hook_config=None,
config={},
log_prefix='test',
borgmatic_runtime_directory='/run/borgmatic',
dry_run=False,
)
@ -133,7 +127,6 @@ def test_remove_data_source_dumps_swallows_manifest_parent_directory_not_found_e
module.remove_data_source_dumps(
hook_config=None,
config={},
log_prefix='test',
borgmatic_runtime_directory='/run/borgmatic',
dry_run=False,
)

View file

@ -242,7 +242,6 @@ def test_dump_data_sources_snapshots_each_subvolume_and_updates_patterns():
module.dump_data_sources(
hook_config=config['btrfs'],
config=config,
log_prefix='test',
config_paths=('test.yaml',),
borgmatic_runtime_directory='/run/borgmatic',
patterns=patterns,
@ -300,7 +299,6 @@ def test_dump_data_sources_uses_custom_btrfs_command_in_commands():
module.dump_data_sources(
hook_config=config['btrfs'],
config=config,
log_prefix='test',
config_paths=('test.yaml',),
borgmatic_runtime_directory='/run/borgmatic',
patterns=patterns,
@ -356,7 +354,6 @@ def test_dump_data_sources_uses_custom_findmnt_command_in_commands():
module.dump_data_sources(
hook_config=config['btrfs'],
config=config,
log_prefix='test',
config_paths=('test.yaml',),
borgmatic_runtime_directory='/run/borgmatic',
patterns=patterns,
@ -397,7 +394,6 @@ def test_dump_data_sources_with_dry_run_skips_snapshot_and_patterns_update():
module.dump_data_sources(
hook_config=config['btrfs'],
config=config,
log_prefix='test',
config_paths=('test.yaml',),
borgmatic_runtime_directory='/run/borgmatic',
patterns=patterns,
@ -422,7 +418,6 @@ def test_dump_data_sources_without_matching_subvolumes_skips_snapshot_and_patter
module.dump_data_sources(
hook_config=config['btrfs'],
config=config,
log_prefix='test',
config_paths=('test.yaml',),
borgmatic_runtime_directory='/run/borgmatic',
patterns=patterns,
@ -485,7 +480,6 @@ def test_dump_data_sources_snapshots_adds_to_existing_exclude_patterns():
module.dump_data_sources(
hook_config=config['btrfs'],
config=config,
log_prefix='test',
config_paths=('test.yaml',),
borgmatic_runtime_directory='/run/borgmatic',
patterns=patterns,
@ -595,7 +589,6 @@ def test_remove_data_source_dumps_deletes_snapshots():
module.remove_data_source_dumps(
hook_config=config['btrfs'],
config=config,
log_prefix='test',
borgmatic_runtime_directory='/run/borgmatic',
dry_run=False,
)
@ -613,7 +606,6 @@ def test_remove_data_source_dumps_without_hook_configuration_bails():
module.remove_data_source_dumps(
hook_config=None,
config={'source_directories': '/mnt/subvolume'},
log_prefix='test',
borgmatic_runtime_directory='/run/borgmatic',
dry_run=False,
)
@ -632,7 +624,6 @@ def test_remove_data_source_dumps_with_get_subvolumes_file_not_found_error_bails
module.remove_data_source_dumps(
hook_config=config['btrfs'],
config=config,
log_prefix='test',
borgmatic_runtime_directory='/run/borgmatic',
dry_run=False,
)
@ -653,7 +644,6 @@ def test_remove_data_source_dumps_with_get_subvolumes_called_process_error_bails
module.remove_data_source_dumps(
hook_config=config['btrfs'],
config=config,
log_prefix='test',
borgmatic_runtime_directory='/run/borgmatic',
dry_run=False,
)
@ -717,7 +707,6 @@ def test_remove_data_source_dumps_with_dry_run_skips_deletes():
module.remove_data_source_dumps(
hook_config=config['btrfs'],
config=config,
log_prefix='test',
borgmatic_runtime_directory='/run/borgmatic',
dry_run=True,
)
@ -736,7 +725,6 @@ def test_remove_data_source_dumps_without_subvolumes_skips_deletes():
module.remove_data_source_dumps(
hook_config=config['btrfs'],
config=config,
log_prefix='test',
borgmatic_runtime_directory='/run/borgmatic',
dry_run=False,
)
@ -780,7 +768,6 @@ def test_remove_data_source_without_snapshots_skips_deletes():
module.remove_data_source_dumps(
hook_config=config['btrfs'],
config=config,
log_prefix='test',
borgmatic_runtime_directory='/run/borgmatic',
dry_run=False,
)
@ -844,7 +831,6 @@ def test_remove_data_source_dumps_with_delete_snapshot_file_not_found_error_bail
module.remove_data_source_dumps(
hook_config=config['btrfs'],
config=config,
log_prefix='test',
borgmatic_runtime_directory='/run/borgmatic',
dry_run=False,
)
@ -910,7 +896,6 @@ def test_remove_data_source_dumps_with_delete_snapshot_called_process_error_bail
module.remove_data_source_dumps(
hook_config=config['btrfs'],
config=config,
log_prefix='test',
borgmatic_runtime_directory='/run/borgmatic',
dry_run=False,
)

View file

@ -220,7 +220,6 @@ def test_dump_data_sources_snapshots_and_mounts_and_updates_patterns():
module.dump_data_sources(
hook_config=config['lvm'],
config=config,
log_prefix='test',
config_paths=('test.yaml',),
borgmatic_runtime_directory='/run/borgmatic',
patterns=patterns,
@ -246,7 +245,6 @@ def test_dump_data_sources_with_no_logical_volumes_skips_snapshots():
module.dump_data_sources(
hook_config=config['lvm'],
config=config,
log_prefix='test',
config_paths=('test.yaml',),
borgmatic_runtime_directory='/run/borgmatic',
patterns=patterns,
@ -317,7 +315,6 @@ def test_dump_data_sources_uses_snapshot_size_for_snapshot():
module.dump_data_sources(
hook_config=config['lvm'],
config=config,
log_prefix='test',
config_paths=('test.yaml',),
borgmatic_runtime_directory='/run/borgmatic',
patterns=patterns,
@ -398,7 +395,6 @@ def test_dump_data_sources_uses_custom_commands():
module.dump_data_sources(
hook_config=config['lvm'],
config=config,
log_prefix='test',
config_paths=('test.yaml',),
borgmatic_runtime_directory='/run/borgmatic',
patterns=patterns,
@ -450,7 +446,6 @@ def test_dump_data_sources_with_dry_run_skips_snapshots_and_does_not_touch_patte
module.dump_data_sources(
hook_config=config['lvm'],
config=config,
log_prefix='test',
config_paths=('test.yaml',),
borgmatic_runtime_directory='/run/borgmatic',
patterns=patterns,
@ -518,7 +513,6 @@ def test_dump_data_sources_ignores_mismatch_between_given_patterns_and_contained
module.dump_data_sources(
hook_config=config['lvm'],
config=config,
log_prefix='test',
config_paths=('test.yaml',),
borgmatic_runtime_directory='/run/borgmatic',
patterns=patterns,
@ -572,7 +566,6 @@ def test_dump_data_sources_with_missing_snapshot_errors():
module.dump_data_sources(
hook_config=config['lvm'],
config=config,
log_prefix='test',
config_paths=('test.yaml',),
borgmatic_runtime_directory='/run/borgmatic',
patterns=patterns,
@ -728,7 +721,6 @@ def test_remove_data_source_dumps_unmounts_and_remove_snapshots():
module.remove_data_source_dumps(
hook_config=config['lvm'],
config=config,
log_prefix='test',
borgmatic_runtime_directory='/run/borgmatic',
dry_run=False,
)
@ -745,7 +737,6 @@ def test_remove_data_source_dumps_bails_for_missing_lvm_configuration():
module.remove_data_source_dumps(
hook_config=None,
config={'source_directories': '/mnt/lvolume'},
log_prefix='test',
borgmatic_runtime_directory='/run/borgmatic',
dry_run=False,
)
@ -763,7 +754,6 @@ def test_remove_data_source_dumps_bails_for_missing_lsblk_command():
module.remove_data_source_dumps(
hook_config=config['lvm'],
config=config,
log_prefix='test',
borgmatic_runtime_directory='/run/borgmatic',
dry_run=False,
)
@ -783,7 +773,6 @@ def test_remove_data_source_dumps_bails_for_lsblk_command_error():
module.remove_data_source_dumps(
hook_config=config['lvm'],
config=config,
log_prefix='test',
borgmatic_runtime_directory='/run/borgmatic',
dry_run=False,
)
@ -828,7 +817,6 @@ def test_remove_data_source_dumps_with_missing_snapshot_directory_skips_unmount(
module.remove_data_source_dumps(
hook_config=config['lvm'],
config=config,
log_prefix='test',
borgmatic_runtime_directory='/run/borgmatic',
dry_run=False,
)
@ -886,7 +874,6 @@ def test_remove_data_source_dumps_with_missing_snapshot_mount_path_skips_unmount
module.remove_data_source_dumps(
hook_config=config['lvm'],
config=config,
log_prefix='test',
borgmatic_runtime_directory='/run/borgmatic',
dry_run=False,
)
@ -944,7 +931,6 @@ def test_remove_data_source_dumps_with_successful_mount_point_removal_skips_unmo
module.remove_data_source_dumps(
hook_config=config['lvm'],
config=config,
log_prefix='test',
borgmatic_runtime_directory='/run/borgmatic',
dry_run=False,
)
@ -988,7 +974,6 @@ def test_remove_data_source_dumps_bails_for_missing_umount_command():
module.remove_data_source_dumps(
hook_config=config['lvm'],
config=config,
log_prefix='test',
borgmatic_runtime_directory='/run/borgmatic',
dry_run=False,
)
@ -1032,7 +1017,6 @@ def test_remove_data_source_dumps_bails_for_umount_command_error():
module.remove_data_source_dumps(
hook_config=config['lvm'],
config=config,
log_prefix='test',
borgmatic_runtime_directory='/run/borgmatic',
dry_run=False,
)
@ -1076,7 +1060,6 @@ def test_remove_data_source_dumps_bails_for_missing_lvs_command():
module.remove_data_source_dumps(
hook_config=config['lvm'],
config=config,
log_prefix='test',
borgmatic_runtime_directory='/run/borgmatic',
dry_run=False,
)
@ -1122,7 +1105,6 @@ def test_remove_data_source_dumps_bails_for_lvs_command_error():
module.remove_data_source_dumps(
hook_config=config['lvm'],
config=config,
log_prefix='test',
borgmatic_runtime_directory='/run/borgmatic',
dry_run=False,
)
@ -1165,7 +1147,6 @@ def test_remove_data_source_with_dry_run_skips_snapshot_unmount_and_delete():
module.remove_data_source_dumps(
hook_config=config['lvm'],
config=config,
log_prefix='test',
borgmatic_runtime_directory='/run/borgmatic',
dry_run=True,
)

View file

@ -8,10 +8,9 @@ from borgmatic.hooks.data_source import mariadb as module
def test_database_names_to_dump_passes_through_name():
extra_environment = flexmock()
log_prefix = ''
names = module.database_names_to_dump(
{'name': 'foo'}, extra_environment, log_prefix, dry_run=False
{'name': 'foo'}, extra_environment, dry_run=False
)
assert names == ('foo',)
@ -19,11 +18,10 @@ def test_database_names_to_dump_passes_through_name():
def test_database_names_to_dump_bails_for_dry_run():
extra_environment = flexmock()
log_prefix = ''
flexmock(module).should_receive('execute_command_and_capture_output').never()
names = module.database_names_to_dump(
{'name': 'all'}, extra_environment, log_prefix, dry_run=True
{'name': 'all'}, extra_environment, dry_run=True
)
assert names == ()
@ -31,14 +29,13 @@ def test_database_names_to_dump_bails_for_dry_run():
def test_database_names_to_dump_queries_mariadb_for_database_names():
extra_environment = flexmock()
log_prefix = ''
flexmock(module).should_receive('execute_command_and_capture_output').with_args(
('mariadb', '--skip-column-names', '--batch', '--execute', 'show schemas'),
extra_environment=extra_environment,
).and_return('foo\nbar\nmysql\n').once()
names = module.database_names_to_dump(
{'name': 'all'}, extra_environment, log_prefix, dry_run=False
{'name': 'all'}, extra_environment, dry_run=False
)
assert names == ('foo', 'bar')
@ -46,12 +43,12 @@ def test_database_names_to_dump_queries_mariadb_for_database_names():
def test_use_streaming_true_for_any_databases():
assert module.use_streaming(
databases=[flexmock(), flexmock()], config=flexmock(), log_prefix=flexmock()
databases=[flexmock(), flexmock()], config=flexmock(),
)
def test_use_streaming_false_for_no_databases():
assert not module.use_streaming(databases=[], config=flexmock(), log_prefix=flexmock())
assert not module.use_streaming(databases=[], config=flexmock())
def test_dump_data_sources_dumps_each_database():
@ -65,7 +62,6 @@ def test_dump_data_sources_dumps_each_database():
for name, process in zip(('foo', 'bar'), processes):
flexmock(module).should_receive('execute_dump_command').with_args(
database={'name': name},
log_prefix=object,
dump_path=object,
database_names=(name,),
extra_environment=object,
@ -97,7 +93,6 @@ def test_dump_data_sources_dumps_with_password():
flexmock(module).should_receive('execute_dump_command').with_args(
database=database,
log_prefix=object,
dump_path=object,
database_names=('foo',),
extra_environment={'MYSQL_PWD': 'trustsome1'},
@ -123,7 +118,6 @@ def test_dump_data_sources_dumps_all_databases_at_once():
flexmock(module).should_receive('database_names_to_dump').and_return(('foo', 'bar'))
flexmock(module).should_receive('execute_dump_command').with_args(
database={'name': 'all'},
log_prefix=object,
dump_path=object,
database_names=('foo', 'bar'),
extra_environment=object,
@ -151,7 +145,6 @@ def test_dump_data_sources_dumps_all_databases_separately_when_format_configured
for name, process in zip(('foo', 'bar'), processes):
flexmock(module).should_receive('execute_dump_command').with_args(
database={'name': name, 'format': 'sql'},
log_prefix=object,
dump_path=object,
database_names=(name,),
extra_environment=object,
@ -233,7 +226,6 @@ def test_execute_dump_command_runs_mariadb_dump():
assert (
module.execute_dump_command(
database={'name': 'foo'},
log_prefix='log',
dump_path=flexmock(),
database_names=('foo',),
extra_environment=None,
@ -265,7 +257,6 @@ def test_execute_dump_command_runs_mariadb_dump_without_add_drop_database():
assert (
module.execute_dump_command(
database={'name': 'foo', 'add_drop_database': False},
log_prefix='log',
dump_path=flexmock(),
database_names=('foo',),
extra_environment=None,
@ -304,7 +295,6 @@ def test_execute_dump_command_runs_mariadb_dump_with_hostname_and_port():
assert (
module.execute_dump_command(
database={'name': 'foo', 'hostname': 'database.example.org', 'port': 5433},
log_prefix='log',
dump_path=flexmock(),
database_names=('foo',),
extra_environment=None,
@ -339,7 +329,6 @@ def test_execute_dump_command_runs_mariadb_dump_with_username_and_password():
assert (
module.execute_dump_command(
database={'name': 'foo', 'username': 'root', 'password': 'trustsome1'},
log_prefix='log',
dump_path=flexmock(),
database_names=('foo',),
extra_environment={'MYSQL_PWD': 'trustsome1'},
@ -373,7 +362,6 @@ def test_execute_dump_command_runs_mariadb_dump_with_options():
assert (
module.execute_dump_command(
database={'name': 'foo', 'options': '--stuff=such'},
log_prefix='log',
dump_path=flexmock(),
database_names=('foo',),
extra_environment=None,
@@ -411,7 +399,6 @@ def test_execute_dump_command_runs_non_default_mariadb_dump_with_options():
'mariadb_dump_command': 'custom_mariadb_dump',
'options': '--stuff=such',
}, # Custom MariaDB dump command specified
log_prefix='log',
dump_path=flexmock(),
database_names=('foo',),
extra_environment=None,
@@ -431,7 +418,6 @@ def test_execute_dump_command_with_duplicate_dump_skips_mariadb_dump():
assert (
module.execute_dump_command(
database={'name': 'foo'},
log_prefix='log',
dump_path=flexmock(),
database_names=('foo',),
extra_environment=None,
@@ -452,7 +438,6 @@ def test_execute_dump_command_with_dry_run_skips_mariadb_dump():
assert (
module.execute_dump_command(
database={'name': 'foo'},
log_prefix='log',
dump_path=flexmock(),
database_names=('foo',),
extra_environment=None,

View file

@@ -9,7 +9,6 @@ def test_use_streaming_true_for_any_non_directory_format_databases():
assert module.use_streaming(
databases=[{'format': 'stuff'}, {'format': 'directory'}, {}],
config=flexmock(),
log_prefix=flexmock(),
)
@@ -17,12 +16,11 @@ def test_use_streaming_false_for_all_directory_format_databases():
assert not module.use_streaming(
databases=[{'format': 'directory'}, {'format': 'directory'}],
config=flexmock(),
log_prefix=flexmock(),
)
def test_use_streaming_false_for_no_databases():
assert not module.use_streaming(databases=[], config=flexmock(), log_prefix=flexmock())
assert not module.use_streaming(databases=[], config=flexmock())
def test_dump_data_sources_runs_mongodump_for_each_database():

View file

@@ -8,10 +8,9 @@ from borgmatic.hooks.data_source import mysql as module
def test_database_names_to_dump_passes_through_name():
extra_environment = flexmock()
log_prefix = ''
names = module.database_names_to_dump(
{'name': 'foo'}, extra_environment, log_prefix, dry_run=False
{'name': 'foo'}, extra_environment, dry_run=False
)
assert names == ('foo',)
@@ -19,11 +18,10 @@ def test_database_names_to_dump_passes_through_name():
def test_database_names_to_dump_bails_for_dry_run():
extra_environment = flexmock()
log_prefix = ''
flexmock(module).should_receive('execute_command_and_capture_output').never()
names = module.database_names_to_dump(
{'name': 'all'}, extra_environment, log_prefix, dry_run=True
{'name': 'all'}, extra_environment, dry_run=True
)
assert names == ()
@@ -31,14 +29,13 @@ def test_database_names_to_dump_bails_for_dry_run():
def test_database_names_to_dump_queries_mysql_for_database_names():
extra_environment = flexmock()
log_prefix = ''
flexmock(module).should_receive('execute_command_and_capture_output').with_args(
('mysql', '--skip-column-names', '--batch', '--execute', 'show schemas'),
extra_environment=extra_environment,
).and_return('foo\nbar\nmysql\n').once()
names = module.database_names_to_dump(
{'name': 'all'}, extra_environment, log_prefix, dry_run=False
{'name': 'all'}, extra_environment, dry_run=False
)
assert names == ('foo', 'bar')
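Taken together, the three mysql hunks above pin down the new database_names_to_dump contract: no log_prefix parameter, a single named database passed straight through, a dry run returning nothing without querying the server, and system schemas (at least 'mysql') filtered out of the 'show schemas' listing. A minimal self-contained sketch consistent with those assertions — not borgmatic's actual module; the SYSTEM_SCHEMAS set and the direct subprocess call are illustrative assumptions:

    import subprocess

    SYSTEM_SCHEMAS = {'mysql', 'information_schema', 'performance_schema', 'sys'}

    def database_names_to_dump(database, extra_environment, dry_run):
        # A single named database dumps as itself.
        if database['name'] != 'all':
            return (database['name'],)

        # For 'all', a dry run skips querying the server entirely.
        if dry_run:
            return ()

        # Otherwise ask the server for its schemas and drop system ones.
        output = subprocess.check_output(
            ('mysql', '--skip-column-names', '--batch', '--execute', 'show schemas'),
            env=extra_environment,
            text=True,
        )

        return tuple(
            name for name in output.splitlines() if name and name not in SYSTEM_SCHEMAS
        )

With the mocked 'foo\nbar\nmysql\n' output above, this returns ('foo', 'bar'), matching the assertion.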
@@ -46,12 +43,12 @@ def test_database_names_to_dump_queries_mysql_for_database_names():
def test_use_streaming_true_for_any_databases():
assert module.use_streaming(
databases=[flexmock(), flexmock()], config=flexmock(), log_prefix=flexmock()
databases=[flexmock(), flexmock()], config=flexmock(),
)
def test_use_streaming_false_for_no_databases():
assert not module.use_streaming(databases=[], config=flexmock(), log_prefix=flexmock())
assert not module.use_streaming(databases=[], config=flexmock())
def test_dump_data_sources_dumps_each_database():
@@ -65,7 +62,6 @@ def test_dump_data_sources_dumps_each_database():
for name, process in zip(('foo', 'bar'), processes):
flexmock(module).should_receive('execute_dump_command').with_args(
database={'name': name},
log_prefix=object,
dump_path=object,
database_names=(name,),
extra_environment=object,
@@ -97,7 +93,6 @@ def test_dump_data_sources_dumps_with_password():
flexmock(module).should_receive('execute_dump_command').with_args(
database=database,
log_prefix=object,
dump_path=object,
database_names=('foo',),
extra_environment={'MYSQL_PWD': 'trustsome1'},
@@ -123,7 +118,6 @@ def test_dump_data_sources_dumps_all_databases_at_once():
flexmock(module).should_receive('database_names_to_dump').and_return(('foo', 'bar'))
flexmock(module).should_receive('execute_dump_command').with_args(
database={'name': 'all'},
log_prefix=object,
dump_path=object,
database_names=('foo', 'bar'),
extra_environment=object,
@@ -151,7 +145,6 @@ def test_dump_data_sources_dumps_all_databases_separately_when_format_configured
for name, process in zip(('foo', 'bar'), processes):
flexmock(module).should_receive('execute_dump_command').with_args(
database={'name': name, 'format': 'sql'},
log_prefix=object,
dump_path=object,
database_names=(name,),
extra_environment=object,
@@ -233,7 +226,6 @@ def test_execute_dump_command_runs_mysqldump():
assert (
module.execute_dump_command(
database={'name': 'foo'},
log_prefix='log',
dump_path=flexmock(),
database_names=('foo',),
extra_environment=None,
@@ -265,7 +257,6 @@ def test_execute_dump_command_runs_mysqldump_without_add_drop_database():
assert (
module.execute_dump_command(
database={'name': 'foo', 'add_drop_database': False},
log_prefix='log',
dump_path=flexmock(),
database_names=('foo',),
extra_environment=None,
@@ -304,7 +295,6 @@ def test_execute_dump_command_runs_mysqldump_with_hostname_and_port():
assert (
module.execute_dump_command(
database={'name': 'foo', 'hostname': 'database.example.org', 'port': 5433},
log_prefix='log',
dump_path=flexmock(),
database_names=('foo',),
extra_environment=None,
@@ -339,7 +329,6 @@ def test_execute_dump_command_runs_mysqldump_with_username_and_password():
assert (
module.execute_dump_command(
database={'name': 'foo', 'username': 'root', 'password': 'trustsome1'},
log_prefix='log',
dump_path=flexmock(),
database_names=('foo',),
extra_environment={'MYSQL_PWD': 'trustsome1'},
@@ -373,7 +362,6 @@ def test_execute_dump_command_runs_mysqldump_with_options():
assert (
module.execute_dump_command(
database={'name': 'foo', 'options': '--stuff=such'},
log_prefix='log',
dump_path=flexmock(),
database_names=('foo',),
extra_environment=None,
@@ -409,7 +397,6 @@ def test_execute_dump_command_runs_non_default_mysqldump():
'name': 'foo',
'mysql_dump_command': 'custom_mysqldump',
}, # Custom MySQL dump command specified
log_prefix='log',
dump_path=flexmock(),
database_names=('foo',),
extra_environment=None,
@@ -429,7 +416,6 @@ def test_execute_dump_command_with_duplicate_dump_skips_mysqldump():
assert (
module.execute_dump_command(
database={'name': 'foo'},
log_prefix='log',
dump_path=flexmock(),
database_names=('foo',),
extra_environment=None,
@@ -450,7 +436,6 @@ def test_execute_dump_command_with_dry_run_skips_mysqldump():
assert (
module.execute_dump_command(
database={'name': 'foo'},
log_prefix='log',
dump_path=flexmock(),
database_names=('foo',),
extra_environment=None,

View file

@@ -203,7 +203,6 @@ def test_use_streaming_true_for_any_non_directory_format_databases():
assert module.use_streaming(
databases=[{'format': 'stuff'}, {'format': 'directory'}, {}],
config=flexmock(),
log_prefix=flexmock(),
)
@@ -211,12 +210,11 @@ def test_use_streaming_false_for_all_directory_format_databases():
assert not module.use_streaming(
databases=[{'format': 'directory'}, {'format': 'directory'}],
config=flexmock(),
log_prefix=flexmock(),
)
def test_use_streaming_false_for_no_databases():
assert not module.use_streaming(databases=[], config=flexmock(), log_prefix=flexmock())
assert not module.use_streaming(databases=[], config=flexmock())
def test_dump_data_sources_runs_pg_dump_for_each_database():
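Across all of these hook test files, use_streaming now takes just databases and config. The assertions shown here determine two variants of its logic; the following is a sketch under that reading, not the hook modules themselves (each real module simply names its function use_streaming — the suffixes below only disambiguate the two variants):

    def use_streaming_always(databases, config):
        # mysql/mariadb/sqlite style: any configured database is streamed.
        return any(databases)

    def use_streaming_directory_capable(databases, config):
        # mongodb/postgresql style: stream unless every database uses the
        # non-streaming 'directory' dump format.
        return any(database.get('format') != 'directory' for database in databases)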

View file

@@ -7,12 +7,12 @@ from borgmatic.hooks.data_source import sqlite as module
def test_use_streaming_true_for_any_databases():
assert module.use_streaming(
databases=[flexmock(), flexmock()], config=flexmock(), log_prefix=flexmock()
databases=[flexmock(), flexmock()], config=flexmock(),
)
def test_use_streaming_false_for_no_databases():
assert not module.use_streaming(databases=[], config=flexmock(), log_prefix=flexmock())
assert not module.use_streaming(databases=[], config=flexmock())
def test_dump_data_sources_logs_and_skips_if_dump_already_exists():

View file

@@ -154,7 +154,6 @@ def test_dump_data_sources_snapshots_and_mounts_and_updates_patterns():
module.dump_data_sources(
hook_config={},
config={'source_directories': '/mnt/dataset', 'zfs': {}},
log_prefix='test',
config_paths=('test.yaml',),
borgmatic_runtime_directory='/run/borgmatic',
patterns=patterns,
@@ -177,7 +176,6 @@ def test_dump_data_sources_with_no_datasets_skips_snapshots():
module.dump_data_sources(
hook_config={},
config={'patterns': flexmock(), 'zfs': {}},
log_prefix='test',
config_paths=('test.yaml',),
borgmatic_runtime_directory='/run/borgmatic',
patterns=patterns,
@@ -227,7 +225,6 @@ def test_dump_data_sources_uses_custom_commands():
'patterns': flexmock(),
'zfs': hook_config,
},
log_prefix='test',
config_paths=('test.yaml',),
borgmatic_runtime_directory='/run/borgmatic',
patterns=patterns,
@@ -252,7 +249,6 @@ def test_dump_data_sources_with_dry_run_skips_commands_and_does_not_touch_patter
module.dump_data_sources(
hook_config={},
config={'patterns': ('R /mnt/dataset',), 'zfs': {}},
log_prefix='test',
config_paths=('test.yaml',),
borgmatic_runtime_directory='/run/borgmatic',
patterns=patterns,
@@ -295,7 +291,6 @@ def test_dump_data_sources_ignores_mismatch_between_given_patterns_and_contained
module.dump_data_sources(
hook_config={},
config={'patterns': ('R /mnt/dataset',), 'zfs': {}},
log_prefix='test',
config_paths=('test.yaml',),
borgmatic_runtime_directory='/run/borgmatic',
patterns=patterns,
@@ -338,7 +333,6 @@ def test_remove_data_source_dumps_unmounts_and_destroys_snapshots():
module.remove_data_source_dumps(
hook_config={},
config={'source_directories': '/mnt/dataset', 'zfs': {}},
log_prefix='test',
borgmatic_runtime_directory='/run/borgmatic',
dry_run=False,
)
@@ -366,7 +360,6 @@ def test_remove_data_source_dumps_use_custom_commands():
module.remove_data_source_dumps(
hook_config=hook_config,
config={'source_directories': '/mnt/dataset', 'zfs': hook_config},
log_prefix='test',
borgmatic_runtime_directory='/run/borgmatic',
dry_run=False,
)
@@ -381,7 +374,6 @@ def test_remove_data_source_dumps_bails_for_missing_hook_configuration():
module.remove_data_source_dumps(
hook_config=None,
config={'source_directories': '/mnt/dataset'},
log_prefix='test',
borgmatic_runtime_directory='/run/borgmatic',
dry_run=False,
)
@@ -397,7 +389,6 @@ def test_remove_data_source_dumps_bails_for_missing_zfs_command():
module.remove_data_source_dumps(
hook_config=hook_config,
config={'source_directories': '/mnt/dataset', 'zfs': hook_config},
log_prefix='test',
borgmatic_runtime_directory='/run/borgmatic',
dry_run=False,
)
@@ -415,7 +406,6 @@ def test_remove_data_source_dumps_bails_for_zfs_command_error():
module.remove_data_source_dumps(
hook_config=hook_config,
config={'source_directories': '/mnt/dataset', 'zfs': hook_config},
log_prefix='test',
borgmatic_runtime_directory='/run/borgmatic',
dry_run=False,
)
@@ -439,7 +429,6 @@ def test_remove_data_source_dumps_bails_for_missing_umount_command():
module.remove_data_source_dumps(
hook_config=hook_config,
config={'source_directories': '/mnt/dataset', 'zfs': hook_config},
log_prefix='test',
borgmatic_runtime_directory='/run/borgmatic',
dry_run=False,
)
@@ -463,7 +452,6 @@ def test_remove_data_source_dumps_bails_for_umount_command_error():
module.remove_data_source_dumps(
hook_config=hook_config,
config={'source_directories': '/mnt/dataset', 'zfs': hook_config},
log_prefix='test',
borgmatic_runtime_directory='/run/borgmatic',
dry_run=False,
)
@@ -488,7 +476,6 @@ def test_remove_data_source_dumps_skips_unmount_snapshot_directories_that_are_no
module.remove_data_source_dumps(
hook_config={},
config={'source_directories': '/mnt/dataset', 'zfs': {}},
log_prefix='test',
borgmatic_runtime_directory='/run/borgmatic',
dry_run=False,
)
@@ -518,7 +505,6 @@ def test_remove_data_source_dumps_skips_unmount_snapshot_mount_paths_that_are_no
module.remove_data_source_dumps(
hook_config={},
config={'source_directories': '/mnt/dataset', 'zfs': {}},
log_prefix='test',
borgmatic_runtime_directory='/run/borgmatic',
dry_run=False,
)
@@ -548,7 +534,6 @@ def test_remove_data_source_dumps_skips_unmount_snapshot_mount_paths_after_rmtre
module.remove_data_source_dumps(
hook_config={},
config={'source_directories': '/mnt/dataset', 'zfs': {}},
log_prefix='test',
borgmatic_runtime_directory='/run/borgmatic',
dry_run=False,
)
@@ -571,7 +556,6 @@ def test_remove_data_source_dumps_with_dry_run_skips_unmount_and_destroy():
module.remove_data_source_dumps(
hook_config={},
config={'source_directories': '/mnt/dataset', 'zfs': {}},
log_prefix='test',
borgmatic_runtime_directory='/run/borgmatic',
dry_run=True,
)
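Every ZFS hook call above also drops log_prefix, which only works if the repository label reaches the log output some other way. One plausible mechanism — an assumption for illustration; this diff doesn't show the actual wiring — is a logging filter that prepends the active repository's label centrally:

    import logging

    class RepositoryPrefixFilter(logging.Filter):
        '''Prepend a repository label to every record passing through.'''

        def __init__(self, prefix):
            super().__init__()
            self.prefix = prefix

        def filter(self, record):
            record.msg = f'{self.prefix}: {record.msg}'
            return True

    logger = logging.getLogger('borgmatic.example')
    handler = logging.StreamHandler()
    handler.addFilter(RepositoryPrefixFilter('repo.example.org'))
    logger.addHandler(handler)
    logger.setLevel(logging.INFO)

    # Call sites then log plain messages, and the label arrives via the filter:
    logger.info('Removing data source dumps')  # repo.example.org: Removing data source dumps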

View file

@@ -7,13 +7,13 @@ from borgmatic.hooks import command as module
def test_interpolate_context_passes_through_command_without_variable():
assert module.interpolate_context('test.yaml', 'pre-backup', 'ls', {'foo': 'bar'}) == 'ls'
assert module.interpolate_context('pre-backup', 'ls', {'foo': 'bar'}) == 'ls'
def test_interpolate_context_passes_through_command_with_unknown_variable():
command = 'ls {baz}' # noqa: FS003
assert module.interpolate_context('test.yaml', 'pre-backup', command, {'foo': 'bar'}) == command
assert module.interpolate_context('pre-backup', command, {'foo': 'bar'}) == command
def test_interpolate_context_interpolates_variables():
@@ -21,7 +21,7 @@ def test_interpolate_context_interpolates_variables():
context = {'foo': 'bar', 'baz': 'quux'}
assert (
module.interpolate_context('test.yaml', 'pre-backup', command, context) == 'ls barquux quux'
module.interpolate_context('pre-backup', command, context) == 'ls barquux quux'
)
@@ -30,7 +30,7 @@ def test_interpolate_context_escapes_interpolated_variables():
context = {'foo': 'bar', 'inject': 'hi; naughty-command'}
assert (
module.interpolate_context('test.yaml', 'pre-backup', command, context)
module.interpolate_context('pre-backup', command, context)
== "ls bar 'hi; naughty-command'"
)
@@ -53,7 +53,7 @@ def test_make_environment_with_pyinstaller_and_LD_LIBRARY_PATH_ORIG_copies_it_in
def test_execute_hook_invokes_each_command():
flexmock(module).should_receive('interpolate_context').replace_with(
lambda config_file, hook_description, command, context: command
lambda hook_description, command, context: command
)
flexmock(module).should_receive('make_environment').and_return({})
flexmock(module.borgmatic.execute).should_receive('execute_command').with_args(
@@ -68,7 +68,7 @@ def test_execute_hook_invokes_each_command():
def test_execute_hook_with_multiple_commands_invokes_each_command():
flexmock(module).should_receive('interpolate_context').replace_with(
lambda config_file, hook_description, command, context: command
lambda hook_description, command, context: command
)
flexmock(module).should_receive('make_environment').and_return({})
flexmock(module.borgmatic.execute).should_receive('execute_command').with_args(
@@ -89,7 +89,7 @@ def test_execute_hook_with_multiple_commands_invokes_each_command():
def test_execute_hook_with_umask_sets_that_umask():
flexmock(module).should_receive('interpolate_context').replace_with(
lambda config_file, hook_description, command, context: command
lambda hook_description, command, context: command
)
flexmock(module.os).should_receive('umask').with_args(0o77).and_return(0o22).once()
flexmock(module.os).should_receive('umask').with_args(0o22).once()
@@ -106,7 +106,7 @@ def test_execute_hook_with_umask_sets_that_umask():
def test_execute_hook_with_dry_run_skips_commands():
flexmock(module).should_receive('interpolate_context').replace_with(
lambda config_file, hook_description, command, context: command
lambda hook_description, command, context: command
)
flexmock(module).should_receive('make_environment').and_return({})
flexmock(module.borgmatic.execute).should_receive('execute_command').never()
@@ -120,7 +120,7 @@ def test_execute_hook_with_empty_commands_does_not_raise():
def test_execute_hook_on_error_logs_as_error():
flexmock(module).should_receive('interpolate_context').replace_with(
lambda config_file, hook_description, command, context: command
lambda hook_description, command, context: command
)
flexmock(module).should_receive('make_environment').and_return({})
flexmock(module.borgmatic.execute).should_receive('execute_command').with_args(
@@ -136,14 +136,14 @@ def test_execute_hook_on_error_logs_as_error():
def test_considered_soft_failure_treats_soft_fail_exit_code_as_soft_fail():
error = subprocess.CalledProcessError(module.SOFT_FAIL_EXIT_CODE, 'try again')
assert module.considered_soft_failure('config.yaml', error)
assert module.considered_soft_failure(error)
def test_considered_soft_failure_does_not_treat_other_exit_code_as_soft_fail():
error = subprocess.CalledProcessError(1, 'error')
assert not module.considered_soft_failure('config.yaml', error)
assert not module.considered_soft_failure(error)
def test_considered_soft_failure_does_not_treat_other_exception_type_as_soft_fail():
assert not module.considered_soft_failure('config.yaml', Exception())
assert not module.considered_soft_failure(Exception())
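The command hook assertions above fully specify the slimmed-down interpolate_context and considered_soft_failure: unknown placeholders pass through untouched, known ones are replaced with shell-escaped values, and only a CalledProcessError carrying the designated exit code counts as a soft failure. A minimal sketch satisfying those assertions — the value 75 for SOFT_FAIL_EXIT_CODE is an assumption, since the constant's value isn't visible in this diff:

    import shlex
    import subprocess

    SOFT_FAIL_EXIT_CODE = 75  # assumed value; the tests only reference the constant

    def interpolate_context(hook_description, command, context):
        # Replace each known {name} with its shell-escaped value; unknown
        # placeholders (and commands without any) pass through untouched.
        for name, value in context.items():
            command = command.replace('{%s}' % name, shlex.quote(str(value)))

        return command

    def considered_soft_failure(error):
        # Only a CalledProcessError with the designated exit code qualifies.
        return (
            isinstance(error, subprocess.CalledProcessError)
            and error.returncode == SOFT_FAIL_EXIT_CODE
        )

For example, a command like 'ls {foo} {inject}' with the context shown in the escaping test yields "ls bar 'hi; naughty-command'", matching the expected output.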

View file

@@ -6,7 +6,7 @@ from flexmock import flexmock
from borgmatic.hooks import dispatch as module
def hook_function(hook_config, config, log_prefix, thing, value):
def hook_function(hook_config, config, thing, value):
'''
This test function gets mocked out below.
'''
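Finally, the dispatch fixture's new shape — hook_function(hook_config, config, thing, value) — implies the dispatcher forwards only the hook's own configuration, the full config, and the positional arguments. A rough sketch of that calling convention, assuming a plain mapping of hook names to modules (the real dispatcher resolves hook modules dynamically):

    def call_hook(hook_modules, function_name, config, hook_name, *args):
        # Look up the hook's module, then call the named function with
        # (hook_config, config, *args) -- no log_prefix threading.
        module = hook_modules[hook_name]
        return getattr(module, function_name)(config.get(hook_name), config, *args)

    # e.g. call_hook({'zfs': zfs_module}, 'remove_data_source_dumps',
    #                config, 'zfs', '/run/borgmatic', False)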