[pre-commit.ci] auto fixes from pre-commit.com hooks
for more information, see https://pre-commit.ci
pre-commit-ci[bot] committed Jan 22, 2025
1 parent 1aa81ba commit ac93e54
Showing 10 changed files with 78 additions and 117 deletions.
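The hook configuration itself is not shown on this page, but the fixes below follow one consistent, ruff-style pattern: double-quoted strings normalized to single quotes, __all__ tuples sorted, imports grouped and ordered, unused imports dropped, spaces around '=' removed in keyword arguments, and trailing commas added in multi-line calls. As a reading aid, here is a minimal sketch of a module already written in the enforced style (hypothetical names, not taken from the diff):

"""Hypothetical module written the way these hooks format code."""

import json  # imports are grouped (stdlib, third-party, first-party) and sorted

__all__ = ('dump_options', 'load_options')  # __all__ entries sorted alphabetically


def dump_options(flat: bool = False) -> str:
    """Serialize an options dict using the enforced string style."""
    options = {
        'flat': flat,  # single-quoted strings throughout
        'mode': 'incremental',
    }
    return json.dumps(options)


def load_options(raw: str) -> dict:
    """Round-trip helper so the sketch is self-contained."""
    return json.loads(raw)


serialized = dump_options(
    flat=True,  # keyword arguments: no spaces around '='; magic trailing comma kept
)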
20 changes: 10 additions & 10 deletions src/aiida/cmdline/commands/cmd_process.py
@@ -608,25 +608,25 @@ def process_dump(
     from aiida.tools.archive.exceptions import ExportValidationError
+    from aiida.tools.dumping.data import DataDumper
     from aiida.tools.dumping.processes import ProcessDumper
-    from aiida.tools.dumping.data import DataDumper

     # from aiida.tools.dumping.utils import validate_rich_options
     from aiida.tools.dumping.rich import rich_from_cli

     processdumper_kwargs = {
-        "include_inputs": include_inputs,
-        "include_outputs": include_outputs,
-        "include_attributes": include_attributes,
-        "include_extras": include_extras,
-        "flat": flat,
+        'include_inputs': include_inputs,
+        'include_outputs': include_outputs,
+        'include_attributes': include_attributes,
+        'include_extras': include_extras,
+        'flat': flat,
     }

     rich_kwargs = {
-        "rich_dump_all": rich_dump_all,
+        'rich_dump_all': rich_dump_all,
     }

     datadumper_kwargs = {
-        "also_raw": also_raw,
-        "also_rich": also_rich,
+        'also_raw': also_raw,
+        'also_rich': also_rich,
     }

     # if also_rich:
@@ -644,7 +644,7 @@ def process_dump(

     data_dumper = DataDumper(
         overwrite=overwrite,
-        rich_spec_dict = rich_spec_dict,
+        rich_spec_dict=rich_spec_dict,
         **datadumper_kwargs,
         **rich_kwargs,
     )
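The hunks above only restyle an existing pattern worth noting: the command gathers its CLI flags into plain dicts (processdumper_kwargs, datadumper_kwargs, rich_kwargs) and forwards them with ** unpacking instead of threading every parameter through individually. A minimal sketch of that pattern with placeholder names (SketchDumper is not AiiDA's real API):

class SketchDumper:
    """Placeholder stand-in for a dumper class such as DataDumper."""

    def __init__(self, overwrite: bool, also_raw: bool = False, also_rich: bool = False):
        self.overwrite = overwrite
        self.also_raw = also_raw
        self.also_rich = also_rich


def command(overwrite: bool, also_raw: bool, also_rich: bool) -> SketchDumper:
    # Group related CLI flags into one dict so they can be stored, logged,
    # or forwarded as a unit rather than repeated at every call site.
    datadumper_kwargs = {
        'also_raw': also_raw,
        'also_rich': also_rich,
    }
    return SketchDumper(overwrite=overwrite, **datadumper_kwargs)


dumper = command(overwrite=True, also_raw=False, also_rich=True)
assert dumper.also_rich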
107 changes: 43 additions & 64 deletions src/aiida/cmdline/commands/cmd_storage.py
@@ -20,7 +20,6 @@
 from aiida.cmdline.utils import decorators, echo
 from aiida.common import exceptions
 from aiida.tools.dumping import CollectionDumper, DataDumper, ProcessDumper
-from aiida.tools.dumping.utils import dumper_pretty_print


 @verdi.group('storage')
@@ -306,12 +305,9 @@ def storage_dump(
     groups,
 ):
     """Dump all data in an AiiDA profile's storage to disk."""
-    from rich.pretty import pprint
-
+
     from aiida import orm
-    import time
-    import json
     from aiida.tools.dumping.collection import DEFAULT_ENTITIES_TO_DUMP
     from aiida.tools.dumping.parser import DumpConfigParser
     from aiida.tools.dumping.rich import (
         DEFAULT_CORE_EXPORT_MAPPING,
@@ -320,8 +316,7 @@ def storage_dump(
     )
     from aiida.tools.dumping.utils import validate_make_dump_path

-
-    profile = ctx.obj["profile"]
+    profile = ctx.obj['profile']
     profile_name = profile.name

     # from aiida.manage.manager import get_manager
@@ -353,37 +348,36 @@ def storage_dump(
     # echo.echo_critical('`all_entries` and `groups` specified. Set only one.')

     if dump_config_file is None:
-
         general_kwargs = {
-            "path": path,
-            "overwrite": overwrite,
-            "incremental": incremental,
-            "dry_run": dry_run,
+            'path': path,
+            'overwrite': overwrite,
+            'incremental': incremental,
+            'dry_run': dry_run,
         }

         processdumper_kwargs = {
-            "include_inputs": include_inputs,
-            "include_outputs": include_outputs,
-            "include_attributes": include_attributes,
-            "include_extras": include_extras,
-            "flat": flat,
+            'include_inputs': include_inputs,
+            'include_outputs': include_outputs,
+            'include_attributes': include_attributes,
+            'include_extras': include_extras,
+            'flat': flat,
             # "calculations_hidden": calculations_hidden
         }

         datadumper_kwargs = {
-            "also_raw": also_raw,
-            "also_rich": also_rich,
-            "data_hidden": data_hidden,
+            'also_raw': also_raw,
+            'also_rich': also_rich,
+            'data_hidden': data_hidden,
         }

         collection_kwargs = {
-            "should_dump_processes": dump_processes,
-            "should_dump_data": dump_data,
-            "only_top_level_workflows": only_top_level_workflows,
+            'should_dump_processes': dump_processes,
+            'should_dump_data': dump_data,
+            'only_top_level_workflows': only_top_level_workflows,
         }

         rich_kwargs = {
-            "rich_dump_all": rich_dump_all,
+            'rich_dump_all': rich_dump_all,
         }

         if rich_spec is not None:
@@ -396,47 +390,38 @@ def storage_dump(
     else:
         kwarg_dicts_from_config = DumpConfigParser.parse_config_file(dump_config_file)

-        general_kwargs = kwarg_dicts_from_config["general_kwargs"]
-        processdumper_kwargs = kwarg_dicts_from_config["processdumper_kwargs"]
-        datadumper_kwargs = kwarg_dicts_from_config["datadumper_kwargs"]
-        collection_kwargs = kwarg_dicts_from_config["collection_kwargs"]
-        rich_kwargs = kwarg_dicts_from_config["rich_kwargs"]
+        general_kwargs = kwarg_dicts_from_config['general_kwargs']
+        processdumper_kwargs = kwarg_dicts_from_config['processdumper_kwargs']
+        datadumper_kwargs = kwarg_dicts_from_config['datadumper_kwargs']
+        collection_kwargs = kwarg_dicts_from_config['collection_kwargs']
+        rich_kwargs = kwarg_dicts_from_config['rich_kwargs']

-        rich_spec_dict = rich_from_config(
-            kwarg_dicts_from_config["rich_spec"], **rich_kwargs
-        )
+        rich_spec_dict = rich_from_config(kwarg_dicts_from_config['rich_spec'], **rich_kwargs)

     # Obtain these specifically for easy access and modifications
-    path = general_kwargs["path"]
-    overwrite = general_kwargs["overwrite"]
-    dry_run = general_kwargs["dry_run"]
-    incremental = general_kwargs["incremental"]
+    path = general_kwargs['path']
+    overwrite = general_kwargs['overwrite']
+    dry_run = general_kwargs['dry_run']
+    incremental = general_kwargs['incremental']

     if not overwrite and incremental:
-        echo.echo_report(
-            "Overwrite set to false, but incremental dumping selected. Will keep existing directories."
-        )
+        echo.echo_report('Overwrite set to false, but incremental dumping selected. Will keep existing directories.')

     if not str(path).endswith(profile.name):
         path /= profile.name

     # TODO: Implement proper dry-run feature
-    dry_run_message = (
-        f"Dry run for dumping of profile `{profile.name}`'s data at path: `{path}`.\n"
-    )
-    dry_run_message += "Only directories will be created."
+    dry_run_message = f"Dry run for dumping of profile `{profile.name}`'s data at path: `{path}`.\n"
+    dry_run_message += 'Only directories will be created.'

-    if dry_run or (
-        not collection_kwargs["should_dump_processes"]
-        and not collection_kwargs["should_dump_data"]
-    ):
+    if dry_run or (not collection_kwargs['should_dump_processes'] and not collection_kwargs['should_dump_data']):
         echo.echo_report(dry_run_message)
         return

     else:
         echo.echo_report(f"Dumping of profile `{profile.name}`'s data at path: `{path}`.")

-    SAFEGUARD_FILE = ".verdi_storage_dump"  # noqa: N806
+    SAFEGUARD_FILE = '.verdi_storage_dump'  # noqa: N806

     try:
         validate_make_dump_path(
@@ -470,6 +455,7 @@ def storage_dump(
     # dumper_pretty_print(process_dumper)

     from aiida.tools.dumping.incremental import DumpNodeCollector
+
     dumpnodecollector = DumpNodeCollector(dump_parent_path=path)

     dumpnodecollector.update_uuids_before_dump()
@@ -494,22 +480,18 @@ def storage_dump(
         **rich_kwargs,
         data_dumper=data_dumper,
         process_dumper=process_dumper,
-        deduplicate=deduplicate
+        deduplicate=deduplicate,
     )
     collection_dumper.create_entity_counter()
     # dumper_pretty_print(collection_dumper, include_private_and_dunder=False)

     if dump_processes and collection_dumper._should_dump_processes():
-        echo.echo_report(
-            f"Dumping processes not in any group for profile `{profile.name}`..."
-        )
+        echo.echo_report(f'Dumping processes not in any group for profile `{profile.name}`...')
         collection_dumper.dump_processes()
     if dump_data:
         if not also_rich and not also_raw:
-            echo.echo_critical(
-                "`--dump-data was given, but neither --also-raw or --also-rich specified."
-            )
-        echo.echo_report(f"Dumping data not in any group for profile {profile.name}...")
+            echo.echo_critical('`--dump-data was given, but neither --also-raw or --also-rich specified.')
+        echo.echo_report(f'Dumping data not in any group for profile {profile.name}...')

         collection_dumper.dump_data_rich()
         # collection_dumper.dump_plugin_data()
@@ -518,14 +500,14 @@ def storage_dump(
     # TODO: Invert default behavior here, as I typically want to dump all entries
     # TODO: Possibly define a new click option instead
     # all_entries = not all_entries
-    if not groups: # and all_entries:
+    if not groups:  # and all_entries:
         groups = orm.QueryBuilder().append(orm.Group).all(flat=True)

     if groups is not None and not nodes:
         for group in groups:
             if organize_by_groups:
-                group_subdir = Path(*group.type_string.split("."))
-                group_path = path / "groups" / group_subdir / group.label
+                group_subdir = Path(*group.type_string.split('.'))
+                group_path = path / 'groups' / group_subdir / group.label
             else:
                 group_path = path
@@ -546,12 +528,9 @@ def storage_dump(
             # "Dumping processes for group `SSSP/1.3/PBE/efficiency`" is printed for groups that
             # don't contain processes
             if collection_dumper._should_dump_processes():
-                echo.echo_report(f"Dumping processes for group `{group.label}`...")
+                echo.echo_report(f'Dumping processes for group `{group.label}`...')
                 collection_dumper.dump_processes()
             if dump_data:
-                echo.echo_report(f"Dumping data for group `{group.label}`...")
+                echo.echo_report(f'Dumping data for group `{group.label}`...')
                 collection_dumper.dump_data_rich()
                 # collection_dumper.dump_plugin_data()
-
-
-


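The SAFEGUARD_FILE constant and the validate_make_dump_path call above point at how the dumper avoids clobbering arbitrary directories: every directory it creates is tagged with a marker file, and an existing directory is only wiped when that marker is present. The real signature is cut off in this diff, so the following is only a plausible sketch of the idea (the touch() at the end mirrors the one visible in the utils.py hunk below):

import shutil
from pathlib import Path

SAFEGUARD_FILE = '.verdi_storage_dump'  # marker proving a previous dump created the directory


def prepare_dump_path_sketch(path: Path, overwrite: bool = False, safeguard_file: str = SAFEGUARD_FILE) -> Path:
    """Hypothetical helper: create ``path`` for a dump, refusing to delete data we did not write."""
    if path.is_dir() and any(path.iterdir()):
        if not (path / safeguard_file).is_file():
            # No marker: the directory holds unrelated user data, so bail out.
            raise FileExistsError(f'{path} exists but contains no {safeguard_file}')
        if overwrite:
            shutil.rmtree(path)  # safe: the marker shows a previous dump owned this directory

    path.mkdir(parents=True, exist_ok=True)  # without overwrite this behaves incrementally
    (path / safeguard_file).touch()  # tag the fresh directory for the next run
    return path


dump_dir = prepare_dump_path_sketch(Path('/tmp/profile-dump'), overwrite=True)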
16 changes: 8 additions & 8 deletions src/aiida/cmdline/params/options/main.py
@@ -62,8 +62,9 @@
     'DICT_FORMAT',
     'DICT_KEYS',
     'DRY_RUN',
-    'DUMP_PROCESSES',
     'DUMP_CONFIG_FILE',
+    'DUMP_DATA',
+    'DUMP_PROCESSES',
     'EXIT_STATUS',
     'EXPORT_FORMAT',
     'FAILED',
@@ -76,11 +77,11 @@
     'GROUP_CLEAR',
     'HOSTNAME',
     'IDENTIFIER',
-    'INCREMENTAL',
-    'INCLUDE_INPUTS',
-    'INCLUDE_OUTPUTS',
     'INCLUDE_ATTRIBUTES',
     'INCLUDE_EXTRAS',
+    'INCLUDE_INPUTS',
+    'INCLUDE_OUTPUTS',
+    'INCREMENTAL',
     'INCREMENTAL',
     'INPUT_FORMAT',
     'INPUT_PLUGIN',
@@ -90,8 +91,8 @@
     'NODE',
     'NODES',
     'NON_INTERACTIVE',
-    'ONLY_TOP_LEVEL_WORKFLOWS',
     'OLDER_THAN',
+    'ONLY_TOP_LEVEL_WORKFLOWS',
     'ORDER_BY',
     'ORDER_DIRECTION',
     'ORGANIZE_BY_GROUPS',
Expand All @@ -110,9 +111,8 @@
'PROJECT',
'RAW',
'REPOSITORY_PATH',
'RICH_SPEC',
'DUMP_CONFIG_FILE',
'RICH_DUMP_ALL',
'RICH_SPEC',
'SCHEDULER',
'SILENT',
'SORT',
@@ -870,7 +870,7 @@ def set_log_level(ctx, _param, value):
     is_flag=True,
     type=bool,
     show_default=True,
-    help='If a rich specification is provided, this triggers if all other Data nodes should also be dumped or not.'
+    help='If a rich specification is provided, this triggers if all other Data nodes should also be dumped or not.',
 )

 ORGANIZE_BY_GROUPS = OverridableOption(
2 changes: 1 addition & 1 deletion src/aiida/tools/dumping/__init__.py
@@ -12,4 +12,4 @@
 from .data import DataDumper
 from .processes import ProcessDumper

-__all__ = ('ProcessDumper', 'DataDumper', 'CollectionDumper')
+__all__ = ('CollectionDumper', 'DataDumper', 'ProcessDumper')
9 changes: 3 additions & 6 deletions src/aiida/tools/dumping/incremental.py
@@ -1,10 +1,7 @@
-from aiida import orm
-from aiida.manage import get_manager
-from pathlib import Path
-import json
-import itertools as it
-from rich.pretty import pprint
 from typing import Any
+from pathlib import Path
+
+from aiida import orm

# TODO: flat list using UUIDs, or sorted by type?
# TODO: individual lists, ids, uuids, etc., or sorted: by data, or more fine-grained
Expand Down
3 changes: 1 addition & 2 deletions src/aiida/tools/dumping/rich.py
@@ -1,7 +1,6 @@
-
 from aiida.cmdline.commands.cmd_data.cmd_export import data_export

-__all__ = ('rich_from_cli', 'rich_from_config', 'DEFAULT_CORE_EXPORT_MAPPING')
+__all__ = ('DEFAULT_CORE_EXPORT_MAPPING', 'rich_from_cli', 'rich_from_config')

 DEFAULT_CORE_EXPORT_MAPPING = {
     'core.array': {'exporter': data_export, 'export_format': 'json'},
Expand Down
2 changes: 0 additions & 2 deletions src/aiida/tools/dumping/utils.py
@@ -79,8 +79,6 @@ def prepare_dump_path(
     (path_to_validate / safeguard_file).touch()


-
-
 def get_nodes_from_db(qb_instance, qb_filters: t.List | None = None, flat=False):
     # Computers cannot be associated via `with_group`
     # for qb_filter in qb_filters:
4 changes: 3 additions & 1 deletion tests/conftest.py
@@ -956,8 +956,10 @@ def cat_path() -> Path:
 @pytest.fixture
 def generate_calculation_node_io(generate_calculation_node, tmp_path):
     def _generate_calculation_node_io(entry_point: str | None = None, attach_outputs: bool = True):
-        import numpy as np
         import io
+
+        import numpy as np
+
         from aiida.orm import ArrayData, FolderData, SinglefileData

         filename = 'file.txt'
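The conftest.py hunk above touches a factory fixture: generate_calculation_node_io does not return a node directly but an inner function, so each test can pass its own arguments (entry_point, attach_outputs) at call time. A stripped-down sketch of the pattern with generic names, not the actual AiiDA fixture body:

import pytest


@pytest.fixture
def generate_record(tmp_path):
    """Factory fixture: returns a callable so tests choose arguments per call."""

    def _generate_record(filename: str = 'file.txt', attach_outputs: bool = True) -> dict:
        # Function-scoped imports like the ones reordered above would live here.
        path = tmp_path / filename
        path.write_text('some content')
        record = {'input': path}
        if attach_outputs:
            record['output'] = path.read_text()
        return record

    return _generate_record


def test_without_outputs(generate_record):
    record = generate_record(attach_outputs=False)
    assert 'output' not in record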
[The remaining two of the 10 changed files were not loaded in the original page view.]