Skip to content

Commit

Permalink
introduce write_dict_to_hdf()
Browse files Browse the repository at this point in the history
  • Loading branch information
jan-janssen committed Dec 18, 2023
1 parent faeb7cb commit 0d479d2
Show file tree
Hide file tree
Showing 2 changed files with 30 additions and 10 deletions.
5 changes: 2 additions & 3 deletions pyiron_base/storage/hdfio.py
Original file line number Diff line number Diff line change
Expand Up @@ -21,14 +21,14 @@
open_hdf5,
read_hdf5,
write_hdf5_with_json_support,
write_dict_to_hdf,
_is_ragged_in_1st_dim_only,
)
from pyiron_base.interfaces.has_groups import HasGroups
from pyiron_base.state import state
from pyiron_base.jobs.dynamic import JOB_DYN_DICT, class_constructor
from pyiron_base.jobs.job.util import _get_safe_job_name
import pyiron_base.project.maintenance
import warnings

__author__ = "Joerg Neugebauer, Jan Janssen"
__copyright__ = (
Expand Down Expand Up @@ -846,8 +846,7 @@ def hd_copy(self, hdf_old, hdf_new, exclude_groups=None, exclude_nodes=None):
(set(hdf_old.list_nodes()) ^ set(check_nodes))
& set(hdf_old.list_nodes())
)
for p in node_list:
hdf_new[p] = hdf_old[p]
write_dict_to_hdf(hdf=hdf_new, data_dict={p: hdf_old[p] for p in node_list})
for p in group_list:
h_new = hdf_new.create_group(p)
ex_n = [e[-1] for e in exclude_nodes_split if p == e[0] or len(e) == 1]
Expand Down
35 changes: 28 additions & 7 deletions pyiron_base/storage/helper_functions.py
Original file line number Diff line number Diff line change
Expand Up @@ -57,13 +57,34 @@ def write_hdf5(

def write_hdf5_with_json_support(value, path, file_handle):
    """
    Write a single value to an open HDF5 file, falling back to a JSON
    representation when the value cannot be stored natively.

    Args:
        value: object to store; may be converted via _check_json_conversion()
        path (str): absolute HDF5 path (title) under which the value is stored
        file_handle: open HDF5 file handle accepted by write_hdf5()

    Raises:
        TypeError: if the (possibly converted) value still cannot be written
            to HDF5; the original traceback is suppressed in favour of a
            message naming the offending value, key and type.
    """
    stored_value, use_json = _check_json_conversion(value=value)
    try:
        write_hdf5(
            file_handle,
            stored_value,
            title=path,
            overwrite="update",
            use_json=use_json,
        )
    except TypeError:
        # Re-raise with a message that identifies the problematic entry;
        # `from None` hides the less informative low-level traceback.
        raise TypeError(
            "Error saving {} (key {}): DataContainer doesn't support saving elements "
            'of type "{}" to HDF!'.format(stored_value, path, type(stored_value))
        ) from None


def write_dict_to_hdf(hdf, data_dict, groups=None):
    """
    Write a dictionary of values to an HDF5 file in a single open/close cycle.

    Keys of ``data_dict`` listed in ``groups`` are treated as sub-dictionaries:
    a sub-group is created for each and its items are written inside it.  All
    other keys are written as plain nodes directly under ``hdf``.

    Args:
        hdf: project HDF interface object providing ``file_name``,
            ``get_h5_path()`` and ``create_group()``
        data_dict (dict): mapping of node names to values; for keys listed in
            ``groups`` the value must itself be a dict
        groups (list, optional): keys of ``data_dict`` to store as sub-groups.
            Defaults to no groups.
    """
    # Use None as the default instead of a mutable [] — a shared list default
    # would persist across calls and could be mutated by callers.
    if groups is None:
        groups = []
    with open_hdf5(hdf.file_name, mode="a") as store:
        # Plain nodes: everything not flagged as a group.
        for k, v in data_dict.items():
            if k not in groups:
                write_hdf5_with_json_support(
                    file_handle=store, value=v, path=hdf.get_h5_path(k)
                )
        # Nested groups: create the sub-group, then write its items inside it.
        for group in groups:
            hdf_group = hdf.create_group(group)
            for k, v in data_dict[group].items():
                write_hdf5_with_json_support(
                    file_handle=store, value=v, path=hdf_group.get_h5_path(k)
                )


def _check_json_conversion(value):
Expand Down

0 comments on commit 0d479d2

Please sign in to comment.