• Home
  • Features
  • Pricing
  • Docs
  • Announcements
  • Sign In

pyiron / executorlib / 11907763573

19 Nov 2024 06:52AM UTC coverage: 95.648%. Remained the same
11907763573

push

github

web-flow
[pre-commit.ci] pre-commit autoupdate (#502)

* [pre-commit.ci] pre-commit autoupdate

updates:
- [github.com/astral-sh/ruff-pre-commit: v0.7.3 → v0.7.4](https://github.com/astral-sh/ruff-pre-commit/compare/v0.7.3...v0.7.4)

* Update environment-openmpi.yml

---------

Co-authored-by: pre-commit-ci[bot] <66853113+pre-commit-ci[bot]@users.noreply.github.com>
Co-authored-by: Jan Janssen <jan-janssen@users.noreply.github.com>

945 of 988 relevant lines covered (95.65%)

0.96 hits per line

Source File
Press 'n' to go to next uncovered line, 'b' for previous

93.94
/executorlib/standalone/hdf.py
1
from typing import Optional, Tuple
1✔
2

3
import cloudpickle
1✔
4
import h5py
1✔
5
import numpy as np
1✔
6

7

8
def dump(file_name: str, data_dict: dict) -> None:
    """
    Dump data dictionary into HDF5 file

    Args:
        file_name (str): file name of the HDF5 file as absolute path
        data_dict (dict): dictionary containing the python function to be executed {"fn": ..., "args": (), "kwargs": {}}
    """
    # Map the keys accepted in data_dict to the dataset names used in the HDF5 file.
    group_dict = {
        "fn": "function",
        "args": "input_args",
        "kwargs": "input_kwargs",
        "output": "output",
        "queue_id": "queue_id",
    }
    # Open in append mode so successive calls can add further datasets
    # (e.g. "output" after the function finished) to the same file.
    with h5py.File(file_name, "a") as fname:
        for data_key, data_value in data_dict.items():
            # Keys outside group_dict are silently ignored; use idiomatic
            # dict membership instead of the redundant .keys() call.
            if data_key in group_dict:
                fname.create_dataset(
                    name="/" + group_dict[data_key],
                    # cloudpickle serializes arbitrary python objects; np.void
                    # wraps the bytes as an opaque binary blob for h5py.
                    data=np.void(cloudpickle.dumps(data_value)),
                )
30

31

32
def load(file_name: str) -> dict:
    """
    Load data dictionary from HDF5 file

    Args:
        file_name (str): file name of the HDF5 file as absolute path

    Returns:
        dict: dictionary containing the python function to be executed {"fn": ..., "args": (), "kwargs": {}}

    Raises:
        TypeError: if no serialized function is stored in the file
    """
    with h5py.File(file_name, "r") as hdf:
        # The serialized function is mandatory; fail early when it is absent.
        if "function" not in hdf:
            raise TypeError("Function not found in HDF5 file.")
        data_dict = {"fn": cloudpickle.loads(np.void(hdf["/function"]))}
        # Positional and keyword arguments are optional and fall back to
        # empty defaults when the corresponding dataset is missing.
        data_dict["args"] = (
            cloudpickle.loads(np.void(hdf["/input_args"]))
            if "input_args" in hdf
            else ()
        )
        data_dict["kwargs"] = (
            cloudpickle.loads(np.void(hdf["/input_kwargs"]))
            if "input_kwargs" in hdf
            else {}
        )
        return data_dict
57

58

59
def get_output(file_name: str) -> Tuple[bool, object]:
    """
    Check if output is available in the HDF5 file

    Args:
        file_name (str): file name of the HDF5 file as absolute path

    Returns:
        Tuple[bool, object]: boolean flag indicating if output is available and the output object itself
    """
    with h5py.File(file_name, "r") as hdf:
        # No "output" dataset means the result has not been written yet.
        if "output" not in hdf:
            return False, None
        return True, cloudpickle.loads(np.void(hdf["/output"]))
74

75

76
def get_queue_id(file_name: str) -> Optional[int]:
    """
    Read the queuing system job id from the HDF5 file

    Args:
        file_name (str): file name of the HDF5 file as absolute path

    Returns:
        Optional[int]: the stored queue id, or None when no "queue_id"
            dataset exists in the file
    """
    with h5py.File(file_name, "r") as hdf:
        if "queue_id" in hdf:
            return cloudpickle.loads(np.void(hdf["/queue_id"]))
        else:
            return None
STATUS · Troubleshooting · Open an Issue · Sales · Support · CAREERS · ENTERPRISE · START FREE · SCHEDULE DEMO
ANNOUNCEMENTS · TWITTER · TOS & SLA · Supported CI Services · What's a CI service? · Automated Testing

© 2026 Coveralls, Inc