
pyiron / executorlib / build 12367015943

17 Dec 2024 05:32AM UTC coverage: 95.743% (+0.04%) from 95.704%

Pull Request #525: Get data from cache (merge d1e72c424 into ea305dab6, committed via the github web-flow)

12 of 12 new or added lines in 1 file covered. (100.0%)

2 existing lines in 1 file now uncovered.

967 of 1010 relevant lines covered (95.74%)

0.96 hits per line

Source File: /executorlib/standalone/hdf.py (95.74% covered)
import os
from typing import Optional, Tuple, List

import cloudpickle
import h5py
import numpy as np


group_dict = {
    "fn": "function",
    "args": "input_args",
    "kwargs": "input_kwargs",
    "output": "output",
    "runtime": "runtime",
    "queue_id": "queue_id",
}


def dump(file_name: str, data_dict: dict) -> None:
    """
    Dump data dictionary into HDF5 file

    Args:
        file_name (str): file name of the HDF5 file as absolute path
        data_dict (dict): dictionary containing the python function to be executed {"fn": ..., "args": (), "kwargs": {}}
    """
    with h5py.File(file_name, "a") as fname:
        for data_key, data_value in data_dict.items():
            if data_key in group_dict.keys():
                fname.create_dataset(
                    name="/" + group_dict[data_key],
                    data=np.void(cloudpickle.dumps(data_value)),
                )
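
A minimal usage sketch (not part of the module; the file path and task below are hypothetical, and the import assumes the module path follows the file path /executorlib/standalone/hdf.py): dump() serializes each value with cloudpickle and stores it under the dataset name given by group_dict.

# Illustration only, not part of hdf.py: file path and task are made up.
from executorlib.standalone.hdf import dump

task_file = "/tmp/task_0.h5"  # hypothetical cache file
dump(
    file_name=task_file,
    data_dict={"fn": sum, "args": ([1, 2, 3],), "kwargs": {}},
)
# group_dict renames the keys, so the file now holds the datasets
# /function, /input_args and /input_kwargs.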


def load(file_name: str) -> dict:
    """
    Load data dictionary from HDF5 file

    Args:
        file_name (str): file name of the HDF5 file as absolute path

    Returns:
        dict: dictionary containing the python function to be executed {"fn": ..., "args": (), "kwargs": {}}
    """
    with h5py.File(file_name, "r") as hdf:
        data_dict = {}
        if "function" in hdf:
            data_dict["fn"] = cloudpickle.loads(np.void(hdf["/function"]))
        else:
            # branch now uncovered by the test suite
            raise TypeError("Function not found in HDF5 file.")
        if "input_args" in hdf:
            data_dict["args"] = cloudpickle.loads(np.void(hdf["/input_args"]))
        else:
            # branch now uncovered by the test suite
            data_dict["args"] = ()
        if "input_kwargs" in hdf:
            data_dict["kwargs"] = cloudpickle.loads(np.void(hdf["/input_kwargs"]))
        else:
            data_dict["kwargs"] = {}
        return data_dict
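
A matching round-trip sketch, reusing the hypothetical file from the dump example: load() restores the user-facing keys, so the returned dictionary can be called directly.

# Illustration only: reconstruct and execute a task written by dump().
from executorlib.standalone.hdf import load

task_dict = load(file_name="/tmp/task_0.h5")  # hypothetical file from the sketch above
result = task_dict["fn"](*task_dict["args"], **task_dict["kwargs"])
print(result)  # sum([1, 2, 3]) == 6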


def get_output(file_name: str) -> Tuple[bool, object]:
    """
    Check if output is available in the HDF5 file

    Args:
        file_name (str): file name of the HDF5 file as absolute path

    Returns:
        Tuple[bool, object]: boolean flag indicating if output is available and the output object itself
    """
    with h5py.File(file_name, "r") as hdf:
        if "output" in hdf:
            return True, cloudpickle.loads(np.void(hdf["/output"]))
        else:
            return False, None
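
A sketch of how a caller could poll for a cached result; the path is the same hypothetical file, and whether the /output dataset exists depends on whether a result has already been written back.

# Illustration only: check for a stored result without assuming it exists.
from executorlib.standalone.hdf import get_output

flag, output = get_output(file_name="/tmp/task_0.h5")  # hypothetical file
if flag:
    print("cached result:", output)
else:
    print("no /output dataset yet")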


def get_runtime(file_name: str) -> float:
    """
    Get run time from HDF5 file

    Args:
        file_name (str): file name of the HDF5 file as absolute path

    Returns:
        float: run time from the execution of the python function
    """
    with h5py.File(file_name, "r") as hdf:
        if "runtime" in hdf:
            return cloudpickle.loads(np.void(hdf["/runtime"]))
        else:
            return 0.0
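
A sketch of the write side that get_runtime() implies: because "output" and "runtime" are both keys in group_dict, a worker could store the result together with the measured wall time via dump() and read the runtime back later. The file path and timing below are illustrative, not taken from executorlib itself.

# Illustration only: execute the cached task, store output and wall time, read it back.
import time

from executorlib.standalone.hdf import dump, get_runtime, load

task_file = "/tmp/task_0.h5"  # hypothetical file from the sketches above
task_dict = load(file_name=task_file)
start = time.time()
result = task_dict["fn"](*task_dict["args"], **task_dict["kwargs"])
dump(file_name=task_file, data_dict={"output": result, "runtime": time.time() - start})
print(get_runtime(file_name=task_file))  # returns 0.0 when no /runtime dataset is stored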


def get_queue_id(file_name: str) -> Optional[int]:
    """Get the queuing system id stored in the HDF5 file, or None if no queue id is stored."""
    with h5py.File(file_name, "r") as hdf:
        if "queue_id" in hdf:
            return cloudpickle.loads(np.void(hdf["/queue_id"]))
        else:
            return None


def get_cache_data(cache_directory: str) -> List[dict]:
    """
    Collect the content of all HDF5 files in a cache directory

    Args:
        cache_directory (str): directory containing the HDF5 files

    Returns:
        List[dict]: one dictionary per file with the stored datasets plus the file name under "filename"
    """
    file_lst = []
    for file_name in os.listdir(cache_directory):
        with h5py.File(os.path.join(cache_directory, file_name), "r") as hdf:
            file_content_dict = {
                key: cloudpickle.loads(np.void(hdf["/" + key]))
                for key in group_dict.values() if key in hdf
            }
        file_content_dict["filename"] = file_name
        file_lst.append(file_content_dict)
    return file_lst
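
A sketch of inspecting a whole cache directory with get_cache_data(); the directory is hypothetical and every file in it is assumed to be a readable HDF5 task file. The returned dictionaries use the dataset names from group_dict ("function", "input_args", ...) plus the added "filename" key.

# Illustration only: summarise the cached tasks in a directory.
from executorlib.standalone.hdf import get_cache_data

for entry in get_cache_data(cache_directory="/tmp/cache"):  # hypothetical directory
    status = "done" if "output" in entry else "pending"
    print(entry["filename"], status, entry.get("runtime", "no runtime stored"))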