pyiron / executorlib · build 11541431333

27 Oct 2024 02:48PM UTC · coverage: 94.012% (-0.6%) from 94.582%
Triggered by a push via github, committed by web-flow

Split shared cache in backend and frontend (#443)

* Split shared cache in backend and frontend
* [pre-commit.ci] auto fixes from pre-commit.com hooks (for more information, see https://pre-commit.ci)

Co-authored-by: pre-commit-ci[bot] <66853113+pre-commit-ci[bot]@users.noreply.github.com>

2 of 2 new or added lines in 1 file covered (100.0%).

4 existing lines in 1 file are now uncovered.

785 of 835 relevant lines covered (94.01%).

0.94 hits per line.

Source File: /executorlib/interactive/dependencies.py (100.0% covered)

from concurrent.futures import Future
from typing import Any, Callable, Dict

from executorlib.interactive import create_executor
from executorlib.shared.executor import ExecutorSteps, execute_tasks_with_dependencies
from executorlib.shared.plot import draw, generate_nodes_and_edges, generate_task_hash
from executorlib.shared.thread import RaisingThread


class ExecutorWithDependencies(ExecutorSteps):
    """
    ExecutorWithDependencies is a class that extends ExecutorSteps and provides
    functionality for executing tasks with dependencies.

    Args:
        refresh_rate (float, optional): The refresh rate for updating the executor queue. Defaults to 0.01.
        plot_dependency_graph (bool, optional): Whether to generate and plot the dependency graph. Defaults to False.
        *args: Variable length argument list.
        **kwargs: Arbitrary keyword arguments.

    Attributes:
        _future_hash_dict (Dict[str, Future]): A dictionary mapping task hash to future object.
        _task_hash_dict (Dict[str, Dict]): A dictionary mapping task hash to task dictionary.
        _generate_dependency_graph (bool): Whether to generate the dependency graph.

    """

    def __init__(
        self,
        *args: Any,
        refresh_rate: float = 0.01,
        plot_dependency_graph: bool = False,
        **kwargs: Any,
    ) -> None:
        super().__init__()
        executor = create_executor(*args, **kwargs)
        self._set_process(
            RaisingThread(
                target=execute_tasks_with_dependencies,
                kwargs={
                    # Executor Arguments
                    "future_queue": self._future_queue,
                    "executor_queue": executor._future_queue,
                    "executor": executor,
                    "refresh_rate": refresh_rate,
                },
            )
        )
        self._future_hash_dict = {}
        self._task_hash_dict = {}
        self._generate_dependency_graph = plot_dependency_graph

    def submit(
        self,
        fn: Callable[..., Any],
        *args: Any,
        resource_dict: Dict[str, Any] = {},
        **kwargs: Any,
    ) -> Future:
        """
        Submits a task to the executor.

        Args:
            fn (callable): The function to be executed.
            *args: Variable length argument list.
            resource_dict (dict, optional): A dictionary of resources required by the task. Defaults to {}.
            **kwargs: Arbitrary keyword arguments.

        Returns:
            Future: A future object representing the result of the task.

        """
        if not self._generate_dependency_graph:
            f = super().submit(fn, *args, resource_dict=resource_dict, **kwargs)
        else:
            f = Future()
            f.set_result(None)
            task_dict = {
                "fn": fn,
                "args": args,
                "kwargs": kwargs,
                "future": f,
                "resource_dict": resource_dict,
            }
            task_hash = generate_task_hash(
                task_dict=task_dict,
                future_hash_inverse_dict={
                    v: k for k, v in self._future_hash_dict.items()
                },
            )
            self._future_hash_dict[task_hash] = f
            self._task_hash_dict[task_hash] = task_dict
        return f

    def __exit__(
        self,
        exc_type: Any,
        exc_val: Any,
        exc_tb: Any,
    ) -> None:
        """
        Exit method called when exiting the context manager.

        Args:
            exc_type: The type of the exception.
            exc_val: The exception instance.
            exc_tb: The traceback object.

        """
        super().__exit__(exc_type=exc_type, exc_val=exc_val, exc_tb=exc_tb)
        if self._generate_dependency_graph:
            node_lst, edge_lst = generate_nodes_and_edges(
                task_hash_dict=self._task_hash_dict,
                future_hash_inverse_dict={
                    v: k for k, v in self._future_hash_dict.items()
                },
            )
            return draw(node_lst=node_lst, edge_lst=edge_lst)
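
For orientation, here is a minimal usage sketch of the class covered above. It is illustrative only: add() is a made-up task function, max_cores=1 is an assumed keyword for create_executor() (the constructor forwards *args/**kwargs to it unchanged), and the dependency behaviour described in the comments follows from the class docstring and the execute_tasks_with_dependencies() backend it wires up.

    from executorlib.interactive.dependencies import ExecutorWithDependencies


    def add(x, y):
        # Hypothetical task function, defined only for this example.
        return x + y


    # max_cores=1 is an assumed create_executor() keyword; pass whatever arguments
    # your executorlib backend expects, since they are forwarded unchanged.
    with ExecutorWithDependencies(max_cores=1) as exe:
        future_1 = exe.submit(add, 1, 2)
        # Passing future_1 as an argument makes the second task depend on the first;
        # the backend thread resolves it before scheduling the dependent task.
        future_2 = exe.submit(add, future_1, 4)
        print(future_2.result())  # 7

With plot_dependency_graph=True, submit() does not execute anything: each returned Future is pre-resolved to None, the task is only recorded in _task_hash_dict, and __exit__() renders the collected graph via draw().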