• Home
  • Features
  • Pricing
  • Docs
  • Announcements
  • Sign In
Build has been canceled!

daisytuner / sdfglib / 15518226219

08 Jun 2025 12:03PM UTC coverage: 61.652% (-0.8%) from 62.447%
15518226219

push

github

web-flow
Merge pull request #65 from daisytuner/lib-nodes-plugins

Adds registry for library node dispatchers

41 of 77 new or added lines in 5 files covered. (53.25%)

124 existing lines in 10 files now uncovered.

7249 of 11758 relevant lines covered (61.65%)

125.57 hits per line

Source File
Press 'n' to go to next uncovered line, 'b' for previous

0.0
/src/passes/dataflow/memlet_propagation.cpp
1
#include "sdfg/passes/dataflow/memlet_propagation.h"
2

3
namespace sdfg {
4
namespace passes {
5

UNCOV
6
// Default-constructs the pass; all state lives in run_pass locals.
ForwardMemletPropagation::ForwardMemletPropagation() : Pass() {}
×
10

11
// Pass identifier used by the pass registry / logging.
std::string ForwardMemletPropagation::name() {
    return "ForwardMemletPropagation";
}
×
12

UNCOV
13
bool ForwardMemletPropagation::run_pass(builder::StructuredSDFGBuilder& builder,
×
14
                                        analysis::AnalysisManager& analysis_manager) {
UNCOV
15
    bool applied = false;
×
16

UNCOV
17
    auto& sdfg = builder.subject();
×
UNCOV
18
    auto& users = analysis_manager.get<analysis::Users>();
×
19

UNCOV
20
    std::unordered_set<std::string> visited;
×
UNCOV
21
    for (auto& container : sdfg.containers()) {
×
UNCOV
22
        if (visited.find(container) != visited.end()) {
×
23
            continue;
×
24
        }
UNCOV
25
        if (!sdfg.is_transient(container)) {
×
UNCOV
26
            continue;
×
27
        }
UNCOV
28
        auto& type = sdfg.type(container);
×
UNCOV
29
        if (!dynamic_cast<const types::Scalar*>(&type)) {
×
UNCOV
30
            continue;
×
31
        }
32

33
        // Criterion: Scalar written once
UNCOV
34
        if (!users.views(container).empty() || !users.moves(container).empty()) {
×
35
            continue;
×
36
        }
UNCOV
37
        if (users.writes(container).size() != 1) {
×
UNCOV
38
            continue;
×
39
        }
40

41
        // Criterion: Written to by access node
42
        auto write = users.writes(container).at(0);
×
43
        if (!dynamic_cast<data_flow::AccessNode*>(write->element())) {
×
44
            continue;
×
45
        }
46
        auto& access_node = static_cast<data_flow::AccessNode&>(*write->element());
×
47
        auto& graph = *write->parent();
×
48
        auto& block = static_cast<structured_control_flow::Block&>(*graph.get_parent());
×
49

50
        // Criterion: Access node is connected to an assignment tasklet
51
        const data_flow::Tasklet* tasklet = nullptr;
×
52
        if (graph.in_degree(access_node) == 1) {
×
53
            auto& edge = *graph.in_edges(access_node).begin();
×
54
            auto src = dynamic_cast<const data_flow::Tasklet*>(&edge.src());
×
55
            if (graph.in_degree(*src) == 1 && !src->is_conditional()) {
×
56
                if (src->code() == data_flow::TaskletCode::assign) {
×
57
                    tasklet = src;
×
58
                }
×
59
            }
×
60
        }
×
61
        if (!tasklet) {
×
62
            continue;
×
63
        }
64

65
        // Retrieve assigning data
66
        auto& edge = *graph.in_edges(*tasklet).begin();
×
67
        auto& src = static_cast<const data_flow::AccessNode&>(edge.src());
×
68
        std::string assigning_data = src.data();
×
69
        if (symbolic::is_nv(symbolic::symbol(assigning_data))) {
×
70
            continue;
×
71
        }
72
        data_flow::Subset assigning_subset = edge.subset();
×
73
        const data_flow::AccessNode* assigning_node = &src;
×
74

75
        // Criterion: Not casting types
76
        auto& assigning_type = types::infer_type(sdfg, sdfg.type(assigning_data), assigning_subset);
×
77
        if (assigning_type.primitive_type() !=
×
78
            tasklet->input_type(edge.dst_conn()).primitive_type()) {
×
79
            continue;
×
80
        }
81
        if (type.primitive_type() != tasklet->output().second.primitive_type()) {
×
82
            continue;
×
83
        }
84

85
        // Risky symbols
86
        std::unordered_set<std::string> risky_symbols = {assigning_data};
×
87
        for (auto& dim : assigning_subset) {
×
88
            for (auto& atom : symbolic::atoms(dim)) {
×
89
                auto sym = SymEngine::rcp_static_cast<const SymEngine::Symbol>(atom)->get_name();
×
90
                risky_symbols.insert(sym);
×
91
            }
×
92
        }
93

94
        // Propagate assigning data into container uses
95
        auto reads = users.reads(container);
×
96
        size_t propagated = reads.size();
×
97
        for (auto read : reads) {
×
98
            auto read_node = dynamic_cast<data_flow::AccessNode*>(read->element());
×
99
            if (!read_node) {
×
100
                continue;
×
101
            }
102
            auto read_graph = read->parent();
×
103
            if (read_node == &access_node) {
×
104
                propagated--;
×
105
                continue;
×
106
            }
107
            // Criterion: Data races
108
            if (!users.dominates(*write, *read)) {
×
109
                continue;
×
110
            }
111
            auto& uses_between = users.all_uses_between(*write, *read);
×
112
            bool assigning_data_is_written = false;
×
113
            for (auto& user : uses_between) {
×
114
                // Unsafe pointer operations
115
                if (user->use() == analysis::Use::MOVE) {
×
116
                    // Viewed container is not constant
117
                    if (user->container() == assigning_data) {
×
118
                        assigning_data_is_written = true;
×
119
                        break;
×
120
                    }
121

122
                    // Unsafe pointer operations
123
                    auto& move_node = dynamic_cast<data_flow::AccessNode&>(*user->element());
×
124
                    auto& move_graph = *user->parent();
×
125
                    auto& move_edge = *move_graph.in_edges(move_node).begin();
×
126
                    auto& view_node = dynamic_cast<data_flow::AccessNode&>(move_edge.src());
×
127
                    if (move_edge.dst_conn() == "void" ||
×
128
                        symbolic::is_pointer(symbolic::symbol(view_node.data()))) {
×
129
                        assigning_data_is_written = true;
×
130
                        break;
×
131
                    }
132
                } else if (user->use() == analysis::Use::WRITE) {
×
133
                    if (risky_symbols.find(user->container()) != risky_symbols.end()) {
×
134
                        assigning_data_is_written = true;
×
135
                        break;
×
136
                    }
137
                }
×
138
            }
139
            if (assigning_data_is_written) {
×
140
                continue;
×
141
            }
142

143
            // Casting
144
            bool unsafe_cast = false;
×
145
            for (auto& oedge : read_graph->out_edges(*read_node)) {
×
146
                auto& dst = dynamic_cast<const data_flow::Tasklet&>(oedge.dst());
×
147
                if (dst.input_type(oedge.dst_conn()).primitive_type() != type.primitive_type()) {
×
148
                    unsafe_cast = true;
×
149
                    break;
×
150
                }
151
            }
152
            if (unsafe_cast) {
×
153
                continue;
×
154
            }
155

156
            // Propagate
157
            read_node->data() = assigning_data;
×
158
            for (auto& oedge : read_graph->out_edges(*read_node)) {
×
159
                oedge.subset() = assigning_subset;
×
160
            }
161

162
            propagated--;
×
163
            applied = true;
×
164
        }
×
165

166
        if (propagated != reads.size()) {
×
167
            visited.insert(container);
×
168
            visited.insert(assigning_data);
×
169
        }
×
170

171
        // Remove tasklet and access node if all reads were propagated
172
        if (propagated == 0 && false) {
×
173
            auto& tasklet_in_edge = *graph.in_edges(*tasklet).begin();
×
174
            auto& tasklet_out_edge = *graph.out_edges(*tasklet).begin();
×
175
            if (graph.out_degree(access_node) > 0) {
×
176
                access_node.data() = assigning_data;
×
177
                for (auto& oedge : graph.out_edges(access_node)) {
×
178
                    oedge.subset() = assigning_subset;
×
179
                }
180
                std::vector<data_flow::Memlet*> assigning_node_in_edges;
×
181
                for (auto& iedge : graph.in_edges(*assigning_node)) {
×
182
                    assigning_node_in_edges.push_back(&iedge);
×
183
                }
184
                for (auto& iedge : assigning_node_in_edges) {
×
185
                    builder.add_memlet(block, iedge->src(), iedge->src_conn(), access_node,
×
186
                                       iedge->dst_conn(), iedge->subset());
×
187
                }
188
                for (auto& iedge : assigning_node_in_edges) {
×
189
                    builder.remove_memlet(block, *iedge);
×
190
                }
191
            }
×
192
            builder.remove_memlet(block, tasklet_in_edge);
×
193
            builder.remove_memlet(block, tasklet_out_edge);
×
194
            builder.remove_node(block, *tasklet);
×
195
            builder.remove_node(block, *assigning_node);
×
196
            if (graph.out_degree(access_node) == 0) {
×
197
                builder.remove_node(block, access_node);
×
198
            }
×
199
            applied = true;
×
200
        }
×
201
    }
×
202

UNCOV
203
    return applied;
×
UNCOV
204
};
×
205

UNCOV
206
// Default-constructs the pass; all state lives in run_pass locals.
BackwardMemletPropagation::BackwardMemletPropagation() : Pass() {}
×
210

211
// Pass identifier used by the pass registry / logging.
std::string BackwardMemletPropagation::name() {
    return "BackwardMemletPropagation";
}
×
212

UNCOV
213
bool BackwardMemletPropagation::run_pass(builder::StructuredSDFGBuilder& builder,
×
214
                                         analysis::AnalysisManager& analysis_manager) {
UNCOV
215
    bool applied = false;
×
216

UNCOV
217
    auto& sdfg = builder.subject();
×
UNCOV
218
    auto& users = analysis_manager.get<analysis::Users>();
×
219

UNCOV
220
    std::unordered_set<std::string> visited;
×
UNCOV
221
    for (auto& container : sdfg.containers()) {
×
UNCOV
222
        if (visited.find(container) != visited.end()) {
×
223
            continue;
×
224
        }
UNCOV
225
        if (!sdfg.is_transient(container)) {
×
UNCOV
226
            continue;
×
227
        }
UNCOV
228
        auto& type = sdfg.type(container);
×
UNCOV
229
        if (!dynamic_cast<const types::Scalar*>(&type)) {
×
UNCOV
230
            continue;
×
231
        }
232

233
        // Criterion: Scalar written once
UNCOV
234
        if (!users.views(container).empty() || !users.moves(container).empty()) {
×
235
            continue;
×
236
        }
UNCOV
237
        if (users.writes(container).size() != 1 || users.reads(container).size() != 1) {
×
UNCOV
238
            continue;
×
239
        }
240

241
        // Criterion: Read by access node
242
        auto read = users.reads(container).at(0);
×
243
        if (!dynamic_cast<data_flow::AccessNode*>(read->element())) {
×
244
            continue;
×
245
        }
246
        auto write = users.writes(container).at(0);
×
247
        if (!dynamic_cast<data_flow::AccessNode*>(write->element())) {
×
248
            continue;
×
249
        }
250

251
        auto& read_access_node = static_cast<data_flow::AccessNode&>(*read->element());
×
252
        auto& read_graph = *read->parent();
×
253
        auto& read_block = static_cast<structured_control_flow::Block&>(*read_graph.get_parent());
×
254

255
        // Criterion: Access node is connected to an assignment tasklet
256
        const data_flow::Tasklet* tasklet = nullptr;
×
257
        if (read_graph.out_degree(read_access_node) == 1) {
×
258
            auto& edge = *read_graph.out_edges(read_access_node).begin();
×
259
            auto dst = dynamic_cast<const data_flow::Tasklet*>(&edge.dst());
×
260
            if (read_graph.in_degree(*dst) == 1 && !dst->is_conditional()) {
×
261
                if (dst->code() == data_flow::TaskletCode::assign) {
×
262
                    tasklet = dst;
×
263
                }
×
264
            }
×
265
        }
×
266
        if (!tasklet) {
×
267
            continue;
×
268
        }
269

270
        // Retrieve assigning data
271
        auto& edge = *read_graph.out_edges(*tasklet).begin();
×
272
        auto& dst = static_cast<const data_flow::AccessNode&>(edge.dst());
×
273
        std::string assigning_data = dst.data();
×
274
        data_flow::Subset assigning_subset = edge.subset();
×
275
        const data_flow::AccessNode* assigning_node = &dst;
×
276
        if (read_graph.out_degree(*assigning_node) != 0) {
×
277
            continue;
×
278
        }
279

280
        // Criterion: Not casting types
281
        auto& assigning_type = types::infer_type(sdfg, sdfg.type(assigning_data), assigning_subset);
×
282
        if (assigning_type.primitive_type() != tasklet->output().second.primitive_type()) {
×
283
            continue;
×
284
        }
285
        if (type.primitive_type() != tasklet->inputs().begin()->second.primitive_type()) {
×
286
            continue;
×
287
        }
288

289
        // Risky symbols
290
        std::unordered_set<std::string> risky_symbols = {assigning_data};
×
291
        for (auto& dim : assigning_subset) {
×
292
            for (auto& atom : symbolic::atoms(dim)) {
×
293
                auto sym = SymEngine::rcp_static_cast<const SymEngine::Symbol>(atom)->get_name();
×
294
                risky_symbols.insert(sym);
×
295
            }
×
296
        }
297

298
        // Propagate assigning data into container write
299

300
        // Criterion: Written to by access node
301
        auto& write_access_node = static_cast<data_flow::AccessNode&>(*write->element());
×
302
        auto& write_graph = *write->parent();
×
303

304
        // Criterion: Data races
305
        if (!users.dominates(*write, *read) || !users.post_dominates(*read, *write)) {
×
306
            continue;
×
307
        }
308
        auto& uses_between = users.all_uses_between(*write, *read);
×
309
        bool race_condition = false;
×
310
        for (auto& user : uses_between) {
×
311
            // Unsafe pointer operations
312
            if (user->use() == analysis::Use::MOVE) {
×
313
                // Viewed container is not constant
314
                if (user->container() == assigning_data) {
×
315
                    race_condition = true;
×
316
                    break;
×
317
                }
318

319
                // Unsafe pointer operations
320
                auto& move_node = dynamic_cast<data_flow::AccessNode&>(*user->element());
×
321
                auto& move_graph = *user->parent();
×
322
                auto& move_edge = *move_graph.in_edges(move_node).begin();
×
323
                auto& view_node = dynamic_cast<data_flow::AccessNode&>(move_edge.src());
×
324
                if (move_edge.dst_conn() == "void" ||
×
325
                    symbolic::is_pointer(symbolic::symbol(view_node.data()))) {
×
326
                    race_condition = true;
×
327
                    break;
×
328
                }
329
            } else if (user->use() == analysis::Use::WRITE) {
×
330
                if (risky_symbols.find(user->container()) != risky_symbols.end()) {
×
331
                    race_condition = true;
×
332
                    break;
×
333
                }
334
            } else if (user->use() == analysis::Use::READ) {
×
335
                if (user->container() == assigning_data) {
×
336
                    race_condition = true;
×
337
                    break;
×
338
                }
339
            }
×
340
        }
341
        if (race_condition) {
×
342
            continue;
×
343
        }
344

345
        // Propagate
346
        write_access_node.data() = assigning_data;
×
347
        for (auto& iedge : write_graph.in_edges(write_access_node)) {
×
348
            iedge.subset() = assigning_subset;
×
349
        }
350
        for (auto& oedge : write_graph.out_edges(write_access_node)) {
×
351
            oedge.subset() = assigning_subset;
×
352
        }
353
        applied = true;
×
354

355
        visited.insert(container);
×
356
        visited.insert(assigning_data);
×
357

358
        // Remove tasklet and access node if all reads were propagated
359
        auto& tasklet_in_edge = *read_graph.in_edges(*tasklet).begin();
×
360
        auto& tasklet_out_edge = *read_graph.out_edges(*tasklet).begin();
×
361
        builder.remove_memlet(read_block, tasklet_in_edge);
×
362
        builder.remove_memlet(read_block, tasklet_out_edge);
×
363
        builder.remove_node(read_block, *tasklet);
×
364
        builder.remove_node(read_block, *assigning_node);
×
365
        if (read_graph.in_degree(read_access_node) == 0) {
×
366
            builder.remove_node(read_block, read_access_node);
×
367
        }
×
368
    }
×
369

UNCOV
370
    return applied;
×
UNCOV
371
};
×
372

373
}  // namespace passes
374
}  // namespace sdfg
STATUS · Troubleshooting · Open an Issue · Sales · Support · CAREERS · ENTERPRISE · START FREE · SCHEDULE DEMO
ANNOUNCEMENTS · TWITTER · TOS & SLA · Supported CI Services · What's a CI service? · Automated Testing

© 2026 Coveralls, Inc