daisytuner / sdfglib / build 16313696218 (push via github, web-flow)

16 Jul 2025 07:52AM UTC coverage: 64.642% (-0.2%) from 64.843%

Merge pull request #141 from daisytuner/dataflow-ranges
Several convenience improvements for library nodes

60 of 141 new or added lines in 14 files covered (42.55%).
8 existing lines in 7 files are now uncovered.
8556 of 13236 relevant lines covered (64.64%).
178.45 hits per line.

Source File: /src/passes/dataflow/memlet_propagation.cpp
#include "sdfg/passes/dataflow/memlet_propagation.h"

#include "sdfg/analysis/users.h"
#include "sdfg/types/utils.h"

namespace sdfg {
namespace passes {

ForwardMemletPropagation::ForwardMemletPropagation()
    : Pass() {

      };

std::string ForwardMemletPropagation::name() { return "ForwardMemletPropagation"; };

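// Forward propagation: for every transient scalar that is written exactly once by
// a plain assignment tasklet, redirect later reads of the scalar to the assigned
// source container and subset, provided neither the source nor the subset symbols
// can change between the assignment and the read.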
bool ForwardMemletPropagation::
    run_pass(builder::StructuredSDFGBuilder& builder, analysis::AnalysisManager& analysis_manager) {
    bool applied = false;

    auto& sdfg = builder.subject();
    auto& users = analysis_manager.get<analysis::Users>();

    std::unordered_set<std::string> visited;
    for (auto& container : sdfg.containers()) {
        if (visited.find(container) != visited.end()) {
            continue;
        }
        if (!sdfg.is_transient(container)) {
            continue;
        }
        auto& type = sdfg.type(container);
        if (!dynamic_cast<const types::Scalar*>(&type)) {
            continue;
        }

        // Criterion: Scalar written once
        if (!users.views(container).empty() || !users.moves(container).empty()) {
            continue;
        }
        if (users.writes(container).size() != 1) {
            continue;
        }

        // Criterion: Written to by access node
        auto write = users.writes(container).at(0);
        if (!dynamic_cast<data_flow::AccessNode*>(write->element())) {
            continue;
        }
        auto& access_node = static_cast<data_flow::AccessNode&>(*write->element());
        auto& graph = *write->parent();
        auto& block = static_cast<structured_control_flow::Block&>(*graph.get_parent());

        // Criterion: Access node is connected to an assignment tasklet
        const data_flow::Tasklet* tasklet = nullptr;
        if (graph.in_degree(access_node) == 1) {
            auto& edge = *graph.in_edges(access_node).begin();
            auto src = dynamic_cast<const data_flow::Tasklet*>(&edge.src());
            if (src && graph.in_degree(*src) == 1 && !src->is_conditional()) {
                if (src->code() == data_flow::TaskletCode::assign) {
                    tasklet = src;
                }
            }
        }
        if (!tasklet) {
            continue;
        }

        // Retrieve assigning data
        auto& edge = *graph.in_edges(*tasklet).begin();
        auto& src = static_cast<const data_flow::AccessNode&>(edge.src());
        std::string assigning_data = src.data();
        if (symbolic::is_nv(symbolic::symbol(assigning_data))) {
            continue;
        }
        data_flow::Subset assigning_subset = edge.subset();
        const data_flow::AccessNode* assigning_node = &src;

        // Criterion: Not casting types
        auto& assigning_type = types::infer_type(sdfg, sdfg.type(assigning_data), assigning_subset);
        if (assigning_type.primitive_type() != tasklet->input_type(edge.dst_conn()).primitive_type()) {
            continue;
        }
        if (type.primitive_type() != tasklet->output().second.primitive_type()) {
            continue;
        }

        // Risky symbols
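        // The assigning container and every symbol in its subset must stay unmodified
        // between the write and each read; a write to any of them would change the
        // value that propagation assumes.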
        std::unordered_set<std::string> risky_symbols = {assigning_data};
        for (auto& dim : assigning_subset) {
            for (auto& atom : symbolic::atoms(dim)) {
                auto sym = SymEngine::rcp_static_cast<const SymEngine::Symbol>(atom)->get_name();
                risky_symbols.insert(sym);
            }
        }

        // Propagate assigning data into container uses
        auto reads = users.reads(container);
        size_t propagated = reads.size();
        for (auto read : reads) {
            auto read_node = dynamic_cast<data_flow::AccessNode*>(read->element());
            if (!read_node) {
                continue;
            }
            auto read_graph = read->parent();
            if (read_node == &access_node) {
                propagated--;
                continue;
            }
            // Criterion: Data races
            if (!users.dominates(*write, *read)) {
                continue;
            }
            auto& uses_between = users.all_uses_between(*write, *read);
            bool assigning_data_is_written = false;
            for (auto& user : uses_between) {
                // Unsafe pointer operations
                if (user->use() == analysis::Use::MOVE) {
                    // Viewed container is not constant
                    if (user->container() == assigning_data) {
                        assigning_data_is_written = true;
                        break;
                    }

                    // Unsafe pointer operations
                    auto& move_node = dynamic_cast<data_flow::AccessNode&>(*user->element());
                    auto& move_graph = *user->parent();
                    auto& move_edge = *move_graph.in_edges(move_node).begin();
                    auto& view_node = dynamic_cast<data_flow::AccessNode&>(move_edge.src());
                    if (move_edge.dst_conn() == "void" || symbolic::is_pointer(symbolic::symbol(view_node.data()))) {
                        assigning_data_is_written = true;
                        break;
                    }
                } else if (user->use() == analysis::Use::WRITE) {
                    if (risky_symbols.find(user->container()) != risky_symbols.end()) {
                        assigning_data_is_written = true;
                        break;
                    }
                }
            }
            if (assigning_data_is_written) {
                continue;
            }

            // Casting
            bool unsafe_cast = false;
            for (auto& oedge : read_graph->out_edges(*read_node)) {
                auto& dst = dynamic_cast<const data_flow::Tasklet&>(oedge.dst());
                if (dst.input_type(oedge.dst_conn()).primitive_type() != type.primitive_type()) {
                    unsafe_cast = true;
                    break;
                }
            }
            if (unsafe_cast) {
                continue;
            }

            // Propagate
            read_node->data() = assigning_data;
            for (auto& oedge : read_graph->out_edges(*read_node)) {
                oedge.set_subset(assigning_subset);
            }

            propagated--;
            applied = true;
        }

        if (propagated != reads.size()) {
            visited.insert(container);
            visited.insert(assigning_data);
        }

        // Remove tasklet and access node if all reads were propagated
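        // Note: this cleanup is currently disabled by the `&& false` in the condition
        // below; the assignment tasklet and access node are left in place even when
        // every read was propagated.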
        if (propagated == 0 && false) {
            auto& tasklet_in_edge = *graph.in_edges(*tasklet).begin();
            auto& tasklet_out_edge = *graph.out_edges(*tasklet).begin();
            if (graph.out_degree(access_node) > 0) {
                access_node.data() = assigning_data;
                for (auto& oedge : graph.out_edges(access_node)) {
                    oedge.set_subset(assigning_subset);
                }
                std::vector<data_flow::Memlet*> assigning_node_in_edges;
                for (auto& iedge : graph.in_edges(*assigning_node)) {
                    assigning_node_in_edges.push_back(&iedge);
                }
                for (auto& iedge : assigning_node_in_edges) {
                    builder.add_memlet(
                        block, iedge->src(), iedge->src_conn(), access_node, iedge->dst_conn(), iedge->subset()
                    );
                }
                for (auto& iedge : assigning_node_in_edges) {
                    builder.remove_memlet(block, *iedge);
                }
            }
            builder.remove_memlet(block, tasklet_in_edge);
            builder.remove_memlet(block, tasklet_out_edge);
            builder.remove_node(block, *tasklet);
            builder.remove_node(block, *assigning_node);
            if (graph.out_degree(access_node) == 0) {
                builder.remove_node(block, access_node);
            }
            applied = true;
        }
    }

    return applied;
};

BackwardMemletPropagation::BackwardMemletPropagation()
    : Pass() {

      };

std::string BackwardMemletPropagation::name() { return "BackwardMemletPropagation"; };

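// Backward propagation: for every transient scalar that is written once and read
// once, where the read feeds a plain assignment tasklet into another container,
// redirect the original write to that container so the intermediate scalar, the
// assignment tasklet, and its access nodes can be removed.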
bool BackwardMemletPropagation::
    run_pass(builder::StructuredSDFGBuilder& builder, analysis::AnalysisManager& analysis_manager) {
    bool applied = false;

    auto& sdfg = builder.subject();
    auto& users = analysis_manager.get<analysis::Users>();

    std::unordered_set<std::string> visited;
    for (auto& container : sdfg.containers()) {
        if (visited.find(container) != visited.end()) {
            continue;
        }
        if (!sdfg.is_transient(container)) {
            continue;
        }
        auto& type = sdfg.type(container);
        if (!dynamic_cast<const types::Scalar*>(&type)) {
            continue;
        }

        // Criterion: Scalar written once
        if (!users.views(container).empty() || !users.moves(container).empty()) {
            continue;
        }
        if (users.writes(container).size() != 1 || users.reads(container).size() != 1) {
            continue;
        }

        // Criterion: Read by access node
        auto read = users.reads(container).at(0);
        if (!dynamic_cast<data_flow::AccessNode*>(read->element())) {
            continue;
        }
        auto write = users.writes(container).at(0);
        if (!dynamic_cast<data_flow::AccessNode*>(write->element())) {
            continue;
        }

        auto& read_access_node = static_cast<data_flow::AccessNode&>(*read->element());
        auto& read_graph = *read->parent();
        auto& read_block = static_cast<structured_control_flow::Block&>(*read_graph.get_parent());

        // Criterion: Access node is connected to an assignment tasklet
        const data_flow::Tasklet* tasklet = nullptr;
        if (read_graph.out_degree(read_access_node) == 1) {
            auto& edge = *read_graph.out_edges(read_access_node).begin();
            auto dst = dynamic_cast<const data_flow::Tasklet*>(&edge.dst());
            if (dst && read_graph.in_degree(*dst) == 1 && !dst->is_conditional()) {
                if (dst->code() == data_flow::TaskletCode::assign) {
                    tasklet = dst;
                }
            }
        }
        if (!tasklet) {
            continue;
        }

        // Retrieve assigning data
        auto& edge = *read_graph.out_edges(*tasklet).begin();
        auto& dst = static_cast<const data_flow::AccessNode&>(edge.dst());
        std::string assigning_data = dst.data();
        data_flow::Subset assigning_subset = edge.subset();
        const data_flow::AccessNode* assigning_node = &dst;
        if (read_graph.out_degree(*assigning_node) != 0) {
            continue;
        }

        // Criterion: Not casting types
        auto& assigning_type = types::infer_type(sdfg, sdfg.type(assigning_data), assigning_subset);
        if (assigning_type.primitive_type() != tasklet->output().second.primitive_type()) {
            continue;
        }
        if (type.primitive_type() != tasklet->inputs().begin()->second.primitive_type()) {
            continue;
        }

        // Risky symbols
        std::unordered_set<std::string> risky_symbols = {assigning_data};
        for (auto& dim : assigning_subset) {
            for (auto& atom : symbolic::atoms(dim)) {
                auto sym = SymEngine::rcp_static_cast<const SymEngine::Symbol>(atom)->get_name();
                risky_symbols.insert(sym);
            }
        }

        // Propagate assigning data into container write

        // Criterion: Written to by access node
        auto& write_access_node = static_cast<data_flow::AccessNode&>(*write->element());
        auto& write_graph = *write->parent();

        // Criterion: Data races
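        // The single write must dominate the read and the read must post-dominate the
        // write, so the pair executes together on every path through the SDFG.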
        if (!users.dominates(*write, *read) || !users.post_dominates(*read, *write)) {
            continue;
        }
        auto& uses_between = users.all_uses_between(*write, *read);
        bool race_condition = false;
        for (auto& user : uses_between) {
            // Unsafe pointer operations
            if (user->use() == analysis::Use::MOVE) {
                // Viewed container is not constant
                if (user->container() == assigning_data) {
                    race_condition = true;
                    break;
                }

                // Unsafe pointer operations
                auto& move_node = dynamic_cast<data_flow::AccessNode&>(*user->element());
                auto& move_graph = *user->parent();
                auto& move_edge = *move_graph.in_edges(move_node).begin();
                auto& view_node = dynamic_cast<data_flow::AccessNode&>(move_edge.src());
                if (move_edge.dst_conn() == "void" || symbolic::is_pointer(symbolic::symbol(view_node.data()))) {
                    race_condition = true;
                    break;
                }
            } else if (user->use() == analysis::Use::WRITE) {
                if (risky_symbols.find(user->container()) != risky_symbols.end()) {
                    race_condition = true;
                    break;
                }
            } else if (user->use() == analysis::Use::READ) {
                if (user->container() == assigning_data) {
                    race_condition = true;
                    break;
                }
            }
        }
        if (race_condition) {
            continue;
        }

        // Propagate
        write_access_node.data() = assigning_data;
        for (auto& iedge : write_graph.in_edges(write_access_node)) {
            iedge.set_subset(assigning_subset);
        }
        for (auto& oedge : write_graph.out_edges(write_access_node)) {
            oedge.set_subset(assigning_subset);
        }
        applied = true;

        visited.insert(container);
        visited.insert(assigning_data);

        // Remove tasklet and access node if all reads were propagated
        auto& tasklet_in_edge = *read_graph.in_edges(*tasklet).begin();
        auto& tasklet_out_edge = *read_graph.out_edges(*tasklet).begin();
        builder.remove_memlet(read_block, tasklet_in_edge);
        builder.remove_memlet(read_block, tasklet_out_edge);
        builder.remove_node(read_block, *tasklet);
        builder.remove_node(read_block, *assigning_node);
        if (read_graph.in_degree(read_access_node) == 0) {
            builder.remove_node(read_block, read_access_node);
        }
    }

    return applied;
};

} // namespace passes
} // namespace sdfg