• Home
  • Features
  • Pricing
  • Docs
  • Announcements
  • Sign In

daisytuner / docc / 21873449786

10 Feb 2026 04:31PM UTC coverage: 66.315% (-0.2%) from 66.496%
21873449786

Pull #513

github

web-flow
Merge 55ce04c5d into 5750a460e
Pull Request #513: adds tensor type

244 of 431 new or added lines in 18 files covered. (56.61%)

186 existing lines in 15 files now uncovered.

23488 of 35419 relevant lines covered (66.31%)

372.32 hits per line

Source File
Press 'n' to go to next uncovered line, 'b' for previous

69.9
/sdfg/src/types/utils.cpp
1
#include "sdfg/types/utils.h"

#include <cmath>
#include <cstddef>
#include <list>
#include <memory>
#include <stdexcept>
#include <string>
#include <unordered_set>

#include "sdfg/codegen/utils.h"
#include "sdfg/function.h"
#include "sdfg/symbolic/symbolic.h"

#include "sdfg/types/structure.h"
#include "sdfg/types/type.h"
13
namespace sdfg {
14
namespace types {
15

16
std::unique_ptr<types::IType>
17
infer_type_internal(const sdfg::Function& function, const types::IType& type, const data_flow::Subset& subset) {
5,665✔
18
    if (subset.empty()) {
5,665✔
19
        return type.clone();
2,974✔
20
    }
2,974✔
21

22
    if (type.type_id() == TypeID::Scalar) {
2,691✔
23
        if (!subset.empty()) {
×
24
            throw InvalidSDFGException("Scalar type must have no subset");
×
25
        }
×
26

NEW
27
        return type.clone();
×
28
    } else if (type.type_id() == TypeID::Array) {
2,691✔
29
        auto& array_type = static_cast<const types::Array&>(type);
2,682✔
30

31
        data_flow::Subset element_subset(subset.begin() + 1, subset.end());
2,682✔
32
        return infer_type_internal(function, array_type.element_type(), element_subset);
2,682✔
33
    } else if (type.type_id() == TypeID::Structure) {
2,682✔
34
        auto& structure_type = static_cast<const types::Structure&>(type);
7✔
35

36
        data_flow::Subset element_subset(subset.begin() + 1, subset.end());
7✔
37

38
        auto& definition = function.structure(structure_type.name());
7✔
39
        if (definition.is_vector()) {
7✔
40
            return infer_type_internal(function, definition.vector_element_type(), element_subset);
6✔
41
        }
6✔
42
        auto member = SymEngine::rcp_dynamic_cast<const SymEngine::Integer>(subset.at(0));
1✔
43
        return infer_type_internal(function, definition.member_type(member), element_subset);
1✔
44
    } else if (type.type_id() == TypeID::Tensor) {
7✔
45
        auto& tensor_type = static_cast<const types::Tensor&>(type);
2✔
46

47
        data_flow::Subset element_subset(subset.begin() + 1, subset.end());
2✔
48

49
        if (tensor_type.shape().size() == 1) {
2✔
50
            return infer_type_internal(function, tensor_type.element_type(), element_subset);
1✔
51
        } else {
1✔
52
            auto inner_tensor = std::make_unique<types::Tensor>(
1✔
53
                tensor_type.storage_type(),
1✔
54
                tensor_type.alignment(),
1✔
55
                tensor_type.initializer(),
1✔
56
                tensor_type.element_type(),
1✔
57
                symbolic::MultiExpression(tensor_type.shape().begin() + 1, tensor_type.shape().end()),
1✔
58
                symbolic::MultiExpression(tensor_type.strides().begin() + 1, tensor_type.strides().end()),
1✔
59
                tensor_type.offset()
1✔
60
            );
1✔
61
            return infer_type_internal(function, *inner_tensor, element_subset);
1✔
62
        }
1✔
63
    } else if (type.type_id() == TypeID::Pointer) {
2✔
64
        throw InvalidSDFGException("Subset references non-contiguous memory");
×
65
    }
×
66

67
    throw InvalidSDFGException("Type inference failed because of unknown type");
×
68
};
2,691✔
69

70
std::unique_ptr<types::IType>
71
infer_type(const sdfg::Function& function, const types::IType& type, const data_flow::Subset& subset) {
4,943✔
72
    if (subset.empty()) {
4,943✔
73
        return type.clone();
1,969✔
74
    }
1,969✔
75

76
    if (type.type_id() == TypeID::Pointer) {
2,974✔
77
        auto& pointer_type = static_cast<const types::Pointer&>(type);
1,911✔
78
        if (!pointer_type.has_pointee_type()) {
1,911✔
79
            throw InvalidSDFGException("Opaque pointer with non-empty subset");
×
80
        }
×
81

82
        auto& pointee_type = pointer_type.pointee_type();
1,911✔
83
        data_flow::Subset element_subset(subset.begin() + 1, subset.end());
1,911✔
84
        return infer_type_internal(function, pointee_type, element_subset);
1,911✔
85
    } else {
1,911✔
86
        return infer_type_internal(function, type, subset);
1,063✔
87
    }
1,063✔
88
};
2,974✔
89

90
/// Rebuilds the first `depth` array levels of `type` on top of `inner_type`,
/// cloning storage type, alignment, initializer and element counts per level.
///
/// @param type       Original (array-nested) type to copy the outer levels from.
/// @param depth      Number of array levels to reconstruct.
/// @param inner_type Replacement type installed below the reconstructed levels.
/// @return Newly constructed type.
/// @throws std::runtime_error if a non-array level is encountered before depth 0.
std::unique_ptr<types::IType> recombine_array_type(const types::IType& type, uint depth, const types::IType& inner_type) {
    // Depth exhausted: the replacement inner type takes over from here.
    if (depth == 0) {
        return inner_type.clone();
    }

    const auto* array = dynamic_cast<const types::Array*>(&type);
    if (array == nullptr) {
        throw std::runtime_error("construct_type: Non array types are not supported yet!");
    }

    // Rebuild the element type one level deeper, then wrap it in a copy of
    // this array level.
    auto rebuilt_element = recombine_array_type(array->element_type(), depth - 1, inner_type);
    return std::make_unique<types::Array>(
        array->storage_type(),
        array->alignment(),
        array->initializer(),
        *rebuilt_element,
        array->num_elements()
    );
};
107

108
/// Strips array, reference, tensor and (optionally) pointer wrappers from
/// `type` until the innermost element type is reached.
///
/// @param type       Type to peel.
/// @param follow_ptr Pointer-following budget: 0 stops at the first pointer;
///                   a positive value follows that many pointer levels;
///                   PEEL_TO_INNERMOST_ELEMENT_FOLLOW_ONLY_OUTER_PTR follows
///                   exactly one outermost pointer. Other negative values never
///                   decrement to 0 and thus follow pointers without limit
///                   (callers pass -1 for "follow all" — see is_contiguous_type).
/// @return Reference into the given type tree: the innermost element, or the
///         pointer itself when it is opaque or the budget is exhausted.
const IType& peel_to_innermost_element(const IType& type, int follow_ptr) {
    int next_follow = follow_ptr;
    if (follow_ptr == PEEL_TO_INNERMOST_ELEMENT_FOLLOW_ONLY_OUTER_PTR) {
        next_follow = 0; // only follow an outermost pointer
    }

    switch (type.type_id()) {
        case TypeID::Array:
            return peel_to_innermost_element(dynamic_cast<const types::Array&>(type).element_type(), next_follow);
        case TypeID::Reference:
            return peel_to_innermost_element(dynamic_cast<const codegen::Reference&>(type).reference_type(), next_follow);
        case TypeID::Tensor:
            return peel_to_innermost_element(dynamic_cast<const types::Tensor&>(type).element_type(), next_follow);
        case TypeID::Pointer:
            if (follow_ptr != 0) {
                if (follow_ptr != PEEL_TO_INNERMOST_ELEMENT_FOLLOW_ONLY_OUTER_PTR) {
                    next_follow = follow_ptr - 1; // follow one less pointer
                }

                auto& pointer_type = dynamic_cast<const types::Pointer&>(type);
                if (pointer_type.has_pointee_type()) {
                    return peel_to_innermost_element(pointer_type.pointee_type(), next_follow);
                } else {
                    // Opaque pointer: nothing to peel into.
                    return type;
                }
            }
            // fall back to cut-off if we did not follow the pointer
        default:
            // Scalar/other leaf types (or an unfollowed pointer): stop here.
            return type;
    }
}
139

140
/// Size in bytes of the contiguous element type reached through at most one
/// outer pointer level.
///
/// @param type                 Type to inspect.
/// @param allow_comp_time_eval Forwarded to get_type_size (allows symbolic
///                             size-of for structures).
/// @return Element size expression, or a null expression when unknown.
symbolic::Expression get_contiguous_element_size(const types::IType& type, bool allow_comp_time_eval) {
    // Peel explicitly instead of relying on primitive_type(): that would follow
    // ALL pointer levels (even e.g. T***), although multi-level pointers do not
    // describe contiguous memory.
    auto& element_type = peel_to_innermost_element(type, PEEL_TO_INNERMOST_ELEMENT_FOLLOW_ONLY_OUTER_PTR);
    return get_type_size(element_type, allow_comp_time_eval);
}
145

146
/// Computes the size in bytes of `type` as a symbolic expression.
///
/// @param type                 Type to size.
/// @param allow_comp_time_eval If true, structures yield a symbolic
///                             size_of_type placeholder instead of "unknown".
/// @return Size expression, or a null expression when the size is unknown.
symbolic::Expression get_type_size(const types::IType& type, bool allow_comp_time_eval) {
    switch (type.type_id()) {
        case TypeID::Pointer:
        case TypeID::Reference:
        case TypeID::Function:
            return symbolic::integer(8); // assume 64-bit pointers

        case TypeID::Structure:
            // Could not statically figure out the size.
            // TODO if we have the target definition, we could evaluate the
            // StructureDefinition to a size.
            if (allow_comp_time_eval) {
                return symbolic::size_of_type(type);
            }
            return {}; // size unknown

        case TypeID::Array: {
            auto& array_type = dynamic_cast<const types::Array&>(type);
            auto element_size = get_type_size(array_type.element_type(), allow_comp_time_eval);
            if (element_size.is_null()) {
                return {};
            }
            return symbolic::mul(element_size, array_type.num_elements());
        }

        case TypeID::Tensor: {
            auto& tensor_type = dynamic_cast<const types::Tensor&>(type);
            auto element_size = get_type_size(tensor_type.element_type(), allow_comp_time_eval);
            if (element_size.is_null()) {
                return {};
            }
            // Total element count is the product over all shape dimensions.
            symbolic::Expression num_elements = symbolic::one();
            for (const auto& dim : tensor_type.shape()) {
                num_elements = symbolic::mul(num_elements, dim);
            }
            return symbolic::mul(element_size, num_elements);
        }

        default: {
            // Should just be a primitive type: byte size from the bit width,
            // rounded up to whole bytes.
            auto prim_type = type.primitive_type();
            long size_of_type = static_cast<long>(std::ceil(static_cast<double>(types::bit_width(prim_type)) / 8.0));
            if (size_of_type != 0) {
                return symbolic::integer(size_of_type);
            }
            return {};
        }
    }
}
196

197
/// Returns the type one wrapper level below `type`: the element type of an
/// array/tensor, the referenced type of a reference, the pointee of a pointer,
/// `type` itself for leaf types, or nullptr for an opaque pointer.
const types::IType* peel_to_next_element(const types::IType& type) {
    const auto id = type.type_id();

    if (id == TypeID::Array) {
        return &dynamic_cast<const types::Array&>(type).element_type();
    }
    if (id == TypeID::Reference) {
        return &dynamic_cast<const codegen::Reference&>(type).reference_type();
    }
    if (id == TypeID::Tensor) {
        return &dynamic_cast<const types::Tensor&>(type).element_type();
    }
    if (id == TypeID::Pointer) {
        auto& pointer_type = dynamic_cast<const types::Pointer&>(type);
        // Opaque pointers have nothing to peel into.
        return pointer_type.has_pointee_type() ? &pointer_type.pointee_type() : nullptr;
    }

    // Leaf type: nothing further to peel.
    return &type;
}
217

218
/// Checks whether `base_type` describes a single contiguous region of memory.
/// Inner pointers (below the outermost level, or inside structure members) and
/// repeatedly nested structures make a type non-contiguous.
///
/// @param base_type Type to inspect.
/// @param sdfg      SDFG providing the structure definitions.
/// @return true if the type is contiguous.
bool is_contiguous_type(const types::IType& base_type, StructuredSDFG& sdfg) {
    auto& innermost = types::peel_to_innermost_element(base_type);
    if (innermost.type_id() == types::TypeID::Pointer) {
        return false;
    }

    // Check for distant nests: following every pointer level (-1 budget) must
    // reach the same innermost type; otherwise an inner indirection exists.
    if (innermost != types::peel_to_innermost_element(base_type, -1)) {
        return false;
    }

    // Breadth-first walk over nested structure members.
    if (innermost.type_id() == types::TypeID::Structure) {
        std::list<types::Structure> worklist;
        std::unordered_set<std::string> seen;
        worklist.push_back(dynamic_cast<const types::Structure&>(innermost));
        while (!worklist.empty()) {
            auto current = worklist.front();
            worklist.pop_front();
            if (seen.contains(current.name())) {
                return false; // infinitely nested structures are not supported
            }

            seen.insert(current.name());
            auto& definition = sdfg.structure(current.name());
            for (size_t i = 0; i < definition.num_members(); i++) {
                auto& member_type = definition.member_type(symbolic::integer(i));
                if (member_type.type_id() == types::TypeID::Structure) {
                    worklist.push_back(dynamic_cast<const types::Structure&>(member_type));
                } else if (member_type.type_id() == types::TypeID::Pointer) {
                    return false; // pointers in structures are not supported
                }
            }
        }
    }
    return true;
}
255

256
} // namespace types
257
} // namespace sdfg
STATUS · Troubleshooting · Open an Issue · Sales · Support · CAREERS · ENTERPRISE · START FREE · SCHEDULE DEMO
ANNOUNCEMENTS · TWITTER · TOS & SLA · Supported CI Services · What's a CI service? · Automated Testing

© 2026 Coveralls, Inc